language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/ReactiveTypeAdapters.java | {
"start": 7929,
"end": 8306
} | enum ____ implements Function<Publisher<?>, Completable> {
INSTANCE;
@Override
public Completable apply(Publisher<?> source) {
return RxReactiveStreams.toCompletable(source);
}
}
/**
* An adapter {@link Function} to adopt a {@link Publisher} to {@link Observable}.
*/
public | PublisherToRxJava1CompletableAdapter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/SuperCallToObjectMethodTest.java | {
"start": 3425,
"end": 3894
} | class ____ {
int i;
@Override
public boolean equals(Object obj) {
if (obj instanceof Foo) {
return i == ((Foo) obj).i;
}
return this == obj;
}
}
""")
.doTest();
}
@Test
public void refactoringNeedsParens() {
refactoringHelper()
.addInputLines(
"Foo.java",
"""
| Foo |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/plugins/DoNotMockEnforcer.java | {
"start": 2473,
"end": 3322
} | class
____ (type == null) {
return null;
}
if (Cache.MOCKABLE_TYPES.contains(type)) {
return null;
}
String warning = checkTypeForDoNotMockViolation(type);
if (warning != null) {
return warning;
}
warning = recursiveCheckDoNotMockAnnotationForType(type.getSuperclass());
if (warning != null) {
return warning;
}
for (Class<?> aClass : type.getInterfaces()) {
warning = recursiveCheckDoNotMockAnnotationForType(aClass);
if (warning != null) {
return warning;
}
}
Cache.MOCKABLE_TYPES.add(type);
return null;
}
/**
* Static cache for types that are known to be mockable and
* thus may be skipped while traversing the | if |
java | google__error-prone | annotations/src/main/java/com/google/errorprone/annotations/CompileTimeConstant.java | {
"start": 1585,
"end": 2301
} | class ____ that is declared {@code final} and has the {@link
* CompileTimeConstant} annotation, or
* <li>the expression is a {@link String}, and formed from the concatenation of symbols which meet
* these conditions, or
* <li>the expression is a ternary condition, where both branches satisfy these conditions, or
* <li>the expression is a switch expression, where every case is either a constant expression or
* throws, or
* <li>the expression is an immutable collection with all values known to satisfy these conditions
* (for example, {@code ImmutableSet.of("a", "b", "c")}).
* </ol>
*
* <p>For example, the following code snippet is legal:
*
* <pre>{@code
* public | field |
java | google__guice | extensions/dagger-adapter/test/com/google/inject/daggeradapter/MultibindsTest.java | {
"start": 1443,
"end": 2969
} | interface ____ {
@Multibinds
Set<Number> set();
@Multibinds
Map<Integer, Double> map();
@Multibinds
@TestQualifier
Set<Number> qualifiedSet();
@Multibinds
@TestQualifier
Map<Integer, Double> qualifiedMap();
}
public void testBinds() {
Injector injector = Guice.createInjector(DaggerAdapter.from(BasicModule.class));
Binding<Set<Number>> setBinding = injector.getBinding(new Key<Set<Number>>() {});
assertThat(setBinding).hasProvidedValueThat().isEqualTo(ImmutableSet.of());
assertThat(setBinding).hasSource(BasicModule.class, "set");
Binding<Map<Integer, Double>> mapBinding =
injector.getBinding(new Key<Map<Integer, Double>>() {});
assertThat(mapBinding).hasProvidedValueThat().isEqualTo(ImmutableMap.of());
assertThat(mapBinding).hasSource(BasicModule.class, "map");
Binding<Set<Number>> qualifiedSetBinding =
injector.getBinding(Key.get(new TypeLiteral<Set<Number>>() {}, TestQualifier.class));
assertThat(qualifiedSetBinding).hasProvidedValueThat().isEqualTo(ImmutableSet.of());
assertThat(qualifiedSetBinding).hasSource(BasicModule.class, "qualifiedSet");
Binding<Map<Integer, Double>> qualifiedMapBinding =
injector.getBinding(
Key.get(new TypeLiteral<Map<Integer, Double>>() {}, TestQualifier.class));
assertThat(qualifiedMapBinding).hasProvidedValueThat().isEqualTo(ImmutableMap.of());
assertThat(qualifiedMapBinding).hasSource(BasicModule.class, "qualifiedMap");
}
}
| BasicModule |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-metadata/src/main/java/org/springframework/boot/configurationmetadata/Hints.java | {
"start": 1022,
"end": 2677
} | class ____ {
private final List<ValueHint> keyHints = new ArrayList<>();
private final List<ValueProvider> keyProviders = new ArrayList<>();
private final List<ValueHint> valueHints = new ArrayList<>();
private final List<ValueProvider> valueProviders = new ArrayList<>();
/**
* The list of well-defined keys, if any. Only applicable if the type of the related
* item is a {@link java.util.Map}. If no extra {@link ValueProvider provider} is
* specified, these values are to be considered a closed-set of the available keys for
* the map.
* @return the key hints
*/
public List<ValueHint> getKeyHints() {
return this.keyHints;
}
/**
* The value providers that are applicable to the keys of this item. Only applicable
* if the type of the related item is a {@link java.util.Map}. Only one
* {@link ValueProvider} is enabled for a key: the first in the list that is supported
* should be used.
* @return the key providers
*/
public List<ValueProvider> getKeyProviders() {
return this.keyProviders;
}
/**
* The list of well-defined values, if any. If no extra {@link ValueProvider provider}
* is specified, these values are to be considered a closed-set of the available
* values for this item.
* @return the value hints
*/
public List<ValueHint> getValueHints() {
return this.valueHints;
}
/**
* The value providers that are applicable to this item. Only one
* {@link ValueProvider} is enabled for an item: the first in the list that is
* supported should be used.
* @return the value providers
*/
public List<ValueProvider> getValueProviders() {
return this.valueProviders;
}
}
| Hints |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/handlers/SseResponseWriterHandler.java | {
"start": 272,
"end": 715
} | class ____ implements ServerRestHandler {
public static final SseResponseWriterHandler INSTANCE = new SseResponseWriterHandler();
public SseResponseWriterHandler() {
}
@Override
public void handle(ResteasyReactiveRequestContext requestContext) throws Exception {
requestContext.getSseEventSink().sendInitialResponse(requestContext.serverResponse());
requestContext.suspend();
}
}
| SseResponseWriterHandler |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/aot/BeanFactoryInitializationAotProcessor.java | {
"start": 1766,
"end": 1945
} | interface ____ not contributed.
*
* @author Phillip Webb
* @author Stephane Nicoll
* @since 6.0
* @see BeanFactoryInitializationAotContribution
*/
@FunctionalInterface
public | is |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/DeadLetterChannelSetHeaderTest.java | {
"start": 1037,
"end": 1969
} | class ____ extends ContextTestSupport {
@Test
public void testDLCSetHeader() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:error");
mock.expectedBodiesReceived("Hello World");
mock.expectedHeaderReceived("foo", "123");
mock.expectedHeaderReceived("bar", "456");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").errorHandler(deadLetterChannel("direct:error"))
.throwException(new IllegalArgumentException("Damn"));
from("direct:error").setHeader("foo", constant("123")).setHeader("bar", constant("456")).to("mock:error");
}
};
}
}
| DeadLetterChannelSetHeaderTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableInternalHelper.java | {
"start": 944,
"end": 1009
} | class ____ support Observable with inner classes.
*/
public final | to |
java | google__guice | extensions/throwingproviders/test/com/google/inject/throwingproviders/CheckedProvidersTest.java | {
"start": 5058,
"end": 5448
} | class ____ extends Error {}
public void testCheckThrowable_errorNotDeclared_throwsExpectedError() throws Exception {
FooCheckedProvider provider =
CheckedProviders.throwing(FooCheckedProvider.class, ExpectedError.class);
try {
provider.get();
fail("Expected an exception to be thrown");
} catch (ExpectedError e) {
// expected
}
}
}
| ExpectedError |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/consumer/InputGateTestBase.java | {
"start": 1271,
"end": 4006
} | class ____ {
int gateIndex;
@BeforeEach
void resetGateIndex() {
gateIndex = 0;
}
protected void testIsAvailable(
InputGate inputGateToTest,
SingleInputGate inputGateToNotify,
TestInputChannel inputChannelWithNewData)
throws Exception {
assertThat(inputGateToTest.getAvailableFuture()).isNotDone();
assertThat(inputGateToTest.pollNext()).isNotPresent();
CompletableFuture<?> future = inputGateToTest.getAvailableFuture();
assertThat(inputGateToTest.getAvailableFuture()).isNotDone();
assertThat(inputGateToTest.pollNext()).isNotPresent();
assertThat(inputGateToTest.getAvailableFuture()).isEqualTo(future);
inputChannelWithNewData.readBuffer();
inputGateToNotify.notifyChannelNonEmpty(inputChannelWithNewData);
assertThat(future).isDone();
assertThat(inputGateToTest.getAvailableFuture())
.isDone()
.isEqualTo(PullingAsyncDataInput.AVAILABLE);
}
protected void testIsAvailableAfterFinished(
InputGate inputGateToTest, Runnable endOfPartitionEvent) throws Exception {
CompletableFuture<?> available = inputGateToTest.getAvailableFuture();
assertThat(available).isNotDone();
assertThat(inputGateToTest.pollNext()).isNotPresent();
endOfPartitionEvent.run();
assertThat(inputGateToTest.pollNext()).isNotEmpty(); // EndOfPartitionEvent
assertThat(available).isDone();
assertThat(inputGateToTest.getAvailableFuture()).isDone();
assertThat(inputGateToTest.getAvailableFuture()).isEqualTo(PullingAsyncDataInput.AVAILABLE);
}
protected SingleInputGate createInputGate() {
return createInputGate(2);
}
protected SingleInputGate createInputGate(int numberOfInputChannels) {
return createInputGate(null, numberOfInputChannels, ResultPartitionType.PIPELINED);
}
protected SingleInputGate createInputGate(
NettyShuffleEnvironment environment,
int numberOfInputChannels,
ResultPartitionType partitionType) {
SingleInputGateBuilder builder =
new SingleInputGateBuilder()
.setNumberOfChannels(numberOfInputChannels)
.setSingleInputGateIndex(gateIndex++)
.setResultPartitionType(partitionType);
if (environment != null) {
builder = builder.setupBufferPoolFactory(environment);
}
SingleInputGate inputGate = builder.build();
assertThat(inputGate.getConsumedPartitionType()).isEqualTo(partitionType);
return inputGate;
}
}
| InputGateTestBase |
java | apache__rocketmq | common/src/main/java/org/apache/rocketmq/common/stats/RTStatsItem.java | {
"start": 1079,
"end": 1698
} | class ____ extends StatsItem {
public RTStatsItem(String statsName, String statsKey, ScheduledExecutorService scheduledExecutorService,
Logger logger) {
super(statsName, statsKey, scheduledExecutorService, logger);
}
/**
* For Response Time stat Item, the print detail should be a little different, TPS and SUM makes no sense.
* And we give a name "AVGRT" rather than AVGPT for value getAvgpt()
*/
@Override
protected String statPrintDetail(StatsSnapshot ss) {
return String.format("TIMES: %d AVGRT: %.2f", ss.getTimes(), ss.getAvgpt());
}
}
| RTStatsItem |
java | apache__kafka | storage/src/main/java/org/apache/kafka/server/log/remote/quota/RLMQuotaMetrics.java | {
"start": 1163,
"end": 2352
} | class ____ implements AutoCloseable {
private final SensorAccess sensorAccess;
private final Metrics metrics;
private final String name;
private final String descriptionFormat;
private final String group;
private final long expirationTime;
public RLMQuotaMetrics(Metrics metrics, String name, String group, String descriptionFormat, long expirationTime) {
ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
this.sensorAccess = new SensorAccess(lock, metrics);
this.metrics = metrics;
this.name = name;
this.group = group;
this.expirationTime = expirationTime;
this.descriptionFormat = descriptionFormat;
}
public Sensor sensor() {
return sensorAccess.getOrCreate(name, expirationTime, s -> {
s.add(metrics.metricName(name + "-avg", group,
String.format(descriptionFormat, "average")), new Avg());
s.add(metrics.metricName(name + "-max", group,
String.format(descriptionFormat, "maximum")), new Max());
});
}
@Override
public void close() {
this.metrics.removeSensor(name);
}
}
| RLMQuotaMetrics |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeDelayOtherPublisher.java | {
"start": 1212,
"end": 1641
} | class ____<T, U> extends AbstractMaybeWithUpstream<T, T> {
final Publisher<U> other;
public MaybeDelayOtherPublisher(MaybeSource<T> source, Publisher<U> other) {
super(source);
this.other = other;
}
@Override
protected void subscribeActual(MaybeObserver<? super T> observer) {
source.subscribe(new DelayMaybeObserver<>(observer, other));
}
static final | MaybeDelayOtherPublisher |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceOrderByExpressionWithEval.java | {
"start": 880,
"end": 2052
} | class ____ extends OptimizerRules.OptimizerRule<OrderBy> {
private static int counter = 0;
@Override
protected LogicalPlan rule(OrderBy orderBy) {
int size = orderBy.order().size();
List<Alias> evals = new ArrayList<>(size);
List<Order> newOrders = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
var order = orderBy.order().get(i);
if (order.child() instanceof Attribute == false) {
var name = rawTemporaryName("order_by", String.valueOf(i), String.valueOf(counter++));
var eval = new Alias(order.child().source(), name, order.child());
newOrders.add(order.replaceChildren(List.of(eval.toAttribute())));
evals.add(eval);
} else {
newOrders.add(order);
}
}
if (evals.isEmpty()) {
return orderBy;
} else {
var newOrderBy = new OrderBy(orderBy.source(), new Eval(orderBy.source(), orderBy.child(), evals), newOrders);
return new Project(orderBy.source(), newOrderBy, orderBy.output());
}
}
}
| ReplaceOrderByExpressionWithEval |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java | {
"start": 1798,
"end": 2656
} | class ____ extends ActionResponse implements ToXContentObject {
private final Collection<CertificateInfo> certificates;
public Response(Collection<CertificateInfo> certificates) {
this.certificates = certificates;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray();
for (CertificateInfo cert : certificates) {
cert.toXContent(builder, params);
}
return builder.endArray();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(certificates.size());
for (CertificateInfo cert : certificates) {
cert.writeTo(out);
}
}
}
public static | Response |
java | apache__maven | compat/maven-model-builder/src/test/java/org/apache/maven/model/interpolation/StringVisitorModelInterpolatorTest.java | {
"start": 874,
"end": 1151
} | class ____ extends AbstractModelInterpolatorTest {
@Override
protected ModelInterpolator createInterpolator() {
return new StringVisitorModelInterpolator().setVersionPropertiesProcessor(new DefaultModelVersionProcessor());
}
}
| StringVisitorModelInterpolatorTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntities.java | {
"start": 1306,
"end": 1530
} | class ____ a set of timeline entities.
*/
@XmlRootElement(name = "entities")
@XmlAccessorType(XmlAccessType.NONE)
@InterfaceAudience.Public
@InterfaceStability.Unstable
@JsonIgnoreProperties({"children", "parent"})
public | hosts |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java | {
"start": 2235,
"end": 2486
} | class ____ for this parser with a 'parameters' field that maps to
// PutFollowParameters class. But since two minor version are already released with duplicate follow parameters
// in several APIs, PutFollowParameters is now the Value | here |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/metrics/MetricsOptions.java | {
"start": 633,
"end": 857
} | class ____ be extended by provider implementations to configure
* those specific implementations.
*
* @author <a href="mailto:julien@julienviet.com">Julien Viet</a>
*/
@DataObject
@JsonGen(publicConverter = false)
public | can |
java | spring-projects__spring-security | oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/authentication/OAuth2AuthorizationCodeAuthenticationProviderTests.java | {
"start": 2420,
"end": 7656
} | class ____ {
private ClientRegistration clientRegistration;
private OAuth2AuthorizationRequest authorizationRequest;
private OAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> accessTokenResponseClient;
private OAuth2AuthorizationCodeAuthenticationProvider authenticationProvider;
@BeforeEach
@SuppressWarnings("unchecked")
public void setUp() {
this.clientRegistration = TestClientRegistrations.clientRegistration().build();
this.authorizationRequest = TestOAuth2AuthorizationRequests.request().build();
this.accessTokenResponseClient = mock(OAuth2AccessTokenResponseClient.class);
this.authenticationProvider = new OAuth2AuthorizationCodeAuthenticationProvider(this.accessTokenResponseClient);
}
@Test
public void constructorWhenAccessTokenResponseClientIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() -> new OAuth2AuthorizationCodeAuthenticationProvider(null));
}
@Test
public void supportsWhenTypeOAuth2AuthorizationCodeAuthenticationTokenThenReturnTrue() {
assertThat(this.authenticationProvider.supports(OAuth2AuthorizationCodeAuthenticationToken.class)).isTrue();
}
@Test
public void authenticateWhenAuthorizationErrorResponseThenThrowOAuth2AuthorizationException() {
OAuth2AuthorizationResponse authorizationResponse = TestOAuth2AuthorizationResponses.error()
.errorCode(OAuth2ErrorCodes.INVALID_REQUEST)
.build();
OAuth2AuthorizationExchange authorizationExchange = new OAuth2AuthorizationExchange(this.authorizationRequest,
authorizationResponse);
assertThatExceptionOfType(OAuth2AuthorizationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(
new OAuth2AuthorizationCodeAuthenticationToken(this.clientRegistration, authorizationExchange)))
.withMessageContaining(OAuth2ErrorCodes.INVALID_REQUEST);
}
@Test
public void authenticateWhenAuthorizationResponseStateNotEqualAuthorizationRequestStateThenThrowOAuth2AuthorizationException() {
OAuth2AuthorizationResponse authorizationResponse = TestOAuth2AuthorizationResponses.success()
.state("67890")
.build();
OAuth2AuthorizationExchange authorizationExchange = new OAuth2AuthorizationExchange(this.authorizationRequest,
authorizationResponse);
assertThatExceptionOfType(OAuth2AuthorizationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(
new OAuth2AuthorizationCodeAuthenticationToken(this.clientRegistration, authorizationExchange)))
.withMessageContaining("invalid_state_parameter");
}
@Test
public void authenticateWhenAuthorizationSuccessResponseThenExchangedForAccessToken() {
OAuth2AccessTokenResponse accessTokenResponse = TestOAuth2AccessTokenResponses.accessTokenResponse()
.refreshToken("refresh")
.build();
given(this.accessTokenResponseClient.getTokenResponse(any())).willReturn(accessTokenResponse);
OAuth2AuthorizationExchange authorizationExchange = new OAuth2AuthorizationExchange(this.authorizationRequest,
TestOAuth2AuthorizationResponses.success().build());
OAuth2AuthorizationCodeAuthenticationToken authenticationResult = (OAuth2AuthorizationCodeAuthenticationToken) this.authenticationProvider
.authenticate(
new OAuth2AuthorizationCodeAuthenticationToken(this.clientRegistration, authorizationExchange));
assertThat(authenticationResult.isAuthenticated()).isTrue();
assertThat(authenticationResult.getPrincipal()).isEqualTo(this.clientRegistration.getClientId());
assertThat(authenticationResult.getCredentials())
.isEqualTo(accessTokenResponse.getAccessToken().getTokenValue());
assertThat(authenticationResult.getAuthorities()).isEqualTo(Collections.emptyList());
assertThat(authenticationResult.getClientRegistration()).isEqualTo(this.clientRegistration);
assertThat(authenticationResult.getAuthorizationExchange()).isEqualTo(authorizationExchange);
assertThat(authenticationResult.getAccessToken()).isEqualTo(accessTokenResponse.getAccessToken());
assertThat(authenticationResult.getRefreshToken()).isEqualTo(accessTokenResponse.getRefreshToken());
}
// gh-5368
@Test
public void authenticateWhenAuthorizationSuccessResponseThenAdditionalParametersIncluded() {
Map<String, Object> additionalParameters = new HashMap<>();
additionalParameters.put("param1", "value1");
additionalParameters.put("param2", "value2");
OAuth2AccessTokenResponse accessTokenResponse = TestOAuth2AccessTokenResponses.accessTokenResponse()
.additionalParameters(additionalParameters)
.build();
given(this.accessTokenResponseClient.getTokenResponse(any())).willReturn(accessTokenResponse);
OAuth2AuthorizationExchange authorizationExchange = new OAuth2AuthorizationExchange(this.authorizationRequest,
TestOAuth2AuthorizationResponses.success().build());
OAuth2AuthorizationCodeAuthenticationToken authentication = (OAuth2AuthorizationCodeAuthenticationToken) this.authenticationProvider
.authenticate(
new OAuth2AuthorizationCodeAuthenticationToken(this.clientRegistration, authorizationExchange));
assertThat(authentication.getAdditionalParameters())
.containsAllEntriesOf(accessTokenResponse.getAdditionalParameters());
}
}
| OAuth2AuthorizationCodeAuthenticationProviderTests |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/JSqlParserQueryEnhancerUnitTests.java | {
"start": 1379,
"end": 13824
} | class ____ extends QueryEnhancerTckTests {
@Override
QueryEnhancer createQueryEnhancer(DeclaredQuery query) {
return new JSqlParserQueryEnhancer(query);
}
@Test // GH-3546
void shouldApplySorting() {
QueryEnhancer enhancer = createQueryEnhancer(DeclaredQuery.jpqlQuery("SELECT e FROM Employee e"));
String sql = enhancer.rewrite(new DefaultQueryRewriteInformation(Sort.by("foo", "bar"),
ReturnedType.of(Object.class, Object.class, new SpelAwareProxyProjectionFactory())));
assertThat(sql).isEqualTo("SELECT e FROM Employee e ORDER BY e.foo ASC, e.bar ASC");
}
@Test // GH-3886
void shouldApplySortingWithNullsPrecedence() {
QueryEnhancer enhancer = createQueryEnhancer(DeclaredQuery.jpqlQuery("SELECT e FROM Employee e"));
String sql = enhancer.rewrite(new DefaultQueryRewriteInformation(
Sort.by(Sort.Order.asc("foo").with(Sort.NullHandling.NULLS_LAST),
Sort.Order.desc("bar").with(Sort.NullHandling.NULLS_FIRST)),
ReturnedType.of(Object.class, Object.class, new SpelAwareProxyProjectionFactory())));
assertThat(sql).isEqualTo("SELECT e FROM Employee e ORDER BY e.foo ASC NULLS LAST, e.bar DESC NULLS FIRST");
}
@Test // GH-3707
void countQueriesShouldConsiderPrimaryTableAlias() {
QueryEnhancer enhancer = createQueryEnhancer(DeclaredQuery.nativeQuery("""
SELECT DISTINCT a.*, b.b1
FROM TableA a
JOIN TableB b ON a.b = b.b
LEFT JOIN TableC c ON b.c = c.c
ORDER BY b.b1, a.a1, a.a2
"""));
String sql = enhancer.createCountQueryFor(null);
assertThat(sql).startsWith("SELECT count(DISTINCT a.*) FROM TableA a");
}
@Override
@ParameterizedTest // GH-2773
@MethodSource("jpqlCountQueries")
void shouldDeriveJpqlCountQuery(String query, String expected) {
assumeThat(query).as("JSQLParser does not support JPQL").isNull();
}
@Test
// GH-2578
void setOperationListWorks() {
String setQuery = "select SOME_COLUMN from SOME_TABLE where REPORTING_DATE = :REPORTING_DATE \n" //
+ "except \n" //
+ "select SOME_COLUMN from SOME_OTHER_TABLE where REPORTING_DATE = :REPORTING_DATE";
DefaultEntityQuery query = new TestEntityQuery(setQuery, true);
QueryEnhancer queryEnhancer = QueryEnhancer.create(query);
assertThat(query.getAlias()).isNullOrEmpty();
assertThat(query.getProjection()).isEqualToIgnoringCase("SOME_COLUMN");
assertThat(query.hasConstructorExpression()).isFalse();
assertThat(queryEnhancer.createCountQueryFor(null)).isEqualToIgnoringCase(setQuery);
assertThat(queryEnhancer.rewrite(getRewriteInformation(Sort.by("SOME_COLUMN"))))
.endsWith("ORDER BY SOME_COLUMN ASC");
assertThat(queryEnhancer.detectAlias()).isNullOrEmpty();
assertThat(queryEnhancer.getProjection()).isEqualToIgnoringCase("SOME_COLUMN");
assertThat(queryEnhancer.hasConstructorExpression()).isFalse();
}
@Test // GH-2578
void complexSetOperationListWorks() {
String setQuery = "select SOME_COLUMN from SOME_TABLE where REPORTING_DATE = :REPORTING_DATE \n" //
+ "except \n" //
+ "select SOME_COLUMN from SOME_OTHER_TABLE where REPORTING_DATE = :REPORTING_DATE \n" //
+ "union select SOME_COLUMN from SOME_OTHER_OTHER_TABLE";
DefaultEntityQuery query = new TestEntityQuery(setQuery, true);
QueryEnhancer queryEnhancer = QueryEnhancerFactory.forQuery(query).create(query);
assertThat(query.getAlias()).isNullOrEmpty();
assertThat(query.getProjection()).isEqualToIgnoringCase("SOME_COLUMN");
assertThat(query.hasConstructorExpression()).isFalse();
assertThat(queryEnhancer.createCountQueryFor(null)).isEqualToIgnoringCase(setQuery);
assertThat(queryEnhancer.rewrite(getRewriteInformation(Sort.by("SOME_COLUMN").ascending())))
.endsWith("ORDER BY SOME_COLUMN ASC");
assertThat(queryEnhancer.detectAlias()).isNullOrEmpty();
assertThat(queryEnhancer.getProjection()).isEqualToIgnoringCase("SOME_COLUMN");
assertThat(queryEnhancer.hasConstructorExpression()).isFalse();
}
@Test // GH-2578
void deeplyNestedcomplexSetOperationListWorks() {
String setQuery = "SELECT CustomerID FROM (\n" //
+ "\t\t\tselect * from Customers\n" //
+ "\t\t\texcept\n"//
+ "\t\t\tselect * from Customers where country = 'Austria'\n"//
+ "\t)\n" //
+ "\texcept\n"//
+ "\tselect CustomerID from customers where country = 'Germany'\n"//
+ "\t;";
DefaultEntityQuery query = new TestEntityQuery(setQuery, true);
QueryEnhancer queryEnhancer = QueryEnhancerFactory.forQuery(query).create(query);
assertThat(query.getAlias()).isNullOrEmpty();
assertThat(query.getProjection()).isEqualToIgnoringCase("CustomerID");
assertThat(query.hasConstructorExpression()).isFalse();
assertThat(queryEnhancer.createCountQueryFor(null)).isEqualToIgnoringCase(setQuery);
assertThat(queryEnhancer.rewrite(getRewriteInformation(Sort.by("CustomerID").descending())))
.endsWith("ORDER BY CustomerID DESC");
assertThat(queryEnhancer.detectAlias()).isNullOrEmpty();
assertThat(queryEnhancer.getProjection()).isEqualToIgnoringCase("CustomerID");
assertThat(queryEnhancer.hasConstructorExpression()).isFalse();
}
@Test // GH-2578
void valuesStatementsWorks() {
String setQuery = "VALUES (1, 2, 'test')";
DefaultEntityQuery query = new TestEntityQuery(setQuery, true);
QueryEnhancer queryEnhancer = QueryEnhancerFactory.forQuery(query).create(query);
assertThat(query.getAlias()).isNullOrEmpty();
assertThat(query.getProjection()).isNullOrEmpty();
assertThat(query.hasConstructorExpression()).isFalse();
assertThat(queryEnhancer.createCountQueryFor(null)).isEqualToIgnoringCase(setQuery);
assertThat(queryEnhancer.rewrite(getRewriteInformation(Sort.by("CustomerID").descending()))).isEqualTo(setQuery);
assertThat(queryEnhancer.detectAlias()).isNullOrEmpty();
assertThat(queryEnhancer.getProjection()).isNullOrEmpty();
assertThat(queryEnhancer.hasConstructorExpression()).isFalse();
}
@Test // GH-2578
void withStatementsWorks() {
String setQuery = "with sample_data(day, value) as (values ((0, 13), (1, 12), (2, 15), (3, 4), (4, 8), (5, 16))) \n"
+ "select day, value from sample_data as a";
DefaultEntityQuery query = new TestEntityQuery(setQuery, true);
QueryEnhancer queryEnhancer = QueryEnhancerFactory.forQuery(query).create(query);
assertThat(query.getAlias()).isEqualToIgnoringCase("a");
assertThat(query.getProjection()).isEqualToIgnoringCase("day, value");
assertThat(query.hasConstructorExpression()).isFalse();
assertThat(queryEnhancer.createCountQueryFor(null).toLowerCase()).isEqualToIgnoringWhitespace(
"with sample_data (day, value) as (values ((0, 13), (1, 12), (2, 15), (3, 4), (4, 8), (5, 16))) "
+ "select count(1) from sample_data as a");
assertThat(queryEnhancer.rewrite(getRewriteInformation(Sort.by("day").descending())))
.endsWith("ORDER BY a.day DESC");
assertThat(queryEnhancer.detectAlias()).isEqualToIgnoringCase("a");
assertThat(queryEnhancer.getProjection()).isEqualToIgnoringCase("day, value");
assertThat(queryEnhancer.hasConstructorExpression()).isFalse();
}
@Test // GH-2578
void multipleWithStatementsWorks() {
String setQuery = "with sample_data(day, value) as (values ((0, 13), (1, 12), (2, 15), (3, 4), (4, 8), (5, 16))), test2 as (values (1,2,3)) \n"
+ "select day, value from sample_data as a";
DefaultEntityQuery query = new TestEntityQuery(setQuery, true);
QueryEnhancer queryEnhancer = QueryEnhancerFactory.forQuery(query).create(query);
assertThat(query.getAlias()).isEqualToIgnoringCase("a");
assertThat(query.getProjection()).isEqualToIgnoringCase("day, value");
assertThat(query.hasConstructorExpression()).isFalse();
assertThat(queryEnhancer.createCountQueryFor(null).toLowerCase()).isEqualToIgnoringWhitespace(
"with sample_data (day, value) as (values ((0, 13), (1, 12), (2, 15), (3, 4), (4, 8), (5, 16))), test2 as (values (1, 2, 3)) "
+ "select count(1) from sample_data as a");
assertThat(queryEnhancer.rewrite(getRewriteInformation(Sort.by("day").descending())))
.endsWith("ORDER BY a.day DESC");
assertThat(queryEnhancer.detectAlias()).isEqualToIgnoringCase("a");
assertThat(queryEnhancer.getProjection()).isEqualToIgnoringCase("day, value");
assertThat(queryEnhancer.hasConstructorExpression()).isFalse();
}
@Test // GH-3038
void truncateStatementShouldWork() {
DefaultEntityQuery query = new TestEntityQuery("TRUNCATE TABLE foo", true);
QueryEnhancer queryEnhancer = QueryEnhancerFactory.forQuery(query).create(query);
assertThat(query.getAlias()).isNull();
assertThat(query.getProjection()).isEmpty();
assertThat(query.hasConstructorExpression()).isFalse();
assertThatIllegalStateException()
.isThrownBy(() -> queryEnhancer.rewrite(getRewriteInformation(Sort.by("day").descending())))
.isInstanceOf(IllegalStateException.class)
.withMessageContaining("Cannot apply sorting to OTHER statement");
assertThat(queryEnhancer.detectAlias()).isNull();
assertThat(queryEnhancer.getProjection()).isEmpty();
assertThat(queryEnhancer.hasConstructorExpression()).isFalse();
}
@Test // GH-3869
void shouldWorkWithParenthesesSelect() {
DefaultEntityQuery query = new TestEntityQuery("(SELECT is_contained_in(:innerId, :outerId))", true);
QueryEnhancer queryEnhancer = QueryEnhancerFactory.forQuery(query).create(query);
assertThat(query.getQueryString()).isEqualTo("(SELECT is_contained_in(:innerId, :outerId))");
assertThat(query.getAlias()).isNull();
assertThat(queryEnhancer.getProjection()).isEqualTo("is_contained_in(:innerId, :outerId)");
}
@ParameterizedTest // GH-2641
@MethodSource("mergeStatementWorksSource")
void mergeStatementWorksWithJSqlParser(String queryString, String alias) {
DefaultEntityQuery query = new TestEntityQuery(queryString, true);
QueryEnhancer queryEnhancer = QueryEnhancerFactory.forQuery(query).create(query);
assertThat(queryEnhancer.detectAlias()).isEqualTo(alias);
assertThat(QueryUtils.detectAlias(queryString)).isNull();
assertThat(queryEnhancer.detectAlias()).isEqualTo(alias);
assertThat(queryEnhancer.getProjection()).isEmpty();
assertThat(queryEnhancer.hasConstructorExpression()).isFalse();
}
static Stream<Arguments> mergeStatementWorksSource() {
return Stream.of( //
Arguments.of(
"merge into a using (select id, value from b) query on (a.id = query.id) when matched then update set a.value = value",
"query"),
Arguments.of(
"merge into a using (select id2, value from b) on (id = id2) when matched then update set a.value = value",
null));
}
@Test // GH-2856
void nativeInsertQueryThrowsExceptionForCountQuery() {
DeclaredQuery query = DeclaredQuery.nativeQuery("INSERT INTO users(name) VALUES('John')");
QueryEnhancer enhancer = new JSqlParserQueryEnhancer(query);
assertThatIllegalStateException().isThrownBy(() -> enhancer.createCountQueryFor(null))
.withMessageContaining("Cannot derive count query for INSERT statement").withMessageContaining("SELECT");
}
@Test // GH-2856
void nativeUpdateQueryThrowsExceptionForSorting() {
DeclaredQuery query = DeclaredQuery.nativeQuery("UPDATE users SET name = 'test'");
QueryEnhancer enhancer = new JSqlParserQueryEnhancer(query);
// When/Then: Should throw IllegalStateException for sorting
Sort sort = Sort.by("id");
QueryEnhancer.QueryRewriteInformation rewriteInfo = new DefaultQueryRewriteInformation(
sort, ReturnedType.of(Object.class, Object.class, new SpelAwareProxyProjectionFactory()));
assertThatIllegalStateException().isThrownBy(() -> enhancer.rewrite(rewriteInfo))
.withMessageContaining("Cannot apply sorting to UPDATE statement").withMessageContaining("SELECT");
}
@Test // GH-2856
void nativeAllowsUnsortedForNonSelectQueries() {
DeclaredQuery query = DeclaredQuery.nativeQuery("UPDATE users SET name = 'test'");
QueryEnhancer enhancer = new JSqlParserQueryEnhancer(query);
QueryEnhancer.QueryRewriteInformation rewriteInfo = new DefaultQueryRewriteInformation(
Sort.unsorted(), ReturnedType.of(Object.class, Object.class, new SpelAwareProxyProjectionFactory()));
String result = enhancer.rewrite(rewriteInfo);
assertThat(result).containsIgnoringCase("UPDATE users");
}
private static DefaultQueryRewriteInformation getRewriteInformation(Sort sort) {
return new DefaultQueryRewriteInformation(sort,
ReturnedType.of(Object.class, Object.class, new SpelAwareProxyProjectionFactory()));
}
}
| JSqlParserQueryEnhancerUnitTests |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/view/LocaleContextResolverIntegrationTests.java | {
"start": 4106,
"end": 4200
} | class ____ {
@GetMapping("/")
public String foo() {
return "foo";
}
}
}
| TestController |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ExtendsAutoValueTest.java | {
"start": 8526,
"end": 8601
} | class ____ {}
@Generated("generator")
public | AutoClass |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/support/FactoryBeanRegistrySupport.java | {
"start": 1513,
"end": 1618
} | class ____ {@link AbstractBeanFactory}.
*
* @author Juergen Hoeller
* @since 2.5.1
*/
public abstract | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/basic/ReloadAssociatedEntitiesTest.java | {
"start": 7414,
"end": 7927
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String name;
@OneToMany(mappedBy = "three")
private Set<ConcreteTwo> twos = new HashSet<>();
public ConcreteThree() {
}
public ConcreteThree(String name, Set<ConcreteTwo> twos) {
this.name = name;
this.twos = twos;
}
public Long getId() {
return id;
}
public Set<ConcreteTwo> getTwos() {
return twos;
}
public String getName() {
return name;
}
}
@Entity(name = "SimpleOne")
public static | ConcreteThree |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/main/java/org/springframework/boot/web/server/reactive/context/AnnotationConfigReactiveWebServerApplicationContext.java | {
"start": 2775,
"end": 7046
} | class ____ extends ReactiveWebServerApplicationContext
implements AnnotationConfigRegistry {
private final AnnotatedBeanDefinitionReader reader;
private final ClassPathBeanDefinitionScanner scanner;
private final Set<Class<?>> annotatedClasses = new LinkedHashSet<>();
private String @Nullable [] basePackages;
/**
* Create a new {@link AnnotationConfigReactiveWebServerApplicationContext} that needs
* to be populated through {@link #register} calls and then manually
* {@linkplain #refresh refreshed}.
*/
public AnnotationConfigReactiveWebServerApplicationContext() {
this.reader = new AnnotatedBeanDefinitionReader(this);
this.scanner = new ClassPathBeanDefinitionScanner(this);
}
/**
* Create a new {@link AnnotationConfigReactiveWebServerApplicationContext} with the
* given {@code DefaultListableBeanFactory}. The context needs to be populated through
* {@link #register} calls and then manually {@linkplain #refresh refreshed}.
* @param beanFactory the DefaultListableBeanFactory instance to use for this context
*/
public AnnotationConfigReactiveWebServerApplicationContext(DefaultListableBeanFactory beanFactory) {
super(beanFactory);
this.reader = new AnnotatedBeanDefinitionReader(this);
this.scanner = new ClassPathBeanDefinitionScanner(this);
}
/**
* Create a new {@link AnnotationConfigReactiveWebServerApplicationContext}, deriving
* bean definitions from the given annotated classes and automatically refreshing the
* context.
* @param annotatedClasses one or more annotated classes, e.g. {@code @Configuration}
* classes
*/
public AnnotationConfigReactiveWebServerApplicationContext(Class<?>... annotatedClasses) {
this();
register(annotatedClasses);
refresh();
}
/**
* Create a new {@link AnnotationConfigReactiveWebServerApplicationContext}, scanning
* for bean definitions in the given packages and automatically refreshing the
* context.
* @param basePackages the packages to check for annotated classes
*/
public AnnotationConfigReactiveWebServerApplicationContext(String... basePackages) {
this();
scan(basePackages);
refresh();
}
/**
* {@inheritDoc}
* <p>
* Delegates given environment to underlying {@link AnnotatedBeanDefinitionReader} and
* {@link ClassPathBeanDefinitionScanner} members.
*/
@Override
public void setEnvironment(ConfigurableEnvironment environment) {
super.setEnvironment(environment);
this.reader.setEnvironment(environment);
this.scanner.setEnvironment(environment);
}
/**
* Provide a custom {@link BeanNameGenerator} for use with
* {@link AnnotatedBeanDefinitionReader} and/or
* {@link ClassPathBeanDefinitionScanner}, if any.
* <p>
* Default is
* {@link org.springframework.context.annotation.AnnotationBeanNameGenerator}.
* <p>
* Any call to this method must occur prior to calls to {@link #register(Class...)}
* and/or {@link #scan(String...)}.
* @param beanNameGenerator the bean name generator
* @see AnnotatedBeanDefinitionReader#setBeanNameGenerator
* @see ClassPathBeanDefinitionScanner#setBeanNameGenerator
*/
public void setBeanNameGenerator(BeanNameGenerator beanNameGenerator) {
this.reader.setBeanNameGenerator(beanNameGenerator);
this.scanner.setBeanNameGenerator(beanNameGenerator);
getBeanFactory().registerSingleton(AnnotationConfigUtils.CONFIGURATION_BEAN_NAME_GENERATOR, beanNameGenerator);
}
/**
* Set the {@link ScopeMetadataResolver} to use for detected bean classes.
* <p>
* The default is an {@link AnnotationScopeMetadataResolver}.
* <p>
* Any call to this method must occur prior to calls to {@link #register(Class...)}
* and/or {@link #scan(String...)}.
* @param scopeMetadataResolver the scope metadata resolver
*/
public void setScopeMetadataResolver(ScopeMetadataResolver scopeMetadataResolver) {
this.reader.setScopeMetadataResolver(scopeMetadataResolver);
this.scanner.setScopeMetadataResolver(scopeMetadataResolver);
}
/**
* Register one or more annotated classes to be processed. Note that
* {@link #refresh()} must be called in order for the context to fully process the new
* class.
* <p>
* Calls to {@code #register} are idempotent; adding the same annotated | AnnotationConfigReactiveWebServerApplicationContext |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-integration/src/main/java/smoketest/integration/HelloWorldService.java | {
"start": 722,
"end": 1009
} | class ____ {
private final ServiceProperties configuration;
public HelloWorldService(ServiceProperties configuration) {
this.configuration = configuration;
}
public String getHelloMessage(String name) {
return this.configuration.getGreeting() + " " + name;
}
}
| HelloWorldService |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/support/BoundedAsyncPool.java | {
"start": 16061,
"end": 16124
} | enum ____ {
ACTIVE, TERMINATING, TERMINATED;
}
}
| State |
java | quarkusio__quarkus | integration-tests/oidc-wiremock/src/main/java/io/quarkus/it/keycloak/StartupResource.java | {
"start": 221,
"end": 566
} | class ____ {
private final StartupService startupService;
public StartupResource(StartupService startupService) {
this.startupService = startupService;
}
@GET
public Map<String, Map<String, Set<String>>> tenantToIdentityWithRole() {
return startupService.getTenantToIdentityWithRole();
}
}
| StartupResource |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/jmx/access/MBeanClientInterceptor.java | {
"start": 3772,
"end": 7987
} | class ____
implements MethodInterceptor, BeanClassLoaderAware, InitializingBean, DisposableBean {
/** Logger available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());
private @Nullable MBeanServerConnection server;
private @Nullable JMXServiceURL serviceUrl;
private @Nullable Map<String, ?> environment;
private @Nullable String agentId;
private boolean connectOnStartup = true;
private boolean refreshOnConnectFailure = false;
private @Nullable ObjectName objectName;
private boolean useStrictCasing = true;
private @Nullable Class<?> managementInterface;
private @Nullable ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
private final ConnectorDelegate connector = new ConnectorDelegate();
private @Nullable MBeanServerConnection serverToUse;
private @Nullable MBeanServerInvocationHandler invocationHandler;
private Map<String, MBeanAttributeInfo> allowedAttributes = Collections.emptyMap();
private Map<MethodCacheKey, MBeanOperationInfo> allowedOperations = Collections.emptyMap();
private final Map<Method, String[]> signatureCache = new HashMap<>();
private final Object preparationMonitor = new Object();
/**
* Set the {@code MBeanServerConnection} used to connect to the
* MBean which all invocations are routed to.
*/
public void setServer(MBeanServerConnection server) {
this.server = server;
}
/**
* Set the service URL of the remote {@code MBeanServer}.
*/
public void setServiceUrl(String url) throws MalformedURLException {
this.serviceUrl = new JMXServiceURL(url);
}
/**
* Specify the environment for the JMX connector.
* @see javax.management.remote.JMXConnectorFactory#connect(javax.management.remote.JMXServiceURL, java.util.Map)
*/
public void setEnvironment(@Nullable Map<String, ?> environment) {
this.environment = environment;
}
/**
* Allow {@code Map} access to the environment to be set for the connector,
* with the option to add or override specific entries.
* <p>Useful for specifying entries directly, for example via
* {@code environment[myKey]}. This is particularly useful for
* adding or overriding entries in child bean definitions.
*/
public @Nullable Map<String, ?> getEnvironment() {
return this.environment;
}
/**
* Set the agent id of the {@code MBeanServer} to locate.
* <p>Default is none. If specified, this will result in an
* attempt being made to locate the attendant MBeanServer, unless
* the {@link #setServiceUrl "serviceUrl"} property has been set.
* @see javax.management.MBeanServerFactory#findMBeanServer(String)
* <p>Specifying the empty String indicates the platform MBeanServer.
*/
public void setAgentId(String agentId) {
this.agentId = agentId;
}
/**
* Set whether the proxy should connect to the {@code MBeanServer}
* at creation time ({@code true}) or the first time it is invoked
* ({@code false}). Default is {@code true}.
*/
public void setConnectOnStartup(boolean connectOnStartup) {
this.connectOnStartup = connectOnStartup;
}
/**
* Set whether to refresh the MBeanServer connection on connect failure.
* Default is {@code false}.
* <p>Can be turned on to allow for hot restart of the JMX server,
* automatically reconnecting and retrying in case of an IOException.
*/
public void setRefreshOnConnectFailure(boolean refreshOnConnectFailure) {
this.refreshOnConnectFailure = refreshOnConnectFailure;
}
/**
* Set the {@code ObjectName} of the MBean which calls are routed to,
* as {@code ObjectName} instance or as {@code String}.
*/
public void setObjectName(Object objectName) throws MalformedObjectNameException {
this.objectName = ObjectNameManager.getInstance(objectName);
}
/**
* Set whether to use strict casing for attributes. Enabled by default.
* <p>When using strict casing, a JavaBean property with a getter such as
* {@code getFoo()} translates to an attribute called {@code Foo}.
* With strict casing disabled, {@code getFoo()} would translate to just
* {@code foo}.
*/
public void setUseStrictCasing(boolean useStrictCasing) {
this.useStrictCasing = useStrictCasing;
}
/**
* Set the management | MBeanClientInterceptor |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/language/BeanTest.java | {
"start": 1314,
"end": 4695
} | class ____ extends LanguageTestSupport {
@Test
public void testSimpleExpressions() {
assertExpression("foo.echo('e::o')", "e::o");
assertExpression("foo.echo('e.o')", "e.o");
assertExpression("my.company.MyClass::echo('a')", "a");
assertExpression("my.company.MyClass::echo('a.b')", "a.b");
assertExpression("my.company.MyClass::echo('a::b')", "a::b");
assertExpression("foo.cheese", "abc");
assertExpression("foo?method=cheese", "abc");
assertExpression("my.company.MyClass::cheese", "abc");
assertExpression("foo?method=echo('e::o')", "e::o");
}
@Test
public void testPredicates() {
assertPredicate("foo.isFooHeaderAbc");
assertPredicate("foo?method=isFooHeaderAbc");
assertPredicate("my.company.MyClass::isFooHeaderAbc");
}
@Test
public void testDoubleColon() {
assertPredicate("foo::isFooHeaderAbc");
NoSuchBeanException e = assertThrows(NoSuchBeanException.class,
() -> assertPredicateFails("foo:isFooHeaderAbc"),
"Should throw exception");
assertEquals("foo:isFooHeaderAbc", e.getName());
}
@Test
public void testBeanTypeExpression() {
Expression exp = new BeanExpression(MyUser.class, null);
exp.init(context);
Exchange exchange = createExchangeWithBody("Claus");
Object result = exp.evaluate(exchange, Object.class);
assertEquals("Hello Claus", result);
}
@Test
public void testBeanTypeAndMethodExpression() {
Expression exp = new BeanExpression(MyUser.class, "hello");
exp.init(context);
Exchange exchange = createExchangeWithBody("Claus");
Object result = exp.evaluate(exchange, Object.class);
assertEquals("Hello Claus", result);
}
@Test
public void testBeanInstanceAndMethodExpression() {
MyUser user = new MyUser();
Expression exp = new BeanExpression(user, "hello");
exp.init(context);
Exchange exchange = createExchangeWithBody("Claus");
Object result = exp.evaluate(exchange, Object.class);
assertEquals("Hello Claus", result);
}
@Test
public void testNoMethod() {
MyUser user = new MyUser();
Exception e = assertThrows(Exception.class, () -> {
Expression exp = new BeanExpression(user, "unknown");
exp.init(context);
}, "Should throw exception");
MethodNotFoundException mnfe = assertIsInstanceOf(MethodNotFoundException.class, e);
assertSame(user, mnfe.getBean());
assertEquals("unknown", mnfe.getMethodName());
}
@Test
public void testNoMethodBeanLookup() {
MethodNotFoundException e = assertThrows(MethodNotFoundException.class, () -> {
Expression exp = new BeanExpression("foo", "cake");
exp.init(context);
}, "Should throw exception");
assertEquals("cake", e.getMethodName());
}
@Override
protected String getLanguageName() {
return "bean";
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("foo", new MyBean());
answer.bind("my.company.MyClass", new MyBean());
return answer;
}
public static | BeanTest |
java | grpc__grpc-java | servlet/src/undertowTest/java/io/grpc/servlet/UndertowTransportTest.java | {
"start": 2254,
"end": 10033
} | class ____ extends AbstractTransportTest {
private static final String HOST = "localhost";
private static final String MYAPP = "/service";
private final FakeClock fakeClock = new FakeClock();
private Undertow undertowServer;
private DeploymentManager manager;
private int port;
@After
@Override
public void tearDown() throws InterruptedException {
super.tearDown();
if (undertowServer != null) {
undertowServer.stop();
}
if (manager != null) {
try {
manager.stop();
} catch (ServletException e) {
throw new AssertionError("failed to stop container", e);
}
}
}
@Override
protected InternalServer newServer(List<ServerStreamTracer.Factory>
streamTracerFactories) {
return new InternalServer() {
final InternalServer delegate =
new ServletServerBuilder().buildTransportServers(streamTracerFactories);
@Override
public void start(ServerListener listener) throws IOException {
delegate.start(listener);
ScheduledExecutorService scheduler = fakeClock.getScheduledExecutorService();
ServerTransportListener serverTransportListener =
listener.transportCreated(new ServerTransportImpl(scheduler));
ServletAdapter adapter =
new ServletAdapter(serverTransportListener, streamTracerFactories,
ServletAdapter.DEFAULT_METHOD_NAME_RESOLVER,
Integer.MAX_VALUE);
GrpcServlet grpcServlet = new GrpcServlet(adapter);
InstanceFactory<? extends Servlet> instanceFactory =
() -> new ImmediateInstanceHandle<>(grpcServlet);
DeploymentInfo servletBuilder =
deployment()
.setClassLoader(UndertowInteropTest.class.getClassLoader())
.setContextPath(MYAPP)
.setDeploymentName("UndertowTransportTest.war")
.addServlets(
servlet("TransportTestServlet", GrpcServlet.class, instanceFactory)
.addMapping("/*")
.setAsyncSupported(true));
manager = defaultContainer().addDeployment(servletBuilder);
manager.deploy();
HttpHandler servletHandler;
try {
servletHandler = manager.start();
} catch (ServletException e) {
throw new RuntimeException(e);
}
PathHandler path =
Handlers.path(Handlers.redirect(MYAPP))
.addPrefixPath("/", servletHandler); // for unimplementedService test
undertowServer =
Undertow.builder()
.setServerOption(UndertowOptions.ENABLE_HTTP2, true)
.setServerOption(UndertowOptions.SHUTDOWN_TIMEOUT, 5000 /* 5 sec */)
.addHttpListener(0, HOST)
.setHandler(path)
.build();
undertowServer.start();
port = ((InetSocketAddress) undertowServer.getListenerInfo().get(0).getAddress()).getPort();
}
@Override
public void shutdown() {
delegate.shutdown();
}
@Override
public SocketAddress getListenSocketAddress() {
return delegate.getListenSocketAddress();
}
@Override
public InternalInstrumented<SocketStats> getListenSocketStats() {
return delegate.getListenSocketStats();
}
@Override
public List<? extends SocketAddress> getListenSocketAddresses() {
return delegate.getListenSocketAddresses();
}
@Nullable
@Override
public List<InternalInstrumented<SocketStats>> getListenSocketStatsList() {
return delegate.getListenSocketStatsList();
}
};
}
@Override
protected InternalServer newServer(int port,
List<ServerStreamTracer.Factory> streamTracerFactories) {
return newServer(streamTracerFactories);
}
@Override
protected ManagedClientTransport newClientTransport(InternalServer server) {
NettyChannelBuilder nettyChannelBuilder = NettyChannelBuilder
// Although specified here, address is ignored because we never call build.
.forAddress("localhost", 0)
.flowControlWindow(65 * 1024)
.negotiationType(NegotiationType.PLAINTEXT);
InternalNettyChannelBuilder
.setTransportTracerFactory(nettyChannelBuilder, fakeClockTransportTracer);
ClientTransportFactory clientFactory =
InternalNettyChannelBuilder.buildTransportFactory(nettyChannelBuilder);
return clientFactory.newClientTransport(
new InetSocketAddress("localhost", port),
new ClientTransportFactory.ClientTransportOptions()
.setAuthority(testAuthority(server))
.setEagAttributes(eagAttrs()),
transportLogger());
}
@Override
protected String testAuthority(InternalServer server) {
return "localhost:" + port;
}
@Override
protected void advanceClock(long offset, TimeUnit unit) {
fakeClock.forwardNanos(unit.toNanos(offset));
}
@Override
protected long fakeCurrentTimeNanos() {
return fakeClock.getTicker().read();
}
@Override
@Ignore("Skip the test, server lifecycle is managed by the container")
@Test
public void serverAlreadyListening() {}
@Override
@Ignore("Skip the test, server lifecycle is managed by the container")
@Test
public void openStreamPreventsTermination() {}
@Override
@Ignore("Skip the test, server lifecycle is managed by the container")
@Test
public void shutdownNowKillsServerStream() {}
@Override
@Ignore("Skip the test, server lifecycle is managed by the container")
@Test
public void serverNotListening() {}
@Override
@Ignore("Skip the test, can not set HTTP/2 SETTINGS_MAX_HEADER_LIST_SIZE")
@Test
public void serverChecksInboundMetadataSize() {}
// FIXME
@Override
@Ignore("Undertow is broken on client GOAWAY")
@Test
public void newStream_duringShutdown() {}
// FIXME
@Override
@Ignore("Undertow is broken on client GOAWAY")
@Test
public void ping_duringShutdown() {}
// FIXME
@Override
@Ignore("Undertow is broken on client RST_STREAM")
@Test
public void frameAfterRstStreamShouldNotBreakClientChannel() {}
// FIXME
@Override
@Ignore("Undertow is broken on client RST_STREAM")
@Test
public void shutdownNowKillsClientStream() {}
// FIXME: https://github.com/grpc/grpc-java/issues/8925
@Override
@Ignore("flaky")
@Test
public void clientCancelFromWithinMessageRead() {}
// FIXME
@Override
@Ignore("Servlet flow control not implemented yet")
@Test
public void flowControlPushBack() {}
// FIXME
@Override
@Ignore("Servlet flow control not implemented yet")
@Test
public void flowControlDoesNotDeadlockLargeMessage() {
}
@Override
@Ignore("Server side sockets are managed by the servlet container")
@Test
public void socketStats() {}
@Override
@Ignore("serverTransportListener will not terminate")
@Test
public void clientStartAndStopOnceConnected() {}
@Override
@Ignore("clientStreamTracer1.getInboundTrailers() is not null; listeners.poll() doesn't apply")
@Test
public void serverCancel() {}
@Override
@Ignore("This doesn't apply: Ensure that for a closed ServerStream, interactions are noops")
@Test
public void interactionsAfterServerStreamCloseAreNoops() {}
@Override
@Ignore("listeners.poll() doesn't apply")
@Test
public void interactionsAfterClientStreamCancelAreNoops() {}
@Override
@Ignore("assertNull(serverStatus.getCause()) isn't true")
@Test
public void clientCancel() {}
@Override
@Ignore("regression since bumping grpc v1.46 to v1.53")
@Test
public void messageProducerOnlyProducesRequestedMessages() {}
}
| UndertowTransportTest |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/processor/internal/Processors.java | {
"start": 5090,
"end": 5450
} | class ____ on an annotation. */
public static ImmutableList<XTypeElement> getAnnotationClassValues(
XAnnotation annotation, String key) {
ImmutableList<XTypeElement> values = getOptionalAnnotationClassValues(annotation, key);
ProcessorErrors.checkState(
values.size() >= 1,
annotation.getTypeElement(),
"@%s, '%s' | attribute |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/annotation/CustomAutowireConfigurer.java | {
"start": 1880,
"end": 2669
} | class ____ implements BeanFactoryPostProcessor, BeanClassLoaderAware, Ordered {
private int order = Ordered.LOWEST_PRECEDENCE; // default: same as non-Ordered
private @Nullable Set<?> customQualifierTypes;
private @Nullable ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
public void setOrder(int order) {
this.order = order;
}
@Override
public int getOrder() {
return this.order;
}
@Override
public void setBeanClassLoader(@Nullable ClassLoader beanClassLoader) {
this.beanClassLoader = beanClassLoader;
}
/**
* Register custom qualifier annotation types to be considered
* when autowiring beans. Each element of the provided set may
* be either a Class instance or a String representation of the
* fully-qualified | CustomAutowireConfigurer |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/ParallelFluxHide.java | {
"start": 1020,
"end": 2034
} | class ____<T> extends ParallelFlux<T> implements Scannable{
final ParallelFlux<T> source;
ParallelFluxHide(ParallelFlux<T> source) {
this.source = ParallelFlux.from(source);
}
@Override
public int getPrefetch() {
return source.getPrefetch();
}
@Override
public int parallelism() {
return source.parallelism();
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return source;
if (key == Attr.PREFETCH) return getPrefetch();
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
if (key == InternalProducerAttr.INSTANCE) return true;
return null;
}
@Override
public void subscribe(CoreSubscriber<? super T>[] subscribers) {
if (!validate(subscribers)) {
return;
}
int n = subscribers.length;
@SuppressWarnings("unchecked") CoreSubscriber<? super T>[] parents =
new CoreSubscriber[n];
for (int i = 0; i < n; i++) {
parents[i] = new FluxHide.HideSubscriber<>(subscribers[i]);
}
source.subscribe(parents);
}
}
| ParallelFluxHide |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2900/Issue2982.java | {
"start": 219,
"end": 706
} | class ____ extends TestCase {
@Test
public void test_for_issue() {
String jsonStr = "[ { \"activity_type\" : 0, \"activity_id\" : \"***\", \"activity_tip\" : \"***\", \"position\" : \"1\" }, { \"activity_type\" : 0, \"activity_id\" : \"2669\", \"activity_tip\" : \"****\", \"position\" : \"1\" }]";
assertTrue(JSONArray.isValidArray(jsonStr));
assertTrue(JSON.isValidArray(jsonStr));
assertTrue(JSONObject.isValidArray(jsonStr));
}
}
| Issue2982 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/subselect/SubselectAndSingleAttributeIdClassTest.java | {
"start": 5381,
"end": 5914
} | class ____ {
@Column(name = "ID", nullable = false, precision = 9)
private Integer id;
public MyGrandchildId() {
}
public MyGrandchildId(Integer id) {
this.id = id;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
MyGrandchildId that = (MyGrandchildId) o;
return Objects.equals( id, that.id );
}
@Override
public int hashCode() {
return Objects.hash( id );
}
}
}
| MyGrandchildId |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng7335MissingJarInParallelBuild.java | {
"start": 931,
"end": 1524
} | class ____ extends AbstractMavenIntegrationTestCase {
private static final String PROJECT_PATH = "/mng-7335-missing-jar-in-parallel-build";
@Test
public void testMissingJarInParallelBuild() throws IOException, VerificationException {
final File projectDir = extractResources(PROJECT_PATH);
final Verifier verifier = newVerifier(projectDir.getAbsolutePath());
verifier.addCliArgument("-T1C");
verifier.addCliArguments("clean", "package");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
| MavenITmng7335MissingJarInParallelBuild |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2Exception.java | {
"start": 12838,
"end": 13524
} | class ____ extends Http2Exception implements Iterable<StreamException> {
private static final long serialVersionUID = 7091134858213711015L;
private final List<StreamException> exceptions;
public CompositeStreamException(Http2Error error, int initialCapacity) {
super(error, ShutdownHint.NO_SHUTDOWN);
exceptions = new ArrayList<StreamException>(initialCapacity);
}
public void add(StreamException e) {
exceptions.add(e);
}
@Override
public Iterator<StreamException> iterator() {
return exceptions.iterator();
}
}
private static final | CompositeStreamException |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/WildcardInputTypeStrategy.java | {
"start": 1583,
"end": 2704
} | class ____ implements InputTypeStrategy {
private static final ArgumentCount PASSING_ARGUMENT_COUNT = ConstantArgumentCount.any();
private final ArgumentCount argumentCount;
public WildcardInputTypeStrategy(ArgumentCount argumentCount) {
this.argumentCount = argumentCount;
}
public WildcardInputTypeStrategy() {
this(PASSING_ARGUMENT_COUNT);
}
@Override
public ArgumentCount getArgumentCount() {
return argumentCount;
}
@Override
public Optional<List<DataType>> inferInputTypes(
CallContext callContext, boolean throwOnFailure) {
return Optional.of(callContext.getArgumentDataTypes());
}
@Override
public List<Signature> getExpectedSignatures(FunctionDefinition definition) {
return Collections.singletonList(Signature.of(Argument.of("*")));
}
@Override
public boolean equals(Object o) {
return this == o || o instanceof WildcardInputTypeStrategy;
}
@Override
public int hashCode() {
return WildcardInputTypeStrategy.class.hashCode();
}
}
| WildcardInputTypeStrategy |
java | spring-projects__spring-boot | module/spring-boot-webmvc-test/src/test/java/org/springframework/boot/webmvc/test/autoconfigure/mockmvc/WebMvcTestPrintDefaultIntegrationTests.java | {
"start": 2518,
"end": 2745
} | class ____ {
@Autowired
private MockMvcTester mvc;
@Test
void test() {
assertThat(this.mvc.get().uri("/one")).hasStatusOk().hasBodyTextEqualTo("one");
}
}
@WebMvcTest
@AutoConfigureMockMvc
static | ShouldNotPrint |
java | spring-projects__spring-boot | module/spring-boot-couchbase/src/main/java/org/springframework/boot/couchbase/autoconfigure/CouchbaseProperties.java | {
"start": 2411,
"end": 3447
} | class ____ {
/**
* PEM-formatted certificates for certificate-based cluster authentication.
*/
private @Nullable String certificates;
/**
* PEM-formatted private key for certificate-based cluster authentication.
*/
private @Nullable String privateKey;
/**
* Private key password for certificate-based cluster authentication.
*/
private @Nullable String privateKeyPassword;
public @Nullable String getCertificates() {
return this.certificates;
}
public void setCertificates(@Nullable String certificates) {
this.certificates = certificates;
}
public @Nullable String getPrivateKey() {
return this.privateKey;
}
public void setPrivateKey(@Nullable String privateKey) {
this.privateKey = privateKey;
}
public @Nullable String getPrivateKeyPassword() {
return this.privateKeyPassword;
}
public void setPrivateKeyPassword(@Nullable String privateKeyPassword) {
this.privateKeyPassword = privateKeyPassword;
}
}
public static | Pem |
java | apache__camel | dsl/camel-endpointdsl/src/test/java/org/apache/camel/builder/endpoint/SedaEnrichSimpleExpressionTest.java | {
"start": 989,
"end": 1873
} | class ____ extends BaseEndpointDslTest {
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new EndpointRouteBuilder() {
@Override
public void configure() throws Exception {
from(direct("start")).enrich(seda("${exchangeProperty.whereTo}").advanced().offerTimeout(1000))
.to("mock:result");
from("seda:cheese")
.transform().constant("Hello World");
}
};
}
@Test
public void test() throws Exception {
MockEndpoint resultEndpoint = getMockEndpoint("mock:result");
resultEndpoint.expectedBodiesReceived("Hello World");
template.sendBodyAndProperty("direct:start", "Empty", "whereTo", "cheese");
MockEndpoint.assertIsSatisfied(context);
}
}
| SedaEnrichSimpleExpressionTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/monitor/process/ProcessService.java | {
"start": 1984,
"end": 2300
} | class ____ extends SingleObjectCache<ProcessStats> {
ProcessStatsCache(TimeValue interval, ProcessStats initValue) {
super(interval, initValue);
}
@Override
protected ProcessStats refresh() {
return ProcessProbe.processStats();
}
}
}
| ProcessStatsCache |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/opensaml5Main/java/org/springframework/security/saml2/provider/service/authentication/OpenSaml5AuthenticationProvider.java | {
"start": 30163,
"end": 34508
} | class ____ {
private final List<ConditionValidator> conditions = new ArrayList<>();
private final List<SubjectConfirmationValidator> subjects = new ArrayList<>();
private final Map<String, Object> validationParameters = new HashMap<>();
private Builder() {
this.conditions.add(new AudienceRestrictionConditionValidator());
this.conditions.add(new DelegationRestrictionConditionValidator());
this.conditions.add(new ValidConditionValidator(OneTimeUse.DEFAULT_ELEMENT_NAME));
this.conditions.add(new ProxyRestrictionConditionValidator());
this.subjects.add(new BearerSubjectConfirmationValidator());
this.validationParameters.put(SAML2AssertionValidationParameters.CLOCK_SKEW, Duration.ofMinutes(5));
}
/**
* Use this clock skew for validating assertion timestamps. The default is 5
* minutes.
* @param duration the duration to use
* @return the {@link Builder} for further configuration
*/
public Builder clockSkew(Duration duration) {
this.validationParameters.put(SAML2AssertionValidationParameters.CLOCK_SKEW, duration);
return this;
}
/**
* Mutate the map of {@link ValidationContext} static parameters. By default,
* these include:
* <ul>
* <li>{@link SAML2AssertionValidationParameters#SC_VALID_IN_RESPONSE_TO}</li>>
* <li>{@link SAML2AssertionValidationParameters#COND_VALID_AUDIENCES}</li>>
* <li>{@link SAML2AssertionValidationParameters#SC_VALID_RECIPIENTS}</li>>
* <li>{@link SAML2AssertionValidationParameters#VALID_ISSUERS}</li>>
* <li>{@link SAML2AssertionValidationParameters#SC_CHECK_ADDRESS}</li>>
* <li>{@link SAML2AssertionValidationParameters#CLOCK_SKEW}</li>>
* </ul>
*
* Note that several of these are required by various validation steps, for
* example {@code COND_VALID_AUDIENCES} is needed by
* {@link BearerSubjectConfirmationValidator}. If you do not want these, the
* best way to remove them is to remove the {@link #conditionValidators} or
* {@link #subjectValidators} themselves
* @param parameters the mutator to change the set of parameters
* @return
*/
public Builder validationContextParameters(Consumer<Map<String, Object>> parameters) {
parameters.accept(this.validationParameters);
return this;
}
/**
* Mutate the list of {@link ConditionValidator}s. By default, these include:
* <ul>
* <li>{@link AudienceRestrictionConditionValidator}</li>
* <li>{@link DelegationRestrictionConditionValidator}</li>
* <li>{@link ProxyRestrictionConditionValidator}</li>
* </ul>
* Note that it also adds a validator that skips the {@code saml2:OneTimeUse}
* element since this validator does not have caching facilities. However, you
* can construct your own instance of
* {@link org.opensaml.saml.saml2.assertion.impl.OneTimeUseConditionValidator}
* and supply it here.
* @param conditions the mutator for changing the list of conditions to use
* @return the {@link Builder} for further configuration
*/
public Builder conditionValidators(Consumer<List<ConditionValidator>> conditions) {
conditions.accept(this.conditions);
return this;
}
/**
* Mutate the list of {@link ConditionValidator}s.
* <p>
* By default it only has {@link BearerSubjectConfirmationValidator} for which
* address validation is skipped.
*
* To turn address validation on, use
* {@link #validationContextParameters(Consumer)} to set the
* {@link SAML2AssertionValidationParameters#SC_CHECK_ADDRESS} value.
* @param subjects the mutator for changing the list of conditions to use
* @return the {@link Builder} for further configuration
*/
public Builder subjectValidators(Consumer<List<SubjectConfirmationValidator>> subjects) {
subjects.accept(this.subjects);
return this;
}
/**
* Build the {@link AssertionValidator}
* @return the {@link AssertionValidator}
*/
public AssertionValidator build() {
AssertionValidator validator = new AssertionValidator(new ValidSignatureAssertionValidator(
this.conditions, this.subjects, List.of(), null, null, null));
validator.setValidationContextParameters((params) -> params.putAll(this.validationParameters));
return validator;
}
}
private static final | Builder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java | {
"start": 5683,
"end": 12612
} | class ____ {
private final ActionListener<Response> listener;
private final Request request;
private volatile ProjectStateObserver observer;
private ShardIterator shardIt;
AsyncSingleAction(Request request, ActionListener<Response> listener) {
this.request = request;
this.listener = listener;
}
public void start() {
final ProjectState state = getProjectState();
this.observer = new ProjectStateObserver(state, clusterService, request.timeout(), logger, threadPool.getThreadContext());
doStart(state);
}
protected void doStart(ProjectState projectState) {
try {
ClusterBlockException blockException = checkGlobalBlock(projectState);
if (blockException != null) {
if (blockException.retryable()) {
retry(blockException);
return;
} else {
throw blockException;
}
}
try {
request.concreteIndex(indexNameExpressionResolver.concreteWriteIndex(projectState.metadata(), request).getName());
} catch (IndexNotFoundException e) {
if (request.includeDataStreams() == false && e.getMetadataKeys().contains(EXCLUDED_DATA_STREAMS_KEY)) {
throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams");
} else {
throw e;
}
}
resolveRequest(projectState, request);
blockException = checkRequestBlock(projectState, request);
if (blockException != null) {
if (blockException.retryable()) {
retry(blockException);
return;
} else {
throw blockException;
}
}
shardIt = shards(projectState, request);
} catch (Exception e) {
listener.onFailure(e);
return;
}
// no shardIt, might be in the case between index gateway recovery and shardIt initialization
if (shardIt.size() == 0) {
retry(null);
return;
}
// this transport only make sense with an iterator that returns a single shard routing (like primary)
assert shardIt.size() == 1;
ShardRouting shard = shardIt.nextOrNull();
assert shard != null;
if (shard.active() == false) {
retry(null);
return;
}
request.shardId = shardIt.shardId();
DiscoveryNode node = projectState.cluster().nodes().get(shard.currentNodeId());
transportService.sendRequest(
node,
shardActionName,
request,
transportOptions(),
new ActionListenerResponseHandler<>(
listener,
TransportInstanceSingleOperationAction.this::newResponse,
TransportResponseHandler.TRANSPORT_WORKER
) {
@Override
public void handleException(TransportException exp) {
final Throwable cause = exp.unwrapCause();
// if we got disconnected from the node, or the node / shard is not in the right state (being closed)
if (cause instanceof ConnectTransportException || cause instanceof NodeClosedException || retryOnFailure(exp)) {
retry((Exception) cause);
} else {
listener.onFailure(exp);
}
}
}
);
}
void retry(@Nullable final Exception failure) {
if (observer.isTimedOut()) {
// we running as a last attempt after a timeout has happened. don't retry
Exception listenFailure = failure;
if (listenFailure == null) {
if (shardIt == null) {
listenFailure = new UnavailableShardsException(
request.concreteIndex(),
-1,
"Timeout waiting for [{}], request: {}",
request.timeout(),
actionName
);
} else {
listenFailure = new UnavailableShardsException(
shardIt.shardId(),
"[{}] shardIt, [{}] active : Timeout waiting for [{}], request: {}",
shardIt.size(),
shardIt.sizeActive(),
request.timeout(),
actionName
);
}
}
listener.onFailure(listenFailure);
return;
}
observer.waitForNextChange(new ProjectStateObserver.Listener() {
@Override
public void onProjectStateChange(ProjectState projectState) {
doStart(projectState);
}
@Override
public void onProjectMissing(ProjectId projectId, ClusterState clusterState) {
listener.onFailure(
new ResourceNotFoundException(
"project ["
+ projectId
+ "] does not exist in cluster state ["
+ clusterState.stateUUID()
+ "] version ["
+ clusterState.version()
+ "]"
)
);
}
@Override
public void onClusterServiceClose() {
listener.onFailure(new NodeClosedException(clusterService.localNode()));
}
@Override
public void onTimeout(TimeValue timeout) {
// just to be on the safe side, see if we can start it now?
observer.observeLastAppliedState(this);
}
}, request.timeout());
}
}
private void handleShardRequest(Request request, TransportChannel channel, Task task) {
executor(request.shardId).execute(
ActionRunnable.wrap(new ChannelActionListener<Response>(channel), l -> shardOperation(request, l))
);
}
}
| AsyncSingleAction |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/isolation/PluginsRecommenders.java | {
"start": 7640,
"end": 8229
} | class ____ extends ConverterPluginVersionRecommender {
@Override
protected String converterConfig() {
return ConnectorConfig.HEADER_CONVERTER_CLASS_CONFIG;
}
@Override
protected Function<String, List<Object>> recommendations() {
return converterClass -> plugins.headerConverters(converterClass).stream()
.map(PluginDesc::version).distinct().collect(Collectors.toList());
}
}
// Recommender for transformation and predicate plugins
public abstract | HeaderConverterPluginVersionRecommender |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java | {
"start": 5401,
"end": 9543
} | class ____ extends GroupingDocValuesSelector<BytesRef> {
private SortedDocValues values;
private int ord;
Keyword(MappedFieldType fieldType) {
super(fieldType.name());
}
@Override
public org.apache.lucene.search.grouping.GroupSelector.State advanceTo(int doc) throws IOException {
if (values.advanceExact(doc)) {
ord = values.ordValue();
return State.ACCEPT;
} else {
ord = -1;
return State.SKIP;
}
}
@Override
public BytesRef currentValue() {
if (ord == -1) {
return null;
} else {
try {
return values.lookupOrd(ord);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
public BytesRef copyValue() {
BytesRef value = currentValue();
if (value == null) {
return null;
} else {
return BytesRef.deepCopyOf(value);
}
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
LeafReader reader = readerContext.reader();
DocValuesType type = getDocValuesType(reader, field);
if (type == null || type == DocValuesType.NONE) {
values = DocValues.emptySorted();
return;
}
switch (type) {
case SORTED -> values = DocValues.getSorted(reader, field);
case SORTED_SET -> {
final SortedSetDocValues sorted = DocValues.getSortedSet(reader, field);
values = DocValues.unwrapSingleton(sorted);
if (values == null) {
values = new AbstractSortedDocValues() {
private int ord;
@Override
public boolean advanceExact(int target) throws IOException {
if (sorted.advanceExact(target)) {
if (sorted.docValueCount() > 1) {
throw new IllegalArgumentException(
"failed to extract doc:" + target + ", the grouping field must be single valued"
);
}
ord = (int) sorted.nextOrd();
return true;
} else {
return false;
}
}
@Override
public int docID() {
return sorted.docID();
}
@Override
public int ordValue() {
return ord;
}
@Override
public BytesRef lookupOrd(int ord) throws IOException {
return sorted.lookupOrd(ord);
}
@Override
public int getValueCount() {
return (int) sorted.getValueCount();
}
};
}
}
default -> throw new IllegalArgumentException("unexpected doc values type " + type + "` for field `" + field + "`");
}
}
@Override
public void setScorer(Scorable scorer) throws IOException {}
}
private static DocValuesType getDocValuesType(LeafReader in, String field) {
FieldInfo fi = in.getFieldInfos().fieldInfo(field);
if (fi != null) {
return fi.getDocValuesType();
}
return null;
}
}
| Keyword |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableWindow.java | {
"start": 8232,
"end": 14941
} | class ____<T>
extends AtomicInteger
implements FlowableSubscriber<T>, Subscription, Runnable {
private static final long serialVersionUID = 2428527070996323976L;
final Subscriber<? super Flowable<T>> downstream;
final SpscLinkedArrayQueue<UnicastProcessor<T>> queue;
final long size;
final long skip;
final ArrayDeque<UnicastProcessor<T>> windows;
final AtomicBoolean once;
final AtomicBoolean firstRequest;
final AtomicLong requested;
final AtomicInteger wip;
final int bufferSize;
long index;
long produced;
Subscription upstream;
volatile boolean done;
Throwable error;
volatile boolean cancelled;
WindowOverlapSubscriber(Subscriber<? super Flowable<T>> actual, long size, long skip, int bufferSize) {
super(1);
this.downstream = actual;
this.size = size;
this.skip = skip;
this.queue = new SpscLinkedArrayQueue<>(bufferSize);
this.windows = new ArrayDeque<>();
this.once = new AtomicBoolean();
this.firstRequest = new AtomicBoolean();
this.requested = new AtomicLong();
this.wip = new AtomicInteger();
this.bufferSize = bufferSize;
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
}
}
@Override
public void onNext(T t) {
long i = index;
UnicastProcessor<T> newWindow = null;
if (i == 0) {
if (!cancelled) {
getAndIncrement();
newWindow = UnicastProcessor.create(bufferSize, this);
windows.offer(newWindow);
}
}
i++;
for (Processor<T, T> w : windows) {
w.onNext(t);
}
if (newWindow != null) {
queue.offer(newWindow);
drain();
}
long p = produced + 1;
if (p == size) {
produced = p - skip;
Processor<T, T> w = windows.poll();
if (w != null) {
w.onComplete();
}
} else {
produced = p;
}
if (i == skip) {
index = 0;
} else {
index = i;
}
}
@Override
public void onError(Throwable t) {
for (Processor<T, T> w : windows) {
w.onError(t);
}
windows.clear();
error = t;
done = true;
drain();
}
@Override
public void onComplete() {
for (Processor<T, T> w : windows) {
w.onComplete();
}
windows.clear();
done = true;
drain();
}
void drain() {
if (wip.getAndIncrement() != 0) {
return;
}
final Subscriber<? super Flowable<T>> a = downstream;
final SpscLinkedArrayQueue<UnicastProcessor<T>> q = queue;
int missed = 1;
outer:
for (;;) {
if (cancelled) {
UnicastProcessor<T> up = null;
while ((up = q.poll()) != null) {
up.onComplete();
}
} else {
long r = requested.get();
long e = 0;
while (e != r) {
boolean d = done;
UnicastProcessor<T> t = q.poll();
boolean empty = t == null;
if (cancelled) {
continue outer;
}
if (checkTerminated(d, empty, a, q)) {
return;
}
if (empty) {
break;
}
FlowableWindowSubscribeIntercept<T> intercept = new FlowableWindowSubscribeIntercept<>(t);
a.onNext(intercept);
if (intercept.tryAbandon()) {
t.onComplete();
}
e++;
}
if (e == r) {
if (cancelled) {
continue;
}
if (checkTerminated(done, q.isEmpty(), a, q)) {
return;
}
}
if (e != 0L && r != Long.MAX_VALUE) {
requested.addAndGet(-e);
}
}
missed = wip.addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
boolean checkTerminated(boolean d, boolean empty, Subscriber<?> a, SpscLinkedArrayQueue<?> q) {
if (d) {
Throwable e = error;
if (e != null) {
q.clear();
a.onError(e);
return true;
} else
if (empty) {
a.onComplete();
return true;
}
}
return false;
}
@Override
public void request(long n) {
if (SubscriptionHelper.validate(n)) {
BackpressureHelper.add(requested, n);
if (!firstRequest.get() && firstRequest.compareAndSet(false, true)) {
long u = BackpressureHelper.multiplyCap(skip, n - 1);
long v = BackpressureHelper.addCap(size, u);
upstream.request(v);
} else {
long u = BackpressureHelper.multiplyCap(skip, n);
upstream.request(u);
}
drain();
}
}
@Override
public void cancel() {
cancelled = true;
if (once.compareAndSet(false, true)) {
run();
}
drain();
}
@Override
public void run() {
if (decrementAndGet() == 0) {
upstream.cancel();
}
}
}
}
| WindowOverlapSubscriber |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/ssl/FileCertificateProvider.java | {
"start": 2311,
"end": 11014
} | class ____ implements CertificateProvider {
private static final Logger LOG = LoggerFactory.getLogger(FileCertificateProvider.class);
private final String name;
private final Flux<KeyStore> flux;
private final WatchService watchService;
/**
* Create a provider that loads and optionally refreshes certificate material from disk.
*
* @param config file configuration
* @param scheduler scheduled executor for periodic refresh
* @param blockingExecutor executor used for blocking file watching
* @throws Exception if the initial load fails or watcher setup fails
*/
FileCertificateProvider(
@NonNull Config config,
@NonNull @jakarta.inject.Named(TaskExecutors.SCHEDULED) ExecutorService scheduler,
@NonNull @jakarta.inject.Named(TaskExecutors.BLOCKING) Executor blockingExecutor
) throws Exception {
if (config.refreshMode == RefreshMode.NONE) {
flux = Flux.just(load(config));
watchService = null;
} else {
Sinks.Many<KeyStore> sink = Sinks.many().replay().latest();
flux = sink.asFlux();
WatchService ws = null;
if (config.refreshMode == RefreshMode.FILE_WATCHER || config.refreshMode == RefreshMode.FILE_WATCHER_OR_SCHEDULER) {
Path directory = config.path.getParent();
try {
ws = directory.getFileSystem().newWatchService();
directory.register(ws, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY);
} catch (UnsupportedOperationException uoe) {
if (ws != null) {
try {
ws.close();
} catch (IOException ioe) {
uoe.addSuppressed(ioe);
}
ws = null;
}
if (config.refreshMode == RefreshMode.FILE_WATCHER) {
throw uoe;
} else {
LOG.debug("Failed to create watch service, falling back on scheduled refresh", uoe);
}
}
}
this.watchService = ws;
sink.tryEmitNext(load(config)).orThrow();
if (ws != null) {
WatchService finalWs = ws;
blockingExecutor.execute(() -> {
while (true) {
try {
WatchKey key = finalWs.take();
boolean changed = false;
for (WatchEvent<?> event : key.pollEvents()) {
if (event.context() instanceof Path ctx && (ctx.getFileName().equals(config.path.getFileName()) || (config.certificatePath != null && ctx.getFileName().equals(config.certificatePath.getFileName())))) {
changed = true;
break;
}
}
key.reset();
if (changed) {
loadSafe(sink, config);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
} catch (ClosedWatchServiceException e) {
break;
}
}
});
} else {
((ScheduledExecutorService) scheduler).scheduleWithFixedDelay(
() -> loadSafe(sink, config),
config.refreshInterval.toNanos(),
config.refreshInterval.toNanos(),
TimeUnit.NANOSECONDS);
}
}
name = config.name;
}
/**
* Stop watching files and release resources.
*
* @throws IOException if closing the watch service fails
*/
@PreDestroy
void close() throws IOException {
watchService.close();
}
private static void loadSafe(Sinks.Many<KeyStore> sink, Config config) {
try {
sink.tryEmitNext(load(config)).orThrow();
} catch (Exception e) {
LOG.error("Failed to load certificate file", e);
}
}
private static @NonNull KeyStore load(Config config) throws GeneralSecurityException, PemParser.NotPemException, IOException {
byte[] mainBytes = Files.readAllBytes(config.path);
byte[] certBytes;
if (config.certificatePath != null) {
if (config.format != Format.PEM) {
throw new ConfigurationException("A separate certificate-path is only permitted for PEM format. Please mark this certificate as PEM format explicitly.");
}
certBytes = Files.readAllBytes(config.certificatePath);
} else {
certBytes = null;
}
return load(config, mainBytes, certBytes);
}
static @NonNull KeyStore load(AbstractCertificateFileConfig config, byte[] mainBytes, byte[] certBytes) throws GeneralSecurityException, PemParser.NotPemException, IOException {
KeyStore ks;
if (config.format == null) {
try {
ks = load(config, mainBytes, certBytes, Format.JKS);
} catch (IOException e) {
if (e.getCause() instanceof UnrecoverableKeyException) {
throw e;
}
try {
ks = load(config, mainBytes, certBytes, Format.PEM);
} catch (PemParser.NotPemException f) {
// probably should have been loaded as KS
e.addSuppressed(new Exception("Also tried and failed to load the input as PEM", f));
throw e;
} catch (Exception f) {
// probably should have been loaded as PEM
f.addSuppressed(new Exception("Also tried and failed to load the input as a key store", e));
throw f;
}
}
} else {
ks = load(config, mainBytes, certBytes, config.format);
}
return ks;
}
private static KeyStore load(AbstractCertificateFileConfig config, byte @NonNull [] mainBytes, byte @Nullable [] certBytes, @NonNull Format format) throws GeneralSecurityException, IOException, PemParser.NotPemException {
KeyStore ks = KeyStore.getInstance(format == Format.JKS ? "JKS" : "PKCS12");
if (format == Format.PEM) {
ks.load(null, null);
PemParser pemParser = new PemParser(null, config.password);
List<Object> mainObjects = pemParser.loadPem(mainBytes);
if (mainObjects.get(0) instanceof PrivateKey pk) {
List<Object> certObjects;
if (mainObjects.size() > 1) {
certObjects = mainObjects.subList(1, mainObjects.size());
if (certBytes != null) {
throw new ConfigurationException("Separate cert-path given but main file also contained certificates");
}
} else if (certBytes != null) {
certObjects = pemParser.loadPem(certBytes);
} else {
certObjects = List.of();
}
ks.setKeyEntry("key", pk, null, SslBuilder.certificates(certObjects).toArray(new X509Certificate[0]));
} else {
if (certBytes != null) {
throw new ConfigurationException("Separate cert-path given but main file only contained certificates");
}
List<X509Certificate> certificates = SslBuilder.certificates(mainObjects);
for (int i = 0; i < certificates.size(); i++) {
ks.setCertificateEntry("cert" + i, certificates.get(i));
}
}
} else {
ks.load(new ByteArrayInputStream(mainBytes), config.password == null ? null : config.password.toCharArray());
}
return ks;
}
@NonNull
@Override
public Publisher<@NonNull KeyStore> getKeyStore() {
return flux;
}
@Override
public @NonNull String getName() {
return name;
}
/**
* Configuration for file-based certificate material. Supports JKS/PKCS12 and PEM,
* with optional automatic reloading.
*/
@EachProperty(CONFIG_PREFIX + ".file")
@BootstrapContextCompatible
public static final | FileCertificateProvider |
java | quarkusio__quarkus | integration-tests/narayana-lra/src/test/java/io/quarkus/it/lra/LRADevServicesStartTest.java | {
"start": 275,
"end": 661
} | class ____ {
DevServicesContext context;
@Test
public void testDevServicesStarted() {
assertThat(context.devServicesProperties(), hasKey("quarkus.lra.coordinator-url"));
assertThat(context.devServicesProperties(), hasKey("quarkus.lra.base-uri"));
assertThat(context.devServicesProperties(), hasKey("quarkus.http.host"));
}
}
| LRADevServicesStartTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/javadoc/InvalidInlineTagTest.java | {
"start": 1078,
"end": 1506
} | class ____ {
private final BugCheckerRefactoringTestHelper refactoring =
BugCheckerRefactoringTestHelper.newInstance(InvalidInlineTag.class, getClass());
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(InvalidInlineTag.class, getClass());
@Test
public void typo() {
refactoring
.addInputLines(
"Test.java",
"""
| InvalidInlineTagTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/pc/Phone.java | {
"start": 397,
"end": 1041
} | class ____ {
@Id
private Long id;
@Column(name = "`number`")
private String number;
@ManyToOne(fetch = FetchType.LAZY)
private Person owner;
//Getters and setters are omitted for brevity
//end::pc-cascade-domain-model-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getNumber() {
return number;
}
public void setNumber(String number) {
this.number = number;
}
public Person getOwner() {
return owner;
}
public void setOwner(Person owner) {
this.owner = owner;
}
//tag::pc-cascade-domain-model-example[]
}
//end::pc-cascade-domain-model-example[]
| Phone |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/AnySetterTest.java | {
"start": 3134,
"end": 3400
} | class ____ extends Bean797Base
{
@Override
public Map<String, JsonNode> getUndefinedProperties() {
return new HashMap<String, JsonNode>();
}
}
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
static abstract | Bean797BaseImpl |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/FluxSwitchOnNextTest.java | {
"start": 754,
"end": 1106
} | class ____ {
@Test
public void switchOnNext() {
StepVerifier.create(Flux.switchOnNext(Flux.just(Flux.just("Three", "Two", "One"),
Flux.just("Zero"))))
.expectNext("Three")
.expectNext("Two")
.expectNext("One")
.expectNext("Zero")
.verifyComplete();
}
}
| FluxSwitchOnNextTest |
java | quarkusio__quarkus | integration-tests/hibernate-orm-tenancy/connection-resolver-legacy-qualifiers/src/main/java/io/quarkus/it/hibernate/multitenancy/fruit/FruitResource.java | {
"start": 904,
"end": 5247
} | class ____ {
private static final Logger LOG = Logger.getLogger(FruitResource.class.getName());
@Inject
EntityManager entityManager;
@GET
@Path("fruits")
public Fruit[] getDefault() {
return get();
}
@GET
@Path("{tenant}/fruits")
public Fruit[] getTenant() {
return get();
}
private Fruit[] get() {
return entityManager.createNamedQuery("Fruits.findAll", Fruit.class)
.getResultList().toArray(new Fruit[0]);
}
@GET
@Path("fruits/{id}")
public Fruit getSingleDefault(@PathParam("id") int id) {
return findById(id);
}
@GET
@Path("{tenant}/fruits/{id}")
public Fruit getSingleTenant(@PathParam("id") int id) {
return findById(id);
}
private Fruit findById(int id) {
Fruit entity = entityManager.find(Fruit.class, id);
if (entity == null) {
throw new WebApplicationException("Fruit with id of " + id + " does not exist.", 404);
}
return entity;
}
@POST
@Transactional
@Path("fruits")
public Response createDefault(@NotNull Fruit fruit) {
return create(fruit);
}
@POST
@Transactional
@Path("{tenant}/fruits")
public Response createTenant(@NotNull Fruit fruit) {
return create(fruit);
}
private Response create(@NotNull Fruit fruit) {
if (fruit.getId() != null) {
throw new WebApplicationException("Id was invalidly set on request.", 422);
}
LOG.debugv("Create {0}", fruit.getName());
entityManager.persist(fruit);
return Response.ok(fruit).status(201).build();
}
@PUT
@Path("fruits/{id}")
@Transactional
public Fruit updateDefault(@PathParam("id") int id, @NotNull Fruit fruit) {
return update(id, fruit);
}
@PUT
@Path("{tenant}/fruits/{id}")
@Transactional
public Fruit updateTenant(@PathParam("id") int id, @NotNull Fruit fruit) {
return update(id, fruit);
}
private Fruit update(@NotNull @PathParam("id") int id, @NotNull Fruit fruit) {
if (fruit.getName() == null) {
throw new WebApplicationException("Fruit Name was not set on request.", 422);
}
Fruit entity = entityManager.find(Fruit.class, id);
if (entity == null) {
throw new WebApplicationException("Fruit with id of " + id + " does not exist.", 404);
}
entity.setName(fruit.getName());
LOG.debugv("Update #{0} {1}", fruit.getId(), fruit.getName());
return entity;
}
@DELETE
@Path("fruits/{id}")
@Transactional
public Response deleteDefault(@PathParam("id") int id) {
return delete(id);
}
@DELETE
@Path("{tenant}/fruits/{id}")
@Transactional
public Response deleteTenant(@PathParam("id") int id) {
return delete(id);
}
private Response delete(int id) {
Fruit fruit = entityManager.getReference(Fruit.class, id);
if (fruit == null) {
throw new WebApplicationException("Fruit with id of " + id + " does not exist.", 404);
}
LOG.debugv("Delete #{0} {1}", fruit.getId(), fruit.getName());
entityManager.remove(fruit);
return Response.status(204).build();
}
@GET
@Path("fruitsFindBy")
public Response findByDefault(@NotNull @QueryParam("type") String type, @NotNull @QueryParam("value") String value) {
return findBy(type, value);
}
@GET
@Path("{tenant}/fruitsFindBy")
public Response findByTenant(@NotNull @QueryParam("type") String type, @NotNull @QueryParam("value") String value) {
return findBy(type, value);
}
private Response findBy(@NotNull String type, @NotNull String value) {
if (!"name".equalsIgnoreCase(type)) {
throw new IllegalArgumentException("Currently only 'fruitsFindBy?type=name' is supported");
}
List<Fruit> list = entityManager.createNamedQuery("Fruits.findByName", Fruit.class).setParameter("name", value)
.getResultList();
if (list.size() == 0) {
return Response.status(404).build();
}
Fruit fruit = list.get(0);
return Response.status(200).entity(fruit).build();
}
@Provider
public static | FruitResource |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/MergedSortedCacheWindowStoreKeyValueIterator.java | {
"start": 4072,
"end": 4191
} | interface ____ {
Bytes toBytes(final Bytes key, final long windowStart, final int seqNum);
}
}
| WindowKeyToBytes |
java | grpc__grpc-java | stub/src/test/java/io/grpc/stub/AbstractStubTest.java | {
"start": 1284,
"end": 2467
} | class ____ extends BaseAbstractStubTest<NoopStub> {
@Override
NoopStub create(Channel channel, CallOptions callOptions) {
return new NoopStub(channel, callOptions);
}
@Test
public void defaultCallOptions() {
NoopStub stub = NoopStub.newStub(new StubFactory<NoopStub>() {
@Override
public NoopStub newStub(Channel channel, CallOptions callOptions) {
return create(channel, callOptions);
}
}, channel, CallOptions.DEFAULT);
assertThat(stub.getCallOptions().getOption(ClientCalls.STUB_TYPE_OPTION))
.isNull();
}
@Test
@IgnoreJRERequirement
public void testDuration() {
NoopStub stub = NoopStub.newStub(new StubFactory<NoopStub>() {
@Override
public NoopStub newStub(Channel channel, CallOptions callOptions) {
return create(channel, callOptions);
}
}, channel, CallOptions.DEFAULT);
NoopStub stubInstance = stub.withDeadlineAfter(Duration.ofMinutes(1L));
Deadline actual = stubInstance.getCallOptions().getDeadline();
Deadline expected = Deadline.after(1, MINUTES);
assertAbout(deadline()).that(actual).isWithin(10, MILLISECONDS).of(expected);
}
| AbstractStubTest |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/authentication/configuration/AuthenticationConfigurationTests.java | {
"start": 17219,
"end": 17344
} | class ____ extends GlobalAuthenticationConfigurerAdapter {
}
@Configuration
static | NoOpGlobalAuthenticationConfigurerAdapter |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/authentication/configuration/AuthenticationConfigurationTests.java | {
"start": 23272,
"end": 23715
} | class ____ {
AuthenticationProvider provider = mock(AuthenticationProvider.class);
UserDetailsService uds = mock(UserDetailsService.class);
@Bean
UserDetailsService userDetailsService() {
return this.uds;
}
@Bean
AuthenticationProvider authenticationProvider() {
return this.provider;
}
}
@Configuration
@EnableGlobalMethodSecurity(prePostEnabled = true)
static | AuthenticationProviderBeanAndUserDetailsServiceConfig |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java | {
"start": 5042,
"end": 25247
} | class ____ {
private SelfRenewingLease folderLease;
private String srcKey;
private String dstKey;
private FileMetadata[] fileMetadata = null; // descriptions of source files
private ArrayList<String> fileStrings = null;
private NativeAzureFileSystem fs;
private static final int MAX_RENAME_PENDING_FILE_SIZE = 10000000;
private static final int FORMATTING_BUFFER = 10000;
private boolean committed;
public static final String SUFFIX = "-RenamePending.json";
private static final ObjectReader READER = new ObjectMapper()
.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
.readerFor(JsonNode.class);
// Prepare in-memory information needed to do or redo a folder rename.
public FolderRenamePending(String srcKey, String dstKey, SelfRenewingLease lease,
NativeAzureFileSystem fs) throws IOException {
this.srcKey = srcKey;
this.dstKey = dstKey;
this.folderLease = lease;
this.fs = fs;
// List all the files in the folder.
long start = Time.monotonicNow();
fileMetadata = fs.getStoreInterface().list(srcKey, AZURE_LIST_ALL,
AZURE_UNBOUNDED_DEPTH);
long end = Time.monotonicNow();
LOG.debug("Time taken to list {} blobs for rename operation is: {} ms", fileMetadata.length, (end - start));
this.committed = true;
}
// Prepare in-memory information needed to do or redo folder rename from
// a -RenamePending.json file read from storage. This constructor is to use during
// redo processing.
public FolderRenamePending(Path redoFile, NativeAzureFileSystem fs)
throws IllegalArgumentException, IOException {
this.fs = fs;
// open redo file
Path f = redoFile;
int l;
byte[] bytes;
try (FSDataInputStream input = fs.open(f)) {
bytes = new byte[MAX_RENAME_PENDING_FILE_SIZE];
l = input.read(bytes);
}
if (l <= 0) {
// Jira HADOOP-12678 -Handle empty rename pending metadata file during
// atomic rename in redo path. If during renamepending file is created
// but not written yet, then this means that rename operation
// has not started yet. So we should delete rename pending metadata file.
LOG.error("Deleting empty rename pending file "
+ redoFile + " -- no data available");
deleteRenamePendingFile(fs, redoFile);
return;
}
if (l == MAX_RENAME_PENDING_FILE_SIZE) {
throw new IOException(
"Error reading pending rename file contents -- "
+ "maximum file size exceeded");
}
String contents = new String(bytes, 0, l, StandardCharsets.UTF_8);
// parse the JSON
JsonNode json = null;
try {
json = READER.readValue(contents);
this.committed = true;
} catch (JsonMappingException e) {
// The -RedoPending.json file is corrupted, so we assume it was
// not completely written
// and the redo operation did not commit.
this.committed = false;
} catch (JsonParseException e) {
this.committed = false;
} catch (IOException e) {
this.committed = false;
}
if (!this.committed) {
LOG.error("Deleting corruped rename pending file {} \n {}",
redoFile, contents);
// delete the -RenamePending.json file
deleteRenamePendingFile(fs, redoFile);
return;
}
// initialize this object's fields
ArrayList<String> fileStrList = new ArrayList<String>();
JsonNode oldFolderName = json.get("OldFolderName");
JsonNode newFolderName = json.get("NewFolderName");
if (oldFolderName == null || newFolderName == null) {
this.committed = false;
} else {
this.srcKey = oldFolderName.textValue();
this.dstKey = newFolderName.textValue();
if (this.srcKey == null || this.dstKey == null) {
this.committed = false;
} else {
JsonNode fileList = json.get("FileList");
if (fileList == null) {
this.committed = false;
} else {
for (int i = 0; i < fileList.size(); i++) {
fileStrList.add(fileList.get(i).textValue());
}
}
}
}
this.fileStrings = fileStrList;
}
public FileMetadata[] getFiles() {
return fileMetadata;
}
public SelfRenewingLease getFolderLease() {
return folderLease;
}
/**
* Deletes rename pending metadata file
* @param fs -- the file system
* @param redoFile - rename pending metadata file path
* @throws IOException - If deletion fails
*/
@VisibleForTesting
void deleteRenamePendingFile(FileSystem fs, Path redoFile)
throws IOException {
try {
fs.delete(redoFile, false);
} catch (IOException e) {
// If the rename metadata was not found then somebody probably
// raced with us and finished the delete first
Throwable t = e.getCause();
if (t != null && t instanceof StorageException
&& "BlobNotFound".equals(((StorageException) t).getErrorCode())) {
LOG.warn("rename pending file " + redoFile + " is already deleted");
} else {
throw e;
}
}
}
/**
* Write to disk the information needed to redo folder rename,
* in JSON format. The file name will be
* {@code wasb://<sourceFolderPrefix>/folderName-RenamePending.json}
* The file format will be:
* <pre>{@code
* {
* FormatVersion: "1.0",
* OperationTime: "<YYYY-MM-DD HH:MM:SS.MMM>",
* OldFolderName: "<key>",
* NewFolderName: "<key>",
* FileList: [ <string> , <string> , ... ]
* }
*
* Here's a sample:
* {
* FormatVersion: "1.0",
* OperationUTCTime: "2014-07-01 23:50:35.572",
* OldFolderName: "user/ehans/folderToRename",
* NewFolderName: "user/ehans/renamedFolder",
* FileList: [
* "innerFile",
* "innerFile2"
* ]
* } }</pre>
* @param fs file system on which a file is written.
* @throws IOException Thrown when fail to write file.
*/
public void writeFile(NativeAzureFileSystem fs) throws IOException {
Path path = getRenamePendingFilePath();
LOG.debug("Preparing to write atomic rename state to {}", path.toString());
OutputStream output = null;
String contents = makeRenamePendingFileContents();
// Write file.
try {
output = fs.createInternal(path, FsPermission.getFileDefault(), false, null);
output.write(contents.getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
throw new IOException("Unable to write RenamePending file for folder rename from "
+ srcKey + " to " + dstKey, e);
} finally {
NativeAzureFileSystemHelper.cleanup(LOG, output);
}
}
/**
* Return the contents of the JSON file to represent the operations
* to be performed for a folder rename.
*
* @return JSON string which represents the operation.
*/
public String makeRenamePendingFileContents() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
String time = sdf.format(new Date());
// Make file list string
StringBuilder builder = new StringBuilder();
builder.append("[\n");
for (int i = 0; i != fileMetadata.length; i++) {
if (i > 0) {
builder.append(",\n");
}
builder.append(" ");
String noPrefix = StringUtils.removeStart(fileMetadata[i].getKey(), srcKey + "/");
// Quote string file names, escaping any possible " characters or other
// necessary characters in the name.
builder.append(quote(noPrefix));
if (builder.length() >=
MAX_RENAME_PENDING_FILE_SIZE - FORMATTING_BUFFER) {
// Give up now to avoid using too much memory.
LOG.error("Internal error: Exceeded maximum rename pending file size of {} bytes.",
MAX_RENAME_PENDING_FILE_SIZE);
// return some bad JSON with an error message to make it human readable
return "exceeded maximum rename pending file size";
}
}
builder.append("\n ]");
String fileList = builder.toString();
// Make file contents as a string. Again, quote file names, escaping
// characters as appropriate.
String contents = "{\n"
+ " FormatVersion: \"1.0\",\n"
+ " OperationUTCTime: \"" + time + "\",\n"
+ " OldFolderName: " + quote(srcKey) + ",\n"
+ " NewFolderName: " + quote(dstKey) + ",\n"
+ " FileList: " + fileList + "\n"
+ "}\n";
return contents;
}
/**
* This is an exact copy of org.codehaus.jettison.json.JSONObject.quote
* method.
*
* Produce a string in double quotes with backslash sequences in all the
* right places. A backslash will be inserted within </, allowing JSON
* text to be delivered in HTML. In JSON text, a string cannot contain a
* control character or an unescaped quote or backslash.
* @param string A String
* @return A String correctly formatted for insertion in a JSON text.
*/
private String quote(String string) {
if (string == null || string.length() == 0) {
return "\"\"";
}
char c = 0;
int i;
int len = string.length();
StringBuilder sb = new StringBuilder(len + 4);
String t;
sb.append('"');
for (i = 0; i < len; i += 1) {
c = string.charAt(i);
switch (c) {
case '\\':
case '"':
sb.append('\\');
sb.append(c);
break;
case '/':
sb.append('\\');
sb.append(c);
break;
case '\b':
sb.append("\\b");
break;
case '\t':
sb.append("\\t");
break;
case '\n':
sb.append("\\n");
break;
case '\f':
sb.append("\\f");
break;
case '\r':
sb.append("\\r");
break;
default:
if (c < ' ') {
t = "000" + Integer.toHexString(c);
sb.append("\\u" + t.substring(t.length() - 4));
} else {
sb.append(c);
}
}
}
sb.append('"');
return sb.toString();
}
public String getSrcKey() {
return srcKey;
}
public String getDstKey() {
return dstKey;
}
public FileMetadata getSourceMetadata() throws IOException {
return fs.getStoreInterface().retrieveMetadata(srcKey);
}
/**
* Execute a folder rename. This is the execution path followed
* when everything is working normally. See redo() for the alternate
* execution path for the case where we're recovering from a folder rename
* failure.
* @throws IOException Thrown when fail to renaming.
*/
public void execute() throws IOException {
AzureFileSystemThreadTask task = new AzureFileSystemThreadTask() {
@Override
public boolean execute(FileMetadata file) throws IOException{
renameFile(file);
return true;
}
};
AzureFileSystemThreadPoolExecutor executor = this.fs.getThreadPoolExecutor(this.fs.renameThreadCount,
"AzureBlobRenameThread", "Rename", getSrcKey(), AZURE_RENAME_THREADS);
executor.executeParallel(this.getFiles(), task);
// Rename the source folder 0-byte root file itself.
FileMetadata srcMetadata2 = this.getSourceMetadata();
if (srcMetadata2.getBlobMaterialization() ==
BlobMaterialization.Explicit) {
// It already has a lease on it from the "prepare" phase so there's no
// need to get one now. Pass in existing lease to allow file delete.
fs.getStoreInterface().rename(this.getSrcKey(), this.getDstKey(),
false, folderLease);
}
// Update the last-modified time of the parent folders of both source and
// destination.
fs.updateParentFolderLastModifiedTime(srcKey);
fs.updateParentFolderLastModifiedTime(dstKey);
}
// Rename a single file
@VisibleForTesting
void renameFile(FileMetadata file) throws IOException{
// Rename all materialized entries under the folder to point to the
// final destination.
if (file.getBlobMaterialization() == BlobMaterialization.Explicit) {
String srcName = file.getKey();
String suffix = srcName.substring((this.getSrcKey()).length());
String dstName = this.getDstKey() + suffix;
// Rename gets exclusive access (via a lease) for files
// designated for atomic rename.
// The main use case is for HBase write-ahead log (WAL) and data
// folder processing correctness. See the rename code for details.
boolean acquireLease = this.fs.getStoreInterface().isAtomicRenameKey(srcName);
this.fs.getStoreInterface().rename(srcName, dstName, acquireLease, null);
}
}
/** Clean up after execution of rename.
* @throws IOException Thrown when fail to clean up.
* */
public void cleanup() throws IOException {
if (fs.getStoreInterface().isAtomicRenameKey(srcKey)) {
// Remove RenamePending file
fs.delete(getRenamePendingFilePath(), false);
// Freeing source folder lease is not necessary since the source
// folder file was deleted.
}
}
private Path getRenamePendingFilePath() {
String fileName = srcKey + SUFFIX;
Path fileNamePath = keyToPath(fileName);
Path path = fs.makeAbsolute(fileNamePath);
return path;
}
/**
* Recover from a folder rename failure by redoing the intended work,
* as recorded in the -RenamePending.json file.
*
* @throws IOException Thrown when fail to redo.
*/
public void redo() throws IOException {
if (!committed) {
// Nothing to do. The -RedoPending.json file should have already been
// deleted.
return;
}
// Try to get a lease on source folder to block concurrent access to it.
// It may fail if the folder is already gone. We don't check if the
// source exists explicitly because that could recursively trigger redo
// and give an infinite recursion.
SelfRenewingLease lease = null;
boolean sourceFolderGone = false;
try {
lease = fs.leaseSourceFolder(srcKey);
} catch (AzureException e) {
// If the source folder was not found then somebody probably
// raced with us and finished the rename first, or the
// first rename failed right before deleting the rename pending
// file.
String errorCode = "";
try {
StorageException se = (StorageException) e.getCause();
errorCode = se.getErrorCode();
} catch (Exception e2) {
; // do nothing -- could not get errorCode
}
if (errorCode.equals("BlobNotFound")) {
sourceFolderGone = true;
} else {
throw new IOException(
"Unexpected error when trying to lease source folder name during "
+ "folder rename redo",
e);
}
}
if (!sourceFolderGone) {
// Make sure the target folder exists.
Path dst = fullPath(dstKey);
if (!fs.existsInternal(dst)) {
fs.mkdirs(dst);
}
// For each file inside the folder to be renamed,
// make sure it has been renamed.
for(String fileName : fileStrings) {
finishSingleFileRename(fileName);
}
// Remove the source folder. Don't check explicitly if it exists,
// to avoid triggering redo recursively.
try {
// Rename the source folder 0-byte root file
// as destination folder 0-byte root file.
FileMetadata srcMetaData = this.getSourceMetadata();
if (srcMetaData.getBlobMaterialization() == BlobMaterialization.Explicit) {
// We already have a lease. So let's just rename the source blob
// as destination blob under same lease.
fs.getStoreInterface().rename(this.getSrcKey(), this.getDstKey(), false, lease);
}
// Now we can safely delete the source folder.
fs.getStoreInterface().delete(srcKey, lease);
} catch (Exception e) {
LOG.info("Unable to delete source folder during folder rename redo. "
+ "If the source folder is already gone, this is not an error "
+ "condition. Continuing with redo.", e);
}
// Update the last-modified time of the parent folders of both source
// and destination.
fs.updateParentFolderLastModifiedTime(srcKey);
fs.updateParentFolderLastModifiedTime(dstKey);
}
// Remove the -RenamePending.json file.
fs.delete(getRenamePendingFilePath(), false);
}
// See if the source file is still there, and if it is, rename it.
private void finishSingleFileRename(String fileName)
throws IOException {
Path srcFile = fullPath(srcKey, fileName);
Path dstFile = fullPath(dstKey, fileName);
String srcName = fs.pathToKey(srcFile);
String dstName = fs.pathToKey(dstFile);
boolean srcExists = fs.getStoreInterface().explicitFileExists(srcName);
boolean dstExists = fs.getStoreInterface().explicitFileExists(dstName);
if(srcExists) {
// Rename gets exclusive access (via a lease) for HBase write-ahead log
// (WAL) file processing correctness. See the rename code for details.
fs.getStoreInterface().rename(srcName, dstName, true, null);
} else if (!srcExists && dstExists) {
// The rename already finished, so do nothing.
;
} else {
// HADOOP-14512
LOG.warn(
"Attempting to complete rename of file " + srcKey + "/" + fileName
+ " during folder rename redo, and file was not found in source "
+ "or destination " + dstKey + "/" + fileName + ". "
+ "This must mean the rename of this file has already completed");
}
}
// Return an absolute path for the specific fileName within the folder
// specified by folderKey.
private Path fullPath(String folderKey, String fileName) {
return new Path(new Path(fs.getUri()), "/" + folderKey + "/" + fileName);
}
private Path fullPath(String fileKey) {
return new Path(new Path(fs.getUri()), "/" + fileKey);
}
}
private static final String TRAILING_PERIOD_PLACEHOLDER = "[[.]]";
private static final Pattern TRAILING_PERIOD_PLACEHOLDER_PATTERN =
Pattern.compile("\\[\\[\\.\\]\\](?=$|/)");
private static final Pattern TRAILING_PERIOD_PATTERN = Pattern.compile("\\.(?=$|/)");
@Override
public String getScheme() {
return "wasb";
}
/**
* If fs.azure.override.canonical.service.name is set as true, return URI of
* the WASB filesystem, otherwise use the default implementation.
*
* @return a service string that uniquely identifies this file system
*/
@Override
public String getCanonicalServiceName() {
if (returnUriAsCanonicalServiceName) {
return getUri().toString();
}
return super.getCanonicalServiceName();
}
/**
* <p>
* A {@link FileSystem} for reading and writing files stored on <a
* href="http://store.azure.com/">Windows Azure</a>. This implementation is
* blob-based and stores files on Azure in their native form so they can be read
* by other Azure tools. This implementation uses HTTPS for secure network communication.
* </p>
*/
public static | FolderRenamePending |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/tools/picocli/CommandLine.java | {
"start": 298135,
"end": 298955
} | class ____ extends ParameterException {
private static final long serialVersionUID = -8700426380701452440L;
public UnmatchedArgumentException(final CommandLine commandLine, final String msg) {
super(commandLine, msg);
}
public UnmatchedArgumentException(final CommandLine commandLine, final Stack<String> args) {
this(commandLine, new ArrayList<>(reverse(args)));
}
public UnmatchedArgumentException(final CommandLine commandLine, final List<String> args) {
this(commandLine, "Unmatched argument" + (args.size() == 1 ? " " : "s ") + args);
}
}
/** Exception indicating that more values were specified for an option or parameter than its {@link Option#arity() arity} allows. */
public static | UnmatchedArgumentException |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelinePublisher.java | {
"start": 2249,
"end": 15218
} | class ____ extends CompositeService {
// Number of bytes of config which can be published in one shot to ATSv2.
public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024;
private TimelineV2Client timelineClient;
private volatile boolean stopped = false;
private static final Logger log =
LoggerFactory.getLogger(ServiceTimelinePublisher.class);
@Override
protected void serviceInit(org.apache.hadoop.conf.Configuration configuration)
throws Exception {
addService(timelineClient);
super.serviceInit(configuration);
}
@Override
protected void serviceStop() throws Exception {
stopped = true;
super.serviceStop();
}
public boolean isStopped() {
return stopped;
}
public ServiceTimelinePublisher(TimelineV2Client client) {
super(ServiceTimelinePublisher.class.getName());
timelineClient = client;
}
public void serviceAttemptRegistered(Service service,
org.apache.hadoop.conf.Configuration systemConf) {
long currentTimeMillis = service.getLaunchTime() == null
? System.currentTimeMillis() : service.getLaunchTime().getTime();
TimelineEntity entity = createServiceAttemptEntity(service.getId());
entity.setCreatedTime(currentTimeMillis);
// create info keys
Map<String, Object> entityInfos = new HashMap<String, Object>();
entityInfos.put(ServiceTimelineMetricsConstants.NAME, service.getName());
entityInfos.put(ServiceTimelineMetricsConstants.STATE,
ServiceState.STARTED.toString());
entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_TIME,
currentTimeMillis);
entity.addInfo(ServiceTimelineMetricsConstants.QUICK_LINKS,
service.getQuicklinks());
entity.addInfo(entityInfos);
// add an event
TimelineEvent startEvent = new TimelineEvent();
startEvent.setId(ServiceTimelineEvent.SERVICE_ATTEMPT_REGISTERED.toString());
startEvent.setTimestamp(currentTimeMillis);
entity.addEvent(startEvent);
// publish before configurations published
putEntity(entity);
// publish system config - YarnConfiguration
populateTimelineEntity(systemConf.iterator(), service.getId(),
ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
// publish container conf
publishContainerConf(service.getConfiguration(), service.getId(),
ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
// publish component as separate entity.
publishComponents(service.getComponents());
}
public void serviceAttemptUpdated(Service service) {
TimelineEntity entity = createServiceAttemptEntity(service.getId());
entity.addInfo(ServiceTimelineMetricsConstants.QUICK_LINKS,
service.getQuicklinks());
putEntity(entity);
}
public void serviceAttemptUnregistered(ServiceContext context,
FinalApplicationStatus status, String diagnostics) {
TimelineEntity entity = createServiceAttemptEntity(
context.attemptId.getApplicationId().toString());
Map<String, Object> entityInfos = new HashMap<String, Object>();
entityInfos.put(ServiceTimelineMetricsConstants.STATE, status);
entityInfos.put(DIAGNOSTICS_INFO, diagnostics);
entity.addInfo(entityInfos);
// add an event
TimelineEvent finishEvent = new TimelineEvent();
finishEvent
.setId(ServiceTimelineEvent.SERVICE_ATTEMPT_UNREGISTERED.toString());
finishEvent.setTimestamp(System.currentTimeMillis());
entity.addEvent(finishEvent);
putEntity(entity);
}
public void componentInstanceStarted(Container container,
ComponentInstance instance) {
TimelineEntity entity = createComponentInstanceEntity(container.getId());
entity.setCreatedTime(container.getLaunchTime().getTime());
// create info keys
Map<String, Object> entityInfos = new HashMap<String, Object>();
entityInfos.put(ServiceTimelineMetricsConstants.BARE_HOST,
container.getBareHost());
entityInfos.put(ServiceTimelineMetricsConstants.STATE,
container.getState().toString());
entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_TIME,
container.getLaunchTime().getTime());
entityInfos.put(ServiceTimelineMetricsConstants.COMPONENT_NAME,
instance.getCompName());
entityInfos.put(ServiceTimelineMetricsConstants.COMPONENT_INSTANCE_NAME,
instance.getCompInstanceName());
entity.addInfo(entityInfos);
// add an event
TimelineEvent startEvent = new TimelineEvent();
startEvent
.setId(ServiceTimelineEvent.COMPONENT_INSTANCE_REGISTERED.toString());
startEvent.setTimestamp(container.getLaunchTime().getTime());
entity.addEvent(startEvent);
putEntity(entity);
}
public void componentInstanceFinished(ContainerId containerId,
int exitCode, ContainerState state, String diagnostics) {
TimelineEntity entity = createComponentInstanceEntity(
containerId.toString());
// create info keys
Map<String, Object> entityInfos = new HashMap<String, Object>();
entityInfos.put(ServiceTimelineMetricsConstants.EXIT_STATUS_CODE,
exitCode);
entityInfos.put(DIAGNOSTICS_INFO, diagnostics);
entityInfos.put(ServiceTimelineMetricsConstants.STATE, state);
entity.addInfo(entityInfos);
// add an event
TimelineEvent startEvent = new TimelineEvent();
startEvent
.setId(ServiceTimelineEvent.COMPONENT_INSTANCE_UNREGISTERED.toString());
startEvent.setTimestamp(System.currentTimeMillis());
entity.addEvent(startEvent);
putEntity(entity);
}
public void componentInstanceIPHostUpdated(Container container) {
TimelineEntity entity = createComponentInstanceEntity(container.getId());
// create info keys
Map<String, Object> entityInfos = new HashMap<String, Object>();
entityInfos.put(ServiceTimelineMetricsConstants.IP, container.getIp());
entityInfos.put(ServiceTimelineMetricsConstants.EXPOSED_PORTS,
container.getExposedPorts());
entityInfos.put(ServiceTimelineMetricsConstants.HOSTNAME,
container.getHostname());
entityInfos.put(ServiceTimelineMetricsConstants.STATE,
container.getState().toString());
entity.addInfo(entityInfos);
TimelineEvent updateEvent = new TimelineEvent();
updateEvent.setId(ServiceTimelineEvent.COMPONENT_INSTANCE_IP_HOST_UPDATE
.toString());
updateEvent.setTimestamp(System.currentTimeMillis());
entity.addEvent(updateEvent);
putEntity(entity);
}
public void componentInstanceBecomeReady(Container container) {
TimelineEntity entity = createComponentInstanceEntity(container.getId());
Map<String, Object> entityInfo = new HashMap<>();
entityInfo.put(ServiceTimelineMetricsConstants.STATE, READY);
entity.addInfo(entityInfo);
TimelineEvent updateEvent = new TimelineEvent();
updateEvent.setId(ServiceTimelineEvent.COMPONENT_INSTANCE_BECOME_READY
.toString());
updateEvent.setTimestamp(System.currentTimeMillis());
entity.addEvent(updateEvent);
putEntity(entity);
}
private void publishComponents(List<Component> components) {
long currentTimeMillis = System.currentTimeMillis();
for (Component component : components) {
TimelineEntity entity = createComponentEntity(component.getName());
entity.setCreatedTime(currentTimeMillis);
// create info keys
Map<String, Object> entityInfos = new HashMap<String, Object>();
if (component.getArtifact() != null) {
entityInfos.put(ServiceTimelineMetricsConstants.ARTIFACT_ID,
component.getArtifact().getId());
entityInfos.put(ServiceTimelineMetricsConstants.ARTIFACT_TYPE,
component.getArtifact().getType().toString());
}
if (component.getResource() != null) {
entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_CPU,
component.getResource().getCpus());
entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_MEMORY,
component.getResource().getMemory());
if (component.getResource().getProfile() != null) {
entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_PROFILE,
component.getResource().getProfile());
}
}
if (component.getLaunchCommand() != null) {
entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_COMMAND,
component.getLaunchCommand());
}
entityInfos.put(ServiceTimelineMetricsConstants.RUN_PRIVILEGED_CONTAINER,
component.getRunPrivilegedContainer().toString());
entity.addInfo(entityInfos);
putEntity(entity);
// publish container specific configurations
publishContainerConf(component.getConfiguration(), component.getName(),
ServiceTimelineEntityType.COMPONENT.toString());
}
}
private void publishContainerConf(Configuration configuration,
String entityId, String entityType) {
populateTimelineEntity(configuration.getEnv().entrySet().iterator(),
entityId, entityType);
for (ConfigFile configFile : configuration.getFiles()) {
populateTimelineEntity(configFile.getProperties().entrySet().iterator(),
entityId, entityType);
}
}
private void populateTimelineEntity(Iterator<Entry<String, String>> iterator,
String entityId, String entityType) {
int configSize = 0;
TimelineEntity entity = createTimelineEntity(entityId, entityType);
while (iterator.hasNext()) {
Entry<String, String> entry = iterator.next();
int size = entry.getKey().length() + entry.getValue().length();
configSize += size;
// Configs are split into multiple entities if they exceed 100kb in size.
if (configSize > ATS_CONFIG_PUBLISH_SIZE_BYTES) {
if (entity.getConfigs().size() > 0) {
putEntity(entity);
entity = createTimelineEntity(entityId, entityType);
}
configSize = size;
}
entity.addConfig(entry.getKey(), entry.getValue());
}
if (configSize > 0) {
putEntity(entity);
}
}
/**
* Called from ServiceMetricsSink at regular interval of time.
* @param metrics of service or components
* @param entityId Id of entity
* @param entityType Type of entity
* @param timestamp
*/
public void publishMetrics(Iterable<AbstractMetric> metrics, String entityId,
String entityType, long timestamp) {
TimelineEntity entity = createTimelineEntity(entityId, entityType);
Set<TimelineMetric> entityMetrics = new HashSet<TimelineMetric>();
for (AbstractMetric metric : metrics) {
TimelineMetric timelineMetric = new TimelineMetric();
timelineMetric.setId(metric.name());
timelineMetric.addValue(timestamp, metric.value());
entityMetrics.add(timelineMetric);
}
entity.setMetrics(entityMetrics);
putEntity(entity);
}
private TimelineEntity createServiceAttemptEntity(String serviceId) {
TimelineEntity entity = createTimelineEntity(serviceId,
ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
return entity;
}
private TimelineEntity createComponentInstanceEntity(String instanceId) {
TimelineEntity entity = createTimelineEntity(instanceId,
ServiceTimelineEntityType.COMPONENT_INSTANCE.toString());
return entity;
}
private TimelineEntity createComponentEntity(String componentId) {
TimelineEntity entity = createTimelineEntity(componentId,
ServiceTimelineEntityType.COMPONENT.toString());
return entity;
}
private TimelineEntity createTimelineEntity(String entityId,
String entityType) {
TimelineEntity entity = new TimelineEntity();
entity.setId(entityId);
entity.setType(entityType);
return entity;
}
private void putEntity(TimelineEntity entity) {
try {
if (log.isDebugEnabled()) {
log.debug("Publishing the entity " + entity + ", JSON-style content: "
+ TimelineUtils.dumpTimelineRecordtoJSON(entity));
}
if (timelineClient != null) {
timelineClient.putEntitiesAsync(entity);
} else {
log.error("Seems like client has been removed before the entity "
+ "could be published for " + entity);
}
} catch (Exception e) {
log.error("Error when publishing entity " + entity, e);
}
}
public void componentFinished(
Component comp,
ComponentState state, long finishTime) {
createComponentEntity(comp.getName());
TimelineEntity entity = createComponentEntity(comp.getName());
// create info keys
Map<String, Object> entityInfos = new HashMap<String, Object>();
entityInfos.put(ServiceTimelineMetricsConstants.STATE, state);
entity.addInfo(entityInfos);
// add an event
TimelineEvent startEvent = new TimelineEvent();
startEvent
.setId(ServiceTimelineEvent.COMPONENT_FINISHED.toString());
startEvent.setTimestamp(finishTime);
entity.addEvent(startEvent);
putEntity(entity);
}
}
| ServiceTimelinePublisher |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/ryu/RyuFloatTest.java | {
"start": 190,
"end": 3203
} | class ____ extends TestCase {
public void test_for_ryu() throws Exception {
Random random = new Random();
for (int i = 0; i < 1000 * 1000 * 1000; ++i) {
float value = random.nextFloat();
String str1 = Float.toString(value);
String str2 = RyuFloat.toString(value);
if (!str1.equals(str2)) {
boolean cmp = (Float.parseFloat(str1) == Float.parseFloat(str2));
System.out.println(str1 + " -> " + str2 + " : " + cmp);
assertTrue(cmp);
// assertTrue(Float.parseFloat(str1) == Float.parseFloat(str2));
}
}
}
public void test_0() throws Exception {
float[] values = new float[] {
Float.NaN,
Float.NEGATIVE_INFINITY,
Float.POSITIVE_INFINITY,
Float.MIN_VALUE,
Float.MAX_VALUE,
0,
0.0f,
-0.0f,
Integer.MAX_VALUE,
Integer.MIN_VALUE,
Long.MAX_VALUE,
Long.MIN_VALUE,
Float.intBitsToFloat(0x80000000),
1.0f,
-1f,
Float.intBitsToFloat(0x00800000),
1.0E7f,
9999999.0f,
0.001f,
0.0009999999f,
Float.intBitsToFloat(0x7f7fffff),
Float.intBitsToFloat(0x00000001),
3.3554448E7f,
8.999999E9f,
3.4366717E10f,
0.33007812f,
Float.intBitsToFloat(0x5D1502F9),
Float.intBitsToFloat(0x5D9502F9),
Float.intBitsToFloat(0x5E1502F9),
4.7223665E21f,
8388608.0f,
1.6777216E7f,
3.3554436E7f,
6.7131496E7f,
1.9310392E-38f,
-2.47E-43f,
1.993244E-38f,
4103.9003f,
5.3399997E9f,
6.0898E-39f,
0.0010310042f,
2.8823261E17f,
7.038531E-26f,
9.2234038E17f,
6.7108872E7f,
1.0E-44f,
2.816025E14f,
9.223372E18f,
1.5846085E29f,
1.1811161E19f,
5.368709E18f,
4.6143165E18f,
0.007812537f,
1.4E-45f,
1.18697724E20f,
1.00014165E-36f,
200f,
3.3554432E7f
};
for (float value : values) {
String str1 = Float.toString(value);
String str2 = RyuFloat.toString(value);
if (!str1.equals(str2)) {
boolean cmp = (Float.parseFloat(str1) == Float.parseFloat(str2));
System.out.println(str1 + " -> " + str2 + " : " + cmp);
assertTrue(cmp);
}
}
}
}
| RyuFloatTest |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/BlockingFirstSubscriber.java | {
"start": 954,
"end": 1386
} | class ____<T> extends BlockingSingleSubscriber<T> {
public BlockingFirstSubscriber(Context context) {
super(context);
}
@Override
public void onNext(T t) {
if (value == null) {
value = t;
dispose();
countDown();
}
}
@Override
public void onError(Throwable t) {
if (value == null) {
error = t;
}
countDown();
}
@Override
public String stepName() {
return "blockFirst";
}
}
| BlockingFirstSubscriber |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/builder/RouteBuilderAddRoutesTest.java | {
"start": 925,
"end": 1756
} | class ____ extends ContextTestSupport {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
interceptSendToEndpoint("mock:result").transform(constant("Foo was here"));
from("direct:foo").to("mock:foo");
from("direct:start").to("mock:result");
}
};
}
@Test
public void testAddRoutes() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Foo was here");
getMockEndpoint("mock:foo").expectedBodiesReceived("Bye World");
template.sendBody("direct:start", "Hello World");
template.sendBody("direct:foo", "Bye World");
assertMockEndpointsSatisfied();
}
}
| RouteBuilderAddRoutesTest |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/MonoLogFuseable.java | {
"start": 1097,
"end": 1837
} | class ____<T> extends InternalMonoOperator<T, T>
implements Fuseable {
final SignalPeek<T> log;
MonoLogFuseable(Mono<? extends T> source, SignalPeek<T> log) {
super(source);
this.log = log;
}
@Override
@SuppressWarnings("unchecked")
public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super T> actual) {
if (actual instanceof ConditionalSubscriber) {
return new FluxPeekFuseable.PeekFuseableConditionalSubscriber<>((ConditionalSubscriber<? super T>) actual, log);
}
return new FluxPeekFuseable.PeekFuseableSubscriber<>(actual, log);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
}
| MonoLogFuseable |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java | {
"start": 1167,
"end": 6254
} | class ____ {
private final RawErasureDecoder decoder;
private ByteBuffer buffer;
private int[] newValidIndexes;
private int newErasedIndex;
public DecodingValidator(RawErasureDecoder decoder) {
this.decoder = decoder;
}
/**
* Validate outputs decoded from inputs, by decoding an input back from
* the outputs and comparing it with the original one.
*
* For instance, in RS (6, 3), let (d0, d1, d2, d3, d4, d5) be sources
* and (p0, p1, p2) be parities, and assume
* inputs = [d0, null (d1), d2, d3, d4, d5, null (p0), p1, null (p2)];
* erasedIndexes = [1, 6];
* outputs = [d1, p0].
* Then
* 1. Create new inputs, erasedIndexes and outputs for validation so that
* the inputs could contain the decoded outputs, and decode them:
* newInputs = [d1, d2, d3, d4, d5, p0]
* newErasedIndexes = [0]
* newOutputs = [d0']
* 2. Compare d0 and d0'. The comparison will fail with high probability
* when the initial outputs are wrong.
*
* Note that the input buffers' positions must be the ones where data are
* read: If the input buffers have been processed by a decoder, the buffers'
* positions must be reset before being passed into this method.
*
* This method does not change outputs and erasedIndexes.
*
* @param inputs input buffers used for decoding. The buffers' position
* are moved to the end after this method.
* @param erasedIndexes indexes of erased units used for decoding
* @param outputs decoded output buffers, which are ready to be read after
* the call
* @throws IOException raised on errors performing I/O.
*/
public void validate(ByteBuffer[] inputs, int[] erasedIndexes,
ByteBuffer[] outputs) throws IOException {
markBuffers(outputs);
try {
ByteBuffer validInput = CoderUtil.findFirstValidInput(inputs);
boolean isDirect = validInput.isDirect();
int capacity = validInput.capacity();
int remaining = validInput.remaining();
// Init buffer
if (buffer == null || buffer.isDirect() != isDirect
|| buffer.capacity() < remaining) {
buffer = allocateBuffer(isDirect, capacity);
}
buffer.clear().limit(remaining);
// Create newInputs and newErasedIndex for validation
ByteBuffer[] newInputs = new ByteBuffer[inputs.length];
int count = 0;
for (int i = 0; i < erasedIndexes.length; i++) {
newInputs[erasedIndexes[i]] = outputs[i];
count++;
}
newErasedIndex = -1;
boolean selected = false;
int numValidIndexes = CoderUtil.getValidIndexes(inputs).length;
for (int i = 0; i < newInputs.length; i++) {
if (count == numValidIndexes) {
break;
} else if (!selected && inputs[i] != null) {
newErasedIndex = i;
newInputs[i] = null;
selected = true;
} else if (newInputs[i] == null) {
newInputs[i] = inputs[i];
if (inputs[i] != null) {
count++;
}
}
}
// Keep it for testing
newValidIndexes = CoderUtil.getValidIndexes(newInputs);
decoder.decode(newInputs, new int[]{newErasedIndex},
new ByteBuffer[]{buffer});
if (!buffer.equals(inputs[newErasedIndex])) {
throw new InvalidDecodingException("Failed to validate decoding");
}
} finally {
toLimits(inputs);
resetBuffers(outputs);
}
}
/**
* Validate outputs decoded from inputs, by decoding an input back from
* those outputs and comparing it with the original one.
* @param inputs input buffers used for decoding
* @param erasedIndexes indexes of erased units used for decoding
* @param outputs decoded output buffers
* @throws IOException raised on errors performing I/O.
*/
public void validate(ECChunk[] inputs, int[] erasedIndexes, ECChunk[] outputs)
throws IOException {
ByteBuffer[] newInputs = CoderUtil.toBuffers(inputs);
ByteBuffer[] newOutputs = CoderUtil.toBuffers(outputs);
validate(newInputs, erasedIndexes, newOutputs);
}
private ByteBuffer allocateBuffer(boolean direct, int capacity) {
if (direct) {
buffer = ByteBuffer.allocateDirect(capacity);
} else {
buffer = ByteBuffer.allocate(capacity);
}
return buffer;
}
private static void markBuffers(ByteBuffer[] buffers) {
for (ByteBuffer buffer: buffers) {
if (buffer != null) {
buffer.mark();
}
}
}
private static void resetBuffers(ByteBuffer[] buffers) {
for (ByteBuffer buffer: buffers) {
if (buffer != null) {
buffer.reset();
}
}
}
private static void toLimits(ByteBuffer[] buffers) {
for (ByteBuffer buffer: buffers) {
if (buffer != null) {
buffer.position(buffer.limit());
}
}
}
@VisibleForTesting
protected int[] getNewValidIndexes() {
return newValidIndexes;
}
@VisibleForTesting
protected int getNewErasedIndex() {
return newErasedIndex;
}
} | DecodingValidator |
java | resilience4j__resilience4j | resilience4j-spring-boot3/src/test/java/io/github/resilience4j/springboot3/retry/RetryAutoConfigurationTest.java | {
"start": 2292,
"end": 9619
} | class ____ {
@Rule
public WireMockRule wireMockRule = new WireMockRule(8090);
@Autowired
RetryRegistry retryRegistry;
@Autowired
RetryProperties retryProperties;
@Autowired
RetryAspect retryAspect;
@Autowired
RetryDummyService retryDummyService;
@Autowired
private TestRestTemplate restTemplate;
@Autowired
private RetryDummyFeignClient retryDummyFeignClient;
/**
* This test verifies that the combination of @FeignClient and @Retry annotation works as same
* as @Retry alone works with any normal service class
*/
@Test
public void testFeignClient() {
WireMock.stubFor(WireMock
.get(WireMock.urlEqualTo("/retry/"))
.willReturn(WireMock.aResponse().withStatus(200).withBody("This is successful call"))
);
WireMock.stubFor(WireMock.get(WireMock.urlMatching("^.*\\/retry\\/error.*$"))
.willReturn(WireMock.aResponse().withStatus(400).withBody("This is error")));
assertThat(retryRegistry).isNotNull();
assertThat(retryProperties).isNotNull();
RetryEventsEndpointResponse retryEventListBefore = retryEvents("/actuator/retryevents");
RetryEventsEndpointResponse retryEventsEndpointFeignListBefore = retryEvents(
"/actuator/retryevents/" + RetryDummyFeignClient.RETRY_DUMMY_FEIGN_CLIENT_NAME);
try {
retryDummyFeignClient.doSomething("error");
} catch (Exception ex) {
// Do nothing. The IOException is recorded by the retry as it is one of failure exceptions
}
// The invocation is recorded by the CircuitBreaker as a success.
retryDummyFeignClient.doSomething(StringUtils.EMPTY);
Retry retry = retryRegistry.retry(RetryDummyFeignClient.RETRY_DUMMY_FEIGN_CLIENT_NAME);
assertThat(retry).isNotNull();
// expect retry is configured as defined in application.yml
assertThat(retry.getRetryConfig().getMaxAttempts()).isEqualTo(4);
assertThat(retry.getName()).isEqualTo(RetryDummyFeignClient.RETRY_DUMMY_FEIGN_CLIENT_NAME);
assertThat(retry.getRetryConfig().getExceptionPredicate().test(new IOException())).isTrue();
assertThat(retry.getMetrics().getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
assertThat(retry.getMetrics().getNumberOfFailedCallsWithRetryAttempt()).isEqualTo(1);
assertThat(retry.getMetrics().getNumberOfSuccessfulCallsWithoutRetryAttempt()).isEqualTo(1);
assertThat(retry.getMetrics().getNumberOfSuccessfulCallsWithRetryAttempt()).isZero();
// expect retry actuator endpoint contains both retries
ResponseEntity<RetryEndpointResponse> retriesList = restTemplate
.getForEntity("/actuator/retries", RetryEndpointResponse.class);
assertThat(new HashSet<>(retriesList.getBody().getRetries()))
.contains(RetryDummyService.RETRY_BACKEND_A, RetryDummyService.RETRY_BACKEND_B,
ReactiveRetryDummyService.BACKEND_C, RetryDummyFeignClient.RETRY_DUMMY_FEIGN_CLIENT_NAME);
// expect retry-event actuator endpoint recorded both events
RetryEventsEndpointResponse retryEventList = retryEvents("/actuator/retryevents");
assertThat(retryEventList.getRetryEvents())
.hasSize(retryEventListBefore.getRetryEvents().size() + 4);
retryEventList = retryEvents("/actuator/retryevents/" + RetryDummyFeignClient.RETRY_DUMMY_FEIGN_CLIENT_NAME);
assertThat(retryEventList.getRetryEvents())
.hasSize(retryEventsEndpointFeignListBefore.getRetryEvents().size() + 4);
assertThat(retry.getRetryConfig().getExceptionPredicate().test(new IOException())).isTrue();
assertThat(retry.getRetryConfig().getExceptionPredicate().test(new IgnoredException()))
.isFalse();
assertThat(retryAspect.getOrder()).isEqualTo(399);
}
/**
* The test verifies that a Retry instance is created and configured properly when the
* RetryDummyService is invoked and that the Retry logic is properly handled
*/
@Test
public void testRetryAutoConfiguration() throws IOException {
assertThat(retryRegistry).isNotNull();
assertThat(retryProperties).isNotNull();
RetryEventsEndpointResponse retryEventListBefore = retryEvents("/actuator/retryevents");
RetryEventsEndpointResponse retryEventsAListBefore = retryEvents(
"/actuator/retryevents/" + RetryDummyService.RETRY_BACKEND_A);
try {
retryDummyService.doSomething(true);
} catch (IOException ex) {
// Do nothing. The IOException is recorded by the retry as it is one of failure exceptions
}
// The invocation is recorded by the CircuitBreaker as a success.
retryDummyService.doSomething(false);
Retry retry = retryRegistry.retry(RetryDummyService.RETRY_BACKEND_A);
assertThat(retry).isNotNull();
// expect retry is configured as defined in application.yml
assertThat(retry.getRetryConfig().getMaxAttempts()).isEqualTo(4);
assertThat(retry.getName()).isEqualTo(RetryDummyService.RETRY_BACKEND_A);
assertThat(retry.getRetryConfig().getExceptionPredicate().test(new IOException())).isTrue();
assertThat(retry.getMetrics().getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
assertThat(retry.getMetrics().getNumberOfFailedCallsWithRetryAttempt()).isEqualTo(1);
assertThat(retry.getMetrics().getNumberOfSuccessfulCallsWithoutRetryAttempt()).isEqualTo(1);
assertThat(retry.getMetrics().getNumberOfSuccessfulCallsWithRetryAttempt()).isZero();
// expect retry actuator endpoint contains both retries
ResponseEntity<RetryEndpointResponse> retriesList = restTemplate
.getForEntity("/actuator/retries", RetryEndpointResponse.class);
assertThat(new HashSet<>(retriesList.getBody().getRetries()))
.contains(RetryDummyService.RETRY_BACKEND_A, RetryDummyService.RETRY_BACKEND_B,
ReactiveRetryDummyService.BACKEND_C, RetryDummyFeignClient.RETRY_DUMMY_FEIGN_CLIENT_NAME);
// expect retry-event actuator endpoint recorded both events
RetryEventsEndpointResponse retryEventList = retryEvents("/actuator/retryevents");
assertThat(retryEventList.getRetryEvents())
.hasSize(retryEventListBefore.getRetryEvents().size() + 4);
retryEventList = retryEvents("/actuator/retryevents/" + RetryDummyService.RETRY_BACKEND_A);
assertThat(retryEventList.getRetryEvents())
.hasSize(retryEventsAListBefore.getRetryEvents().size() + 4);
assertThat(retry.getRetryConfig().getExceptionPredicate().test(new IOException())).isTrue();
assertThat(retry.getRetryConfig().getExceptionPredicate().test(new IgnoredException()))
.isFalse();
assertThat(retryAspect.getOrder()).isEqualTo(399);
// test Customizer effect
Retry retryCustom = retryRegistry.retry("retryBackendD");
assertThat(retryCustom.getRetryConfig().getMaxAttempts()).isEqualTo(4);
}
private RetryEventsEndpointResponse retryEvents(String s) {
return restTemplate.getForEntity(s, RetryEventsEndpointResponse.class).getBody();
}
}
| RetryAutoConfigurationTest |
java | apache__spark | examples/src/main/java/org/apache/spark/examples/mllib/JavaRankingMetricsExample.java | {
"start": 1290,
"end": 5729
} | class ____ {
public static void main(String[] args) {
SparkConf conf = new SparkConf().setAppName("Java Ranking Metrics Example");
JavaSparkContext sc = new JavaSparkContext(conf);
// $example on$
String path = "data/mllib/sample_movielens_data.txt";
JavaRDD<String> data = sc.textFile(path);
JavaRDD<Rating> ratings = data.map(line -> {
String[] parts = line.split("::");
return new Rating(Integer.parseInt(parts[0]), Integer.parseInt(parts[1]), Double
.parseDouble(parts[2]) - 2.5);
});
ratings.cache();
// Train an ALS model
MatrixFactorizationModel model = ALS.train(JavaRDD.toRDD(ratings), 10, 10, 0.01);
// Get top 10 recommendations for every user and scale ratings from 0 to 1
JavaRDD<Tuple2<Object, Rating[]>> userRecs = model.recommendProductsForUsers(10).toJavaRDD();
JavaRDD<Tuple2<Object, Rating[]>> userRecsScaled = userRecs.map(t -> {
Rating[] scaledRatings = new Rating[t._2().length];
for (int i = 0; i < scaledRatings.length; i++) {
double newRating = Math.max(Math.min(t._2()[i].rating(), 1.0), 0.0);
scaledRatings[i] = new Rating(t._2()[i].user(), t._2()[i].product(), newRating);
}
return new Tuple2<>(t._1(), scaledRatings);
});
JavaPairRDD<Object, Rating[]> userRecommended = JavaPairRDD.fromJavaRDD(userRecsScaled);
// Map ratings to 1 or 0, 1 indicating a movie that should be recommended
JavaRDD<Rating> binarizedRatings = ratings.map(r -> {
double binaryRating;
if (r.rating() > 0.0) {
binaryRating = 1.0;
} else {
binaryRating = 0.0;
}
return new Rating(r.user(), r.product(), binaryRating);
});
// Group ratings by common user
JavaPairRDD<Object, Iterable<Rating>> userMovies = binarizedRatings.groupBy(Rating::user);
// Get true relevant documents from all user ratings
JavaPairRDD<Object, List<Integer>> userMoviesList = userMovies.mapValues(docs -> {
List<Integer> products = new ArrayList<>();
for (Rating r : docs) {
if (r.rating() > 0.0) {
products.add(r.product());
}
}
return products;
});
// Extract the product id from each recommendation
JavaPairRDD<Object, List<Integer>> userRecommendedList = userRecommended.mapValues(docs -> {
List<Integer> products = new ArrayList<>();
for (Rating r : docs) {
products.add(r.product());
}
return products;
});
JavaRDD<Tuple2<List<Integer>, List<Integer>>> relevantDocs = userMoviesList.join(
userRecommendedList).values();
// Instantiate the metrics object
RankingMetrics<Integer> metrics = RankingMetrics.of(relevantDocs);
// Precision, NDCG and Recall at k
Integer[] kVector = {1, 3, 5};
for (Integer k : kVector) {
System.out.format("Precision at %d = %f\n", k, metrics.precisionAt(k));
System.out.format("NDCG at %d = %f\n", k, metrics.ndcgAt(k));
System.out.format("Recall at %d = %f\n", k, metrics.recallAt(k));
}
// Mean average precision
System.out.format("Mean average precision = %f\n", metrics.meanAveragePrecision());
//Mean average precision at k
System.out.format("Mean average precision at 2 = %f\n", metrics.meanAveragePrecisionAt(2));
// Evaluate the model using numerical ratings and regression metrics
JavaRDD<Tuple2<Object, Object>> userProducts =
ratings.map(r -> new Tuple2<>(r.user(), r.product()));
JavaPairRDD<Tuple2<Integer, Integer>, Object> predictions = JavaPairRDD.fromJavaRDD(
model.predict(JavaRDD.toRDD(userProducts)).toJavaRDD().map(r ->
new Tuple2<>(new Tuple2<>(r.user(), r.product()), r.rating())));
JavaRDD<Tuple2<Object, Object>> ratesAndPreds =
JavaPairRDD.fromJavaRDD(ratings.map(r ->
new Tuple2<Tuple2<Integer, Integer>, Object>(
new Tuple2<>(r.user(), r.product()),
r.rating())
)).join(predictions).values();
// Create regression metrics object
RegressionMetrics regressionMetrics = new RegressionMetrics(ratesAndPreds.rdd());
// Root mean squared error
System.out.format("RMSE = %f\n", regressionMetrics.rootMeanSquaredError());
// R-squared
System.out.format("R-squared = %f\n", regressionMetrics.r2());
// $example off$
sc.stop();
}
}
| JavaRankingMetricsExample |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/ProducerPerformance.java | {
"start": 24742,
"end": 30563
} | class ____ {
final String bootstrapServer;
final String topicName;
final long numRecords;
final long warmupRecords;
final Integer recordSize;
final double throughput;
final boolean payloadMonotonic;
final Properties producerProps;
final boolean shouldPrintMetrics;
final Long transactionDurationMs;
final boolean transactionsEnabled;
final List<byte[]> payloadByteList;
final long reportingInterval;
public ConfigPostProcessor(ArgumentParser parser, String[] args) throws IOException, ArgumentParserException {
Namespace namespace = parser.parseArgs(args);
this.bootstrapServer = namespace.getString("bootstrapServer");
this.topicName = namespace.getString("topic");
this.numRecords = namespace.getLong("numRecords");
this.warmupRecords = Math.max(namespace.getLong("warmupRecords"), 0);
this.recordSize = namespace.getInt("recordSize");
this.throughput = namespace.getDouble("throughput");
this.payloadMonotonic = namespace.getBoolean("payloadMonotonic");
this.shouldPrintMetrics = namespace.getBoolean("printMetrics");
this.reportingInterval = namespace.getLong("reportingInterval");
List<String> producerConfigs = namespace.getList("producerConfig");
String producerConfigFile = namespace.getString("producerConfigFile");
List<String> commandProperties = namespace.getList("commandProperties");
String commandConfigFile = namespace.getString("commandConfigFile");
String payloadFilePath = namespace.getString("payloadFile");
Long transactionDurationMsArg = namespace.getLong("transactionDurationMs");
String transactionIdArg = namespace.getString("transactionalId");
if (numRecords <= 0) {
throw new ArgumentParserException("--num-records should be greater than zero.", parser);
}
if (warmupRecords >= numRecords) {
throw new ArgumentParserException("The value for --warmup-records must be strictly fewer than the number of records in the test, --num-records.", parser);
}
if (recordSize != null && recordSize <= 0) {
throw new ArgumentParserException("--record-size should be greater than zero.", parser);
}
if (bootstrapServer == null && commandProperties == null && producerConfigs == null && producerConfigFile == null && commandConfigFile == null) {
throw new ArgumentParserException("At least one of --bootstrap-server, --command-property, --producer-props, --producer.config or --command-config must be specified.", parser);
}
if (commandProperties != null && producerConfigs != null) {
throw new ArgumentParserException("--command-property and --producer-props cannot be specified together.", parser);
}
if (commandConfigFile != null && producerConfigFile != null) {
throw new ArgumentParserException("--command-config and --producer.config cannot be specified together.", parser);
}
if (transactionDurationMsArg != null && transactionDurationMsArg <= 0) {
throw new ArgumentParserException("--transaction-duration-ms should be greater than zero.", parser);
}
if (reportingInterval <= 0) {
throw new ArgumentParserException("--reporting-interval should be greater than zero.", parser);
}
// since default value gets printed with the help text, we are escaping \n there and replacing it with correct value here.
String payloadDelimiter = namespace.getString("payloadDelimiter").equals("\\n")
? "\n" : namespace.getString("payloadDelimiter");
this.payloadByteList = readPayloadFile(payloadFilePath, payloadDelimiter);
if (producerConfigs != null) {
System.out.println("Option --producer-props has been deprecated and will be removed in a future version. Use --command-property instead.");
commandProperties = producerConfigs;
}
if (producerConfigFile != null) {
System.out.println("Option --producer.config has been deprecated and will be removed in a future version. Use --command-config instead.");
commandConfigFile = producerConfigFile;
}
this.producerProps = readProps(commandProperties, commandConfigFile);
if (bootstrapServer != null) {
producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
}
// setup transaction related configs
this.transactionsEnabled = transactionDurationMsArg != null
|| transactionIdArg != null
|| producerProps.containsKey(ProducerConfig.TRANSACTIONAL_ID_CONFIG);
if (transactionsEnabled) {
Optional<String> txIdInProps =
Optional.ofNullable(producerProps.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG))
.map(Object::toString);
String transactionId = Optional.ofNullable(transactionIdArg).orElse(txIdInProps.orElse(DEFAULT_TRANSACTION_ID_PREFIX + Uuid.randomUuid().toString()));
producerProps.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionId);
if (transactionDurationMsArg == null) {
transactionDurationMsArg = DEFAULT_TRANSACTION_DURATION_MS;
}
}
this.transactionDurationMs = transactionDurationMsArg;
}
}
}
| ConfigPostProcessor |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/Qualifier.java | {
"start": 1350,
"end": 1444
} | interface ____ {}
* </code></pre>
* <pre><code class='java'>
* // we can create | GermanToEnglish |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/CacheNode.java | {
"start": 890,
"end": 1140
} | class ____<V>{
private V value;
private long cacheTime;
public CacheNode(V value){
this.value = value;
cacheTime = Time.now();
}
public V get(){
return value;
}
public long getCacheTime(){
return cacheTime;
}
} | CacheNode |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java | {
"start": 2650,
"end": 32091
} | class ____ implements SimpleDiffable<Job>, Writeable, ToXContentObject {
public static final String TYPE = "job";
public static final String ANOMALY_DETECTOR_JOB_TYPE = "anomaly_detector";
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(Job.class);
/*
* Field names used in serialization
*/
public static final ParseField ID = new ParseField("job_id");
public static final ParseField JOB_TYPE = new ParseField("job_type");
public static final ParseField JOB_VERSION = new ParseField("job_version");
public static final ParseField GROUPS = new ParseField("groups");
public static final ParseField ANALYSIS_CONFIG = AnalysisConfig.ANALYSIS_CONFIG;
public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits");
public static final ParseField CREATE_TIME = new ParseField("create_time");
public static final ParseField CUSTOM_SETTINGS = new ParseField("custom_settings");
public static final ParseField DATA_DESCRIPTION = new ParseField("data_description");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField FINISHED_TIME = new ParseField("finished_time");
public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config");
public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days");
public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval");
public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("model_snapshot_retention_days");
public static final ParseField DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS = new ParseField("daily_model_snapshot_retention_after_days");
public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days");
public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id");
public static final ParseField MODEL_SNAPSHOT_MIN_VERSION = new ParseField("model_snapshot_min_version");
public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name");
public static final ParseField DELETING = new ParseField("deleting");
public static final ParseField ALLOW_LAZY_OPEN = new ParseField("allow_lazy_open");
public static final ParseField BLOCKED = new ParseField("blocked");
public static final ParseField DATAFEED_CONFIG = new ParseField("datafeed_config");
// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("jobs");
// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
public static final ObjectParser<Builder, Void> LENIENT_PARSER = createParser(true, true);
// Use the REST request parser to parse a job passed to the API, to disallow setting internal fields.
public static final ObjectParser<Builder, Void> REST_REQUEST_PARSER = createParser(false, false);
public static final TimeValue MIN_BACKGROUND_PERSIST_INTERVAL = TimeValue.timeValueHours(1);
/**
* This includes the overhead of thread stacks and data structures that the program might use that
* are not instrumented. (For the <code>autodetect</code> process categorization is not instrumented,
* and the <code>normalize</code> process is not instrumented at all.) But this overhead does NOT
* include the memory used by loading the executable code.
*/
public static final ByteSizeValue PROCESS_MEMORY_OVERHEAD = ByteSizeValue.ofMb(10);
public static final long DEFAULT_MODEL_SNAPSHOT_RETENTION_DAYS = 10;
public static final long DEFAULT_DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS = 1;
private static ObjectParser<Builder, Void> createParser(boolean allowInternalFields, boolean ignoreUnknownFields) {
ObjectParser<Builder, Void> parser = new ObjectParser<>("job_details", ignoreUnknownFields, Builder::new);
parser.declareString(Builder::setId, ID);
parser.declareStringArray(Builder::setGroups, GROUPS);
parser.declareStringOrNull(Builder::setDescription, DESCRIPTION);
parser.declareObject(
Builder::setAnalysisConfig,
ignoreUnknownFields ? AnalysisConfig.LENIENT_PARSER : AnalysisConfig.STRICT_PARSER,
ANALYSIS_CONFIG
);
parser.declareObject(
Builder::setAnalysisLimits,
ignoreUnknownFields ? AnalysisLimits.LENIENT_PARSER : AnalysisLimits.STRICT_PARSER,
ANALYSIS_LIMITS
);
parser.declareObject(
Builder::setDataDescription,
ignoreUnknownFields ? DataDescription.LENIENT_PARSER : DataDescription.STRICT_PARSER,
DATA_DESCRIPTION
);
parser.declareObject(
Builder::setModelPlotConfig,
ignoreUnknownFields ? ModelPlotConfig.LENIENT_PARSER : ModelPlotConfig.STRICT_PARSER,
MODEL_PLOT_CONFIG
);
parser.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS);
parser.declareString(
(builder, val) -> builder.setBackgroundPersistInterval(
TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())
),
BACKGROUND_PERSIST_INTERVAL
);
parser.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS);
parser.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS);
parser.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS);
parser.declareField(Builder::setCustomSettings, (p, c) -> p.mapOrdered(), CUSTOM_SETTINGS, ValueType.OBJECT);
parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME);
parser.declareBoolean(Builder::setAllowLazyOpen, ALLOW_LAZY_OPEN);
parser.declareObject(
Builder::setDatafeed,
ignoreUnknownFields ? DatafeedConfig.LENIENT_PARSER : DatafeedConfig.STRICT_PARSER,
DATAFEED_CONFIG
);
if (allowInternalFields) {
parser.declareString(Builder::setJobType, JOB_TYPE);
parser.declareString(Builder::setJobVersion, JOB_VERSION);
parser.declareField(
Builder::setCreateTime,
p -> TimeUtils.parseTimeField(p, CREATE_TIME.getPreferredName()),
CREATE_TIME,
ValueType.VALUE
);
parser.declareField(
Builder::setFinishedTime,
p -> TimeUtils.parseTimeField(p, FINISHED_TIME.getPreferredName()),
FINISHED_TIME,
ValueType.VALUE
);
parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION);
parser.declareBoolean(Builder::setDeleting, DELETING);
parser.declareObject(Builder::setBlocked, ignoreUnknownFields ? Blocked.LENIENT_PARSER : Blocked.STRICT_PARSER, BLOCKED);
}
return parser;
}
private final String jobId;
private final String jobType;
/**
* The version when the job was created.
* Will be null for versions before 5.5.
*/
@Nullable
private final MlConfigVersion jobVersion;
private final List<String> groups;
private final String description;
// TODO: Use java.time for the Dates here: x-pack-elasticsearch#829
private final Date createTime;
private final Date finishedTime;
private final AnalysisConfig analysisConfig;
private final AnalysisLimits analysisLimits;
private final DataDescription dataDescription;
private final ModelPlotConfig modelPlotConfig;
private final Long renormalizationWindowDays;
private final TimeValue backgroundPersistInterval;
private final Long modelSnapshotRetentionDays;
private final Long dailyModelSnapshotRetentionAfterDays;
private final Long resultsRetentionDays;
private final Map<String, Object> customSettings;
private final String modelSnapshotId;
private final MlConfigVersion modelSnapshotMinVersion;
private final String resultsIndexName;
private final boolean deleting;
private final boolean allowLazyOpen;
private final Blocked blocked;
private final DatafeedConfig datafeedConfig;
private Job(
String jobId,
String jobType,
MlConfigVersion jobVersion,
List<String> groups,
String description,
Date createTime,
Date finishedTime,
AnalysisConfig analysisConfig,
AnalysisLimits analysisLimits,
DataDescription dataDescription,
ModelPlotConfig modelPlotConfig,
Long renormalizationWindowDays,
TimeValue backgroundPersistInterval,
Long modelSnapshotRetentionDays,
Long dailyModelSnapshotRetentionAfterDays,
Long resultsRetentionDays,
Map<String, Object> customSettings,
String modelSnapshotId,
MlConfigVersion modelSnapshotMinVersion,
String resultsIndexName,
boolean deleting,
boolean allowLazyOpen,
Blocked blocked,
DatafeedConfig datafeedConfig
) {
this.jobId = jobId;
this.jobType = jobType;
this.jobVersion = jobVersion;
this.groups = Collections.unmodifiableList(groups);
this.description = description;
this.createTime = createTime;
this.finishedTime = finishedTime;
this.analysisConfig = analysisConfig;
this.analysisLimits = analysisLimits;
this.dataDescription = dataDescription;
this.modelPlotConfig = modelPlotConfig;
this.renormalizationWindowDays = renormalizationWindowDays;
this.backgroundPersistInterval = backgroundPersistInterval;
this.modelSnapshotRetentionDays = modelSnapshotRetentionDays;
this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays;
this.resultsRetentionDays = resultsRetentionDays;
this.customSettings = customSettings == null ? null : Collections.unmodifiableMap(customSettings);
this.modelSnapshotId = modelSnapshotId;
this.modelSnapshotMinVersion = modelSnapshotMinVersion;
this.resultsIndexName = resultsIndexName;
this.allowLazyOpen = allowLazyOpen;
if (deleting == false && blocked.getReason() == Blocked.Reason.DELETE) {
this.deleting = true;
} else {
this.deleting = deleting;
}
if (deleting && blocked.getReason() != Blocked.Reason.DELETE) {
this.blocked = new Blocked(Blocked.Reason.DELETE, null);
} else {
this.blocked = blocked;
}
this.datafeedConfig = datafeedConfig;
}
public Job(StreamInput in) throws IOException {
jobId = in.readString();
jobType = in.readString();
jobVersion = in.readBoolean() ? MlConfigVersion.readVersion(in) : null;
groups = in.readCollectionAsImmutableList(StreamInput::readString);
description = in.readOptionalString();
createTime = new Date(in.readVLong());
finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null;
analysisConfig = new AnalysisConfig(in);
analysisLimits = in.readOptionalWriteable(AnalysisLimits::new);
dataDescription = in.readOptionalWriteable(DataDescription::new);
modelPlotConfig = in.readOptionalWriteable(ModelPlotConfig::new);
renormalizationWindowDays = in.readOptionalLong();
backgroundPersistInterval = in.readOptionalTimeValue();
modelSnapshotRetentionDays = in.readOptionalLong();
dailyModelSnapshotRetentionAfterDays = in.readOptionalLong();
resultsRetentionDays = in.readOptionalLong();
Map<String, Object> readCustomSettings = in.readGenericMap();
customSettings = readCustomSettings == null ? null : Collections.unmodifiableMap(readCustomSettings);
modelSnapshotId = in.readOptionalString();
if (in.readBoolean()) {
modelSnapshotMinVersion = MlConfigVersion.readVersion(in);
} else {
modelSnapshotMinVersion = null;
}
resultsIndexName = in.readString();
deleting = in.readBoolean();
allowLazyOpen = in.readBoolean();
blocked = new Blocked(in);
this.datafeedConfig = in.readOptionalWriteable(DatafeedConfig::new);
}
/**
 * Builds the persisted document id for a job, i.e. {@code "anomaly_detector-" + jobId}
 * using the {@code ANOMALY_DETECTOR_JOB_TYPE} prefix.
 *
 * @param jobId the job id; must be a valid, length-limited ML id
 * @return the id of the document the job is persisted in
 * @throws IllegalArgumentException if {@code jobId} is not a valid job id or is too long
 */
public static String documentId(String jobId) {
    // Validate shape first, then length, mirroring the order of the user-facing messages.
    boolean validId = MlStrings.isValidId(jobId);
    if (validId == false) {
        throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName(), jobId));
    }
    boolean validLength = MlStrings.hasValidLengthForId(jobId);
    if (validLength == false) {
        throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_ID_TOO_LONG, MlStrings.ID_LENGTH_LIMIT));
    }
    return ANOMALY_DETECTOR_JOB_TYPE + "-" + jobId;
}
/**
 * Returns the job id from the doc id. Returns {@code null} if the doc id is invalid
 * (i.e. it does not start with the {@code ANOMALY_DETECTOR_JOB_TYPE + "-"} prefix).
 */
@Nullable
public static String extractJobIdFromDocumentId(String docId) {
    // The previous implementation used docId.replaceAll("^" + ANOMALY_DETECTOR_JOB_TYPE + "-", ""),
    // which routes a constant prefix strip through the regex engine: the pattern is recompiled on
    // every call and would break if the constant ever contained regex metacharacters. A plain
    // prefix check is behaviorally equivalent: the anchored pattern could only match once at the
    // start, and "no match" (jobId.equals(docId)) mapped to null exactly as below.
    String prefix = ANOMALY_DETECTOR_JOB_TYPE + "-";
    if (docId.startsWith(prefix) == false) {
        return null;
    }
    return docId.substring(prefix.length());
}
/**
 * Return the Job Id.
 *
 * @return The job Id string
 */
public String getId() {
return jobId;
}
/**
 * @return the job type; for this class always the anomaly-detector type string
 */
public String getJobType() {
return jobType;
}
/**
 * @return the ML config version the job was created with, or {@code null} if unknown
 */
public MlConfigVersion getJobVersion() {
return jobVersion;
}
/**
 * @return the (possibly empty, immutable) list of group names this job belongs to
 */
public List<String> getGroups() {
return groups;
}
/**
* A good starting name for the index storing the job's results.
* This defaults to the shared results index if a specific index name is not set.
* This method must <em>only</em> be used during initial job creation.
* After that the read/write aliases must always be used to access the job's
* results index, as the underlying index may roll or be reindexed.
* @return The job's initial results index name
*/
public String getInitialResultsIndexName() {
return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + resultsIndexName;
}
/**
* Get the unmodified <code>results_index_name</code> field from the job.
* This is provided to allow a job to be copied via the builder.
* After creation this does not necessarily reflect the actual concrete
* index used by the job. A job's results must always be read and written
* using the read and write aliases.
* @return The job's configured "index name"
*/
private String getResultsIndexNameNoPrefix() {
return resultsIndexName;
}
/**
* The job description
*
* @return job description
*/
public String getDescription() {
return description;
}
/**
* The Job creation time. This name is preferred when serialising to the
* REST API.
*
* @return The date the job was created
*/
public Date getCreateTime() {
return createTime;
}
/**
* The time the job was finished or <code>null</code> if not finished.
*
* @return The date the job was last retired or <code>null</code>
*/
public Date getFinishedTime() {
return finishedTime;
}
/**
* The analysis configuration object
*
* @return The AnalysisConfig
*/
public AnalysisConfig getAnalysisConfig() {
return analysisConfig;
}
/**
* The analysis options object
*
* @return The AnalysisLimits
*/
public AnalysisLimits getAnalysisLimits() {
return analysisLimits;
}
/**
 * @return the model plot configuration, or {@code null} if not configured
 */
public ModelPlotConfig getModelPlotConfig() {
return modelPlotConfig;
}
/**
* If not set the input data is assumed to be csv with a '_time' field in
* epoch format.
*
* @return A DataDescription or <code>null</code>
* @see DataDescription
*/
public DataDescription getDataDescription() {
return dataDescription;
}
/**
* The duration of the renormalization window in days
*
* @return renormalization window in days
*/
public Long getRenormalizationWindowDays() {
return renormalizationWindowDays;
}
/**
* The background persistence interval
*
* @return background persistence interval
*/
public TimeValue getBackgroundPersistInterval() {
return backgroundPersistInterval;
}
/**
 * @return how many days model snapshots are retained for, or {@code null} for the default
 */
public Long getModelSnapshotRetentionDays() {
return modelSnapshotRetentionDays;
}
/**
 * @return after how many days only one snapshot per day is kept, or {@code null} for the default
 */
public Long getDailyModelSnapshotRetentionAfterDays() {
return dailyModelSnapshotRetentionAfterDays;
}
/**
 * @return how many days results are retained for, or {@code null} for unlimited
 */
public Long getResultsRetentionDays() {
return resultsRetentionDays;
}
/**
 * @return arbitrary user-supplied metadata attached to the job, or {@code null}
 */
public Map<String, Object> getCustomSettings() {
return customSettings;
}
/**
 * @return the id of the current model snapshot, or {@code null} if none exists yet
 */
public String getModelSnapshotId() {
return modelSnapshotId;
}
/**
 * @return the minimum node version able to restore the current model snapshot, or {@code null}
 */
public MlConfigVersion getModelSnapshotMinVersion() {
return modelSnapshotMinVersion;
}
/**
 * @return whether the job is in the process of being deleted
 */
public boolean isDeleting() {
return deleting;
}
/**
 * @return whether the job is currently blocked by a reset action
 */
public boolean isResetting() {
return blocked != null && Blocked.Reason.RESET.equals(blocked.getReason());
}
/**
 * @return whether the job may be opened lazily when no ML node has capacity
 */
public boolean allowLazyOpen() {
return allowLazyOpen;
}
/**
 * @return the blocked state of the job (reason {@code NONE} when not blocked)
 */
public Blocked getBlocked() {
return blocked;
}
/**
 * Collects every input data field named in the job configuration: the analysis
 * fields plus the time field. Empty strings and the internal category field are
 * excluded. The result is sorted (backed by a {@code TreeSet}).
 *
 * @return the collection of fields - never {@code null}
 */
public Collection<String> allInputFields() {
    Set<String> fields = new TreeSet<>();
    if (analysisConfig != null) {
        fields.addAll(analysisConfig.analysisFields());
    }
    String timeField = dataDescription == null ? null : dataDescription.getTimeField();
    if (timeField != null) {
        fields.add(timeField);
    }
    // drop empty strings and the categorisation field, which is not an input field
    fields.remove("");
    fields.remove(AnalysisConfig.ML_CATEGORY_FIELD);
    return fields;
}
/**
 * Returns the timestamp before which data is not accepted by the job: the latest
 * record timestamp minus the configured latency. When no record has been seen yet
 * this returns {@code 0}, i.e. all data is accepted.
 *
 * @param dataCounts the job data counts
 * @return the timestamp before which data is not accepted by the job
 */
public long earliestValidTimestamp(DataCounts dataCounts) {
    Date latestRecord = dataCounts.getLatestRecordTimeStamp();
    if (latestRecord == null) {
        return 0;
    }
    TimeValue latency = analysisConfig.getLatency();
    long latencyMillis = latency == null ? 0 : latency.millis();
    return latestRecord.getTime() - latencyMillis;
}
/**
 * @return the datafeed attached to this job, or {@link Optional#empty()} if none is configured
 */
public Optional<DatafeedConfig> getDatafeedConfig() {
return Optional.ofNullable(datafeedConfig);
}
/**
 * Serializes this job to the wire.
 * <p>
 * NOTE(review): the write order must exactly mirror the {@code Job(StreamInput)}
 * constructor; changing either side alone breaks wire compatibility.
 */
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(jobId);
out.writeString(jobType);
// optional version: presence flag followed by the value
if (jobVersion != null) {
out.writeBoolean(true);
MlConfigVersion.writeVersion(jobVersion, out);
} else {
out.writeBoolean(false);
}
out.writeStringCollection(groups);
out.writeOptionalString(description);
out.writeVLong(createTime.getTime());
if (finishedTime != null) {
out.writeBoolean(true);
out.writeVLong(finishedTime.getTime());
} else {
out.writeBoolean(false);
}
analysisConfig.writeTo(out);
out.writeOptionalWriteable(analysisLimits);
out.writeOptionalWriteable(dataDescription);
out.writeOptionalWriteable(modelPlotConfig);
out.writeOptionalLong(renormalizationWindowDays);
out.writeOptionalTimeValue(backgroundPersistInterval);
out.writeOptionalLong(modelSnapshotRetentionDays);
out.writeOptionalLong(dailyModelSnapshotRetentionAfterDays);
out.writeOptionalLong(resultsRetentionDays);
out.writeGenericMap(customSettings);
out.writeOptionalString(modelSnapshotId);
if (modelSnapshotMinVersion != null) {
out.writeBoolean(true);
MlConfigVersion.writeVersion(modelSnapshotMinVersion, out);
} else {
out.writeBoolean(false);
}
out.writeString(resultsIndexName);
out.writeBoolean(deleting);
out.writeBoolean(allowLazyOpen);
blocked.writeTo(out);
out.writeOptionalWriteable(datafeedConfig);
}
/**
 * Renders the job as a complete XContent object; the field content is delegated
 * to {@code doXContentBody} so callers can embed the body elsewhere.
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
doXContentBody(builder, params);
builder.endObject();
return builder;
}
/**
 * Writes the job's fields (without the enclosing object markers) to {@code builder}.
 * <p>
 * Two params influence the output: {@code EXCLUDE_GENERATED} suppresses
 * server-generated fields (job type, version, timestamps, snapshot ids, datafeed),
 * and {@code FOR_INTERNAL_STORAGE} suppresses the embedded datafeed config when
 * persisting internally.
 */
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
final String humanReadableSuffix = "_string";
builder.field(ID.getPreferredName(), jobId);
if (params.paramAsBoolean(EXCLUDE_GENERATED, false) == false) {
// full output: include all generated/internal fields
builder.field(JOB_TYPE.getPreferredName(), jobType);
if (jobVersion != null) {
builder.field(JOB_VERSION.getPreferredName(), jobVersion);
}
// timestamps are emitted both as epoch millis and as a human readable "_string" twin
builder.timestampFieldsFromUnixEpochMillis(
CREATE_TIME.getPreferredName(),
CREATE_TIME.getPreferredName() + humanReadableSuffix,
createTime.getTime()
);
if (finishedTime != null) {
builder.timestampFieldsFromUnixEpochMillis(
FINISHED_TIME.getPreferredName(),
FINISHED_TIME.getPreferredName() + humanReadableSuffix,
finishedTime.getTime()
);
}
if (modelSnapshotId != null) {
builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId);
}
if (deleting) {
builder.field(DELETING.getPreferredName(), deleting);
}
if (modelSnapshotMinVersion != null) {
builder.field(MODEL_SNAPSHOT_MIN_VERSION.getPreferredName(), modelSnapshotMinVersion);
}
if (customSettings != null) {
builder.field(CUSTOM_SETTINGS.getPreferredName(), customSettings);
}
// TODO in v8.0.0 move this out so that it will be included when `exclude_generated` is `true`
if (params.paramAsBoolean(ToXContentParams.FOR_INTERNAL_STORAGE, false) == false) {
if (datafeedConfig != null) {
builder.field(DATAFEED_CONFIG.getPreferredName(), datafeedConfig, params);
}
}
} else {
// exclude_generated: keep custom settings but strip the generated "created_by" marker
if (customSettings != null) {
HashMap<String, Object> newCustomSettings = new HashMap<>(customSettings);
newCustomSettings.remove("created_by");
builder.field(CUSTOM_SETTINGS.getPreferredName(), newCustomSettings);
}
}
// user-supplied configuration fields, emitted only when set
if (groups.isEmpty() == false) {
builder.field(GROUPS.getPreferredName(), groups);
}
if (description != null) {
builder.field(DESCRIPTION.getPreferredName(), description);
}
builder.field(ANALYSIS_CONFIG.getPreferredName(), analysisConfig, params);
if (analysisLimits != null) {
builder.field(ANALYSIS_LIMITS.getPreferredName(), analysisLimits, params);
}
if (dataDescription != null) {
builder.field(DATA_DESCRIPTION.getPreferredName(), dataDescription, params);
}
if (modelPlotConfig != null) {
builder.field(MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig, params);
}
if (renormalizationWindowDays != null) {
builder.field(RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays);
}
if (backgroundPersistInterval != null) {
builder.field(BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval.getStringRep());
}
if (modelSnapshotRetentionDays != null) {
builder.field(MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays);
}
if (dailyModelSnapshotRetentionAfterDays != null) {
builder.field(DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS.getPreferredName(), dailyModelSnapshotRetentionAfterDays);
}
if (resultsRetentionDays != null) {
builder.field(RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays);
}
builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName);
builder.field(ALLOW_LAZY_OPEN.getPreferredName(), allowLazyOpen);
// the blocked sub-object is only rendered when the job is actually blocked
if (blocked.getReason() != Blocked.Reason.NONE) {
builder.field(BLOCKED.getPreferredName(), blocked);
}
return builder;
}
/**
 * Full structural equality over every configuration field.
 * <p>
 * NOTE(review): must be kept in lockstep with {@code hashCode()} — any field added
 * to one must be added to the other. The primitive booleans ({@code deleting},
 * {@code allowLazyOpen}) are compared via {@code Objects.equals}, which autoboxes;
 * harmless, but {@code ==} would avoid it.
 */
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other instanceof Job == false) {
return false;
}
Job that = (Job) other;
return Objects.equals(this.jobId, that.jobId)
&& Objects.equals(this.jobType, that.jobType)
&& Objects.equals(this.jobVersion, that.jobVersion)
&& Objects.equals(this.groups, that.groups)
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.createTime, that.createTime)
&& Objects.equals(this.finishedTime, that.finishedTime)
&& Objects.equals(this.analysisConfig, that.analysisConfig)
&& Objects.equals(this.analysisLimits, that.analysisLimits)
&& Objects.equals(this.dataDescription, that.dataDescription)
&& Objects.equals(this.modelPlotConfig, that.modelPlotConfig)
&& Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays)
&& Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval)
&& Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays)
&& Objects.equals(this.dailyModelSnapshotRetentionAfterDays, that.dailyModelSnapshotRetentionAfterDays)
&& Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays)
&& Objects.equals(this.customSettings, that.customSettings)
&& Objects.equals(this.modelSnapshotId, that.modelSnapshotId)
&& Objects.equals(this.modelSnapshotMinVersion, that.modelSnapshotMinVersion)
&& Objects.equals(this.resultsIndexName, that.resultsIndexName)
&& Objects.equals(this.deleting, that.deleting)
&& Objects.equals(this.allowLazyOpen, that.allowLazyOpen)
&& Objects.equals(this.blocked, that.blocked)
&& Objects.equals(this.datafeedConfig, that.datafeedConfig);
}
/**
 * Hash over the same field set, in the same order, as {@code equals(Object)};
 * keep the two methods in lockstep when adding fields.
 */
@Override
public int hashCode() {
return Objects.hash(
jobId,
jobType,
jobVersion,
groups,
description,
createTime,
finishedTime,
analysisConfig,
analysisLimits,
dataDescription,
modelPlotConfig,
renormalizationWindowDays,
backgroundPersistInterval,
modelSnapshotRetentionDays,
dailyModelSnapshotRetentionAfterDays,
resultsRetentionDays,
customSettings,
modelSnapshotId,
modelSnapshotMinVersion,
resultsIndexName,
deleting,
allowLazyOpen,
blocked,
datafeedConfig
);
}
// Class already extends from AbstractDiffable, so copied from ToXContentToBytes#toString()
/**
 * @return the job rendered as its XContent (JSON) representation
 */
@Override
public final String toString() {
return Strings.toString(this);
}
/**
 * Validates that an optional numeric setting, when present, is at least {@code minVal}.
 *
 * @param minVal the inclusive lower bound
 * @param name the setting name used in the error message
 * @param value the value to check; {@code null} is accepted (setting not supplied)
 * @throws IllegalArgumentException if {@code value} is non-null and below the bound
 */
private static void checkValueNotLessThan(long minVal, String name, Long value) {
    if (value == null) {
        return;
    }
    if (value < minVal) {
        throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, name, minVal, value));
    }
}
/**
 * Returns the job types that are compatible with a node with {@code mlConfigVersion}.
 * Currently every version supports only the anomaly detector job type, so the
 * parameter is not consulted; it is retained for call-site compatibility.
 *
 * @param mlConfigVersion the version ML configuration in use
 * @return a mutable set of the compatible job types
 */
public static Set<String> getCompatibleJobTypes(MlConfigVersion mlConfigVersion) {
    return new HashSet<>(Collections.singleton(ANOMALY_DETECTOR_JOB_TYPE));
}
public static | Job |
java | google__dagger | javatests/dagger/functional/assisted/AssistedFactoryAsQualifiedBindingTest.java | {
"start": 5925,
"end": 7771
} | interface ____ {
Bar create();
}
@Test
public void testFoo() {
Bar bar = new Bar();
BarFactory barFactory = () -> bar;
Foo foo =
DaggerAssistedFactoryAsQualifiedBindingTest_TestComponent.factory()
.create(bar, barFactory)
.foo();
// Test we can inject the "real" BarFactory implemented by Dagger
assertThat(foo.barFactory).isNotNull();
assertThat(foo.barFactory).isNotEqualTo(barFactory);
assertThat(foo.barFactory.create()).isNotEqualTo(bar);
// Test injection of a qualified Bar/BarFactory with custom @BindsInstance implementation
assertThat(foo.barAsComponentDependency).isEqualTo(bar);
assertThat(foo.barFactoryAsComponentDependency).isEqualTo(barFactory);
// Test injection of a qualified Bar/BarFactory with custom @Provides implementation
assertThat(foo.barAsProvides).isEqualTo(bar);
assertThat(foo.barFactoryAsProvides).isEqualTo(barFactory);
// Test injection of a qualified Bar/BarFactory with custom @Binds implementation
assertThat(foo.barAsBinds).isEqualTo(bar);
assertThat(foo.barFactoryAsBinds).isEqualTo(barFactory);
// Test injection of a qualified Bar/BarFactory with custom @BindsOptionalOf implementation
assertThat(foo.optionalBar).isPresent();
assertThat(foo.optionalBar).hasValue(bar);
assertThat(foo.optionalBarFactory).isPresent();
assertThat(foo.optionalBarFactory).hasValue(barFactory);
// Test injection of a qualified Bar/BarFactory as multibinding
assertThat(foo.barSet).containsExactly(bar);
assertThat(foo.barFactorySet).containsExactly(barFactory);
// Test injection of a unqualified Bar/BarFactory as multibinding
assertThat(foo.unqualifiedBarSet).containsExactly(bar);
assertThat(foo.unqualifiedBarFactorySet).containsExactly(barFactory);
}
}
| BarFactory |
java | elastic__elasticsearch | client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java | {
"start": 5179,
"end": 7192
} | class ____ implements Runnable {
private static final Logger logger = LogManager.getLogger(BulkIndexer.class);
private final BlockingQueue<List<String>> bulkData;
private final int warmupIterations;
private final int measurementIterations;
private final BulkRequestExecutor bulkRequestExecutor;
private final SampleRecorder sampleRecorder;
BulkIndexer(
BlockingQueue<List<String>> bulkData,
int warmupIterations,
int measurementIterations,
SampleRecorder sampleRecorder,
BulkRequestExecutor bulkRequestExecutor
) {
this.bulkData = bulkData;
this.warmupIterations = warmupIterations;
this.measurementIterations = measurementIterations;
this.bulkRequestExecutor = bulkRequestExecutor;
this.sampleRecorder = sampleRecorder;
}
@Override
public void run() {
for (int iteration = 0; iteration < warmupIterations + measurementIterations; iteration++) {
boolean success = false;
List<String> currentBulk;
try {
currentBulk = bulkData.take();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
// measure only service time, latency is not that interesting for a throughput benchmark
long start = System.nanoTime();
try {
success = bulkRequestExecutor.bulkIndex(currentBulk);
} catch (Exception ex) {
logger.warn("Error while executing bulk request", ex);
}
long stop = System.nanoTime();
if (iteration >= warmupIterations) {
sampleRecorder.addSample(new Sample("bulk", start, start, stop, success));
}
}
}
}
}
| BulkIndexer |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/JDKNumberDeserTest.java | {
"start": 1006,
"end": 1142
} | class ____
{
public MyBeanValue value;
}
@JsonDeserialize(using=MyBeanDeserializer.class)
static | MyBeanDefaultValue |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcCall.java | {
"start": 1263,
"end": 2627
} | class ____ {
@Test
public void testConstructor() {
Credentials credential = new CredentialsNone();
Verifier verifier = new VerifierNone();
int rpcVersion = RpcCall.RPC_VERSION;
int program = 2;
int version = 3;
int procedure = 4;
RpcCall call = new RpcCall(0, RpcMessage.Type.RPC_CALL, rpcVersion, program,
version, procedure, credential, verifier);
assertEquals(0, call.getXid());
assertEquals(RpcMessage.Type.RPC_CALL, call.getMessageType());
assertEquals(rpcVersion, call.getRpcVersion());
assertEquals(program, call.getProgram());
assertEquals(version, call.getVersion());
assertEquals(procedure, call.getProcedure());
assertEquals(credential, call.getCredential());
assertEquals(verifier, call.getVerifier());
}
@Test
public void testInvalidRpcVersion() {
assertThrows(IllegalArgumentException.class, () -> {
int invalidRpcVersion = 3;
new RpcCall(0, RpcMessage.Type.RPC_CALL, invalidRpcVersion, 2, 3, 4, null, null);
});
}
@Test
public void testInvalidRpcMessageType() {
assertThrows(IllegalArgumentException.class, () -> {
// Message typ is not RpcMessage.RPC_CALL
RpcMessage.Type invalidMessageType = RpcMessage.Type.RPC_REPLY;
new RpcCall(0, invalidMessageType, RpcCall.RPC_VERSION, 2, 3, 4, null, null);
});
}
}
| TestRpcCall |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/simple/SimpleMetricsExportAutoConfiguration.java | {
"start": 2224,
"end": 2573
} | class ____ {
@Bean
SimpleMeterRegistry simpleMeterRegistry(SimpleConfig config, Clock clock) {
return new SimpleMeterRegistry(config, clock);
}
@Bean
@ConditionalOnMissingBean
SimpleConfig simpleConfig(SimpleProperties simpleProperties) {
return new SimplePropertiesConfigAdapter(simpleProperties);
}
}
| SimpleMetricsExportAutoConfiguration |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java | {
"start": 30851,
"end": 48988
} | interface ____<T> {
/**
* Returns true if this updaters setting has changed with the current update
* @param current the current settings
* @param previous the previous setting
* @return true if this updaters setting has changed with the current update
*/
boolean hasChanged(Settings current, Settings previous);
/**
* Returns the instance value for the current settings. This method is stateless and idempotent.
* This method will throw an exception if the source of this value is invalid.
*/
T getValue(Settings current, Settings previous);
/**
* Applies the given value to the updater. This methods will actually run the update.
*/
void apply(T value, Settings current, Settings previous);
/**
* Updates this updaters value if it has changed.
* @return <code>true</code> iff the value has been updated.
*/
default boolean apply(Settings current, Settings previous) {
if (hasChanged(current, previous)) {
T value = getValue(current, previous);
apply(value, current, previous);
return true;
}
return false;
}
/**
* Returns a callable runnable that calls {@link #apply(Object, Settings, Settings)} if the settings
* actually changed. This allows to defer the update to a later point in time while keeping type safety.
* If the value didn't change the returned runnable is a noop.
*/
default Runnable updater(Settings current, Settings previous) {
if (hasChanged(current, previous)) {
T value = getValue(current, previous);
return () -> { apply(value, current, previous); };
}
return () -> {};
}
}
/**
* Returns the {@link Setting} for the given key or <code>null</code> if the setting can not be found.
*/
public final Setting<?> get(String key) {
Setting<?> raw = getRaw(key);
if (raw == null) {
return null;
}
if (raw.hasComplexMatcher()) {
return raw.getConcreteSetting(key);
} else {
return raw;
}
}
private Setting<?> getRaw(String key) {
Setting<?> setting = keySettings.get(key);
if (setting != null) {
return setting;
}
for (Map.Entry<String, Setting<?>> entry : complexMatchers.entrySet()) {
if (entry.getValue().match(key)) {
assert assertMatcher(key, 1);
assert entry.getValue().hasComplexMatcher();
return entry.getValue();
}
}
return null;
}
private boolean assertMatcher(String key, int numComplexMatchers) {
List<Setting<?>> list = new ArrayList<>();
for (Map.Entry<String, Setting<?>> entry : complexMatchers.entrySet()) {
if (entry.getValue().match(key)) {
list.add(entry.getValue().getConcreteSetting(key));
}
}
assert list.size() == numComplexMatchers
: "Expected " + numComplexMatchers + " complex matchers to match key [" + key + "] but got: " + list.toString();
return true;
}
/**
* Returns <code>true</code> if the setting for the given key is dynamically updateable. Otherwise <code>false</code>.
*/
public boolean isDynamicSetting(String key) {
final Setting<?> setting = get(key);
return setting != null && setting.isDynamic();
}
/**
* Returns <code>true</code> if the setting for the given key is final. Otherwise <code>false</code>.
*/
public boolean isFinalSetting(String key) {
final Setting<?> setting = get(key);
return setting != null && setting.isFinal();
}
/**
* Returns a settings object that contains all settings that are not
* already set in the given source. The diff contains either the default value for each
* setting or the settings value in the given default settings.
*/
public Settings diff(Settings source, Settings defaultSettings) {
Settings.Builder builder = Settings.builder();
for (Setting<?> setting : keySettings.values()) {
setting.diff(builder, source, defaultSettings);
}
for (Setting<?> setting : complexMatchers.values()) {
setting.diff(builder, source, defaultSettings);
}
return builder.build();
}
/**
* Returns the value for the given setting.
*/
public <T> T get(Setting<T> setting) {
if (setting.getProperties().contains(scope) == false) {
throw new IllegalArgumentException(
"settings scope doesn't match the setting scope [" + this.scope + "] not in [" + setting.getProperties() + "]"
);
}
if (get(setting.getKey()) == null) {
throw new IllegalArgumentException("setting " + setting.getKey() + " has not been registered");
}
return setting.get(this.lastSettingsApplied, settings);
}
/**
* Updates a target settings builder with new, updated or deleted settings from a given settings builder.
* <p>
* Note: This method will only allow updates to dynamic settings. if a non-dynamic setting is updated an
* {@link IllegalArgumentException} is thrown instead.
* </p>
*
* @param toApply the new settings to apply
* @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be
* removed from this builder
* @param updates a settings builder that holds all updates applied to target
* @param type a free text string to allow better exceptions messages
* @return <code>true</code> if the target has changed otherwise <code>false</code>
*/
public boolean updateDynamicSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) {
return updateSettings(toApply, target, updates, type, true);
}
/**
* Updates a target settings builder with new, updated or deleted settings from a given settings builder.
*
* @param toApply the new settings to apply
* @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be
* removed from this builder
* @param updates a settings builder that holds all updates applied to target
* @param type a free text string to allow better exceptions messages
* @return <code>true</code> if the target has changed otherwise <code>false</code>
*/
public boolean updateSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) {
return updateSettings(toApply, target, updates, type, false);
}
/**
* Returns <code>true</code> if the given key is a valid delete key
*/
private boolean isValidDelete(String key, boolean onlyDynamic) {
return isFinalSetting(key) == false && // it's not a final setting
(onlyDynamic && isDynamicSetting(key) // it's a dynamicSetting and we only do dynamic settings
|| get(key) == null && key.startsWith(ARCHIVED_SETTINGS_PREFIX) // the setting is not registered AND it's been archived
|| (onlyDynamic == false && get(key) != null)); // if it's not dynamic AND we have a key
}
/**
* Updates a target settings builder with new, updated or deleted settings from a given settings builder.
*
* @param toApply the new settings to apply
* @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be
* removed from this builder
* @param updates a settings builder that holds all updates applied to target
* @param type a free text string to allow better exceptions messages
* @param onlyDynamic if <code>false</code> all settings are updated otherwise only dynamic settings are updated. if set to
* <code>true</code> and a non-dynamic setting is updated an exception is thrown.
* @return <code>true</code> if the target has changed otherwise <code>false</code>
*/
private boolean updateSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type, boolean onlyDynamic) {
boolean changed = false;
final Set<String> toRemove = new HashSet<>();
Settings.Builder settingsBuilder = Settings.builder();
final Predicate<String> canUpdate = (key) -> (isFinalSetting(key) == false && // it's not a final setting
((onlyDynamic == false && get(key) != null) || isDynamicSetting(key)));
for (String key : toApply.keySet()) {
boolean isDelete = toApply.hasValue(key) == false;
if (isDelete && (isValidDelete(key, onlyDynamic) || key.endsWith("*"))) {
// this either accepts null values that suffice the canUpdate test OR wildcard expressions (key ends with *)
// we don't validate if there is any dynamic setting with that prefix yet we could do in the future
toRemove.add(key);
// we don't set changed here it's set after we apply deletes below if something actually changed
} else if (get(key) == null) {
throw new IllegalArgumentException(type + " setting [" + key + "], not recognized");
} else if (isDelete == false && canUpdate.test(key)) {
get(key).validateWithoutDependencies(toApply); // we might not have a full picture here do to a dependency validation
settingsBuilder.copy(key, toApply);
updates.copy(key, toApply);
changed |= toApply.get(key).equals(target.get(key)) == false;
} else {
if (isFinalSetting(key)) {
throw new IllegalArgumentException("final " + type + " setting [" + key + "], not updateable");
} else {
throw new IllegalArgumentException(type + " setting [" + key + "], not dynamically updateable");
}
}
}
changed |= applyDeletes(toRemove, target, k -> isValidDelete(k, onlyDynamic));
target.put(settingsBuilder.build());
return changed;
}
private static boolean applyDeletes(Set<String> deletes, Settings.Builder builder, Predicate<String> canRemove) {
boolean changed = false;
for (String entry : deletes) {
Set<String> keysToRemove = new HashSet<>();
Set<String> keySet = builder.keys();
for (String key : keySet) {
if (Regex.simpleMatch(entry, key) && canRemove.test(key)) {
// we have to re-check with canRemove here since we might have a wildcard expression foo.* that matches
// dynamic as well as static settings if that is the case we might remove static settings since we resolve the
// wildcards late
keysToRemove.add(key);
}
}
for (String key : keysToRemove) {
builder.remove(key);
changed = true;
}
}
return changed;
}
private static Setting<?> findOverlappingSetting(Setting<?> newSetting, Map<String, Setting<?>> complexMatchers) {
assert newSetting.hasComplexMatcher();
if (complexMatchers.containsKey(newSetting.getKey())) {
// we return null here because we use a putIfAbsent call when inserting into the map, so if it exists then we already checked
// the setting to make sure there are no overlapping settings.
return null;
}
for (Setting<?> existingSetting : complexMatchers.values()) {
if (newSetting.match(existingSetting.getKey()) || existingSetting.match(newSetting.getKey())) {
return existingSetting;
}
}
return null;
}
/**
* Archives invalid or unknown settings. Any setting that is not recognized or fails validation
* will be archived. This means the setting is prefixed with {@value ARCHIVED_SETTINGS_PREFIX}
* and remains in the settings object. This can be used to detect invalid settings via APIs.
*
* @param settings the {@link Settings} instance to scan for unknown or invalid settings
* @param unknownConsumer callback on unknown settings (consumer receives unknown key and its
* associated value)
* @param invalidConsumer callback on invalid settings (consumer receives invalid key, its
* associated value and an exception)
* @return a {@link Settings} instance with the unknown or invalid settings archived
*/
public Settings archiveUnknownOrInvalidSettings(
final Settings settings,
final Consumer<Map.Entry<String, String>> unknownConsumer,
final BiConsumer<Map.Entry<String, String>, IllegalArgumentException> invalidConsumer
) {
Settings.Builder builder = Settings.builder();
boolean changed = false;
for (String key : settings.keySet()) {
try {
Setting<?> setting = get(key);
if (setting != null) {
setting.get(settings);
builder.copy(key, settings);
} else {
if (key.startsWith(ARCHIVED_SETTINGS_PREFIX) || isPrivateSetting(key)) {
builder.copy(key, settings);
} else {
changed = true;
unknownConsumer.accept(new Entry(key, settings));
/*
* We put them back in here such that tools can check from the outside if there are any indices with invalid
* settings. The setting can remain there but we want users to be aware that some of their setting are invalid and
* they can research why and what they need to do to replace them.
*/
builder.copy(ARCHIVED_SETTINGS_PREFIX + key, key, settings);
}
}
} catch (IllegalArgumentException ex) {
changed = true;
invalidConsumer.accept(new Entry(key, settings), ex);
/*
* We put them back in here such that tools can check from the outside if there are any indices with invalid settings. The
* setting can remain there but we want users to be aware that some of their setting are invalid and they can research why
* and what they need to do to replace them.
*/
builder.copy(ARCHIVED_SETTINGS_PREFIX + key, key, settings);
}
}
if (changed) {
return builder.build();
} else {
return settings;
}
}
/**
* Deletes invalid or unknown settings. Any setting that is not recognized or fails validation
* will be deleted. This behaviour is desired when dealing with unknown index settings on
* system indices.
*
* @param settings the {@link Settings} instance to scan for unknown or invalid settings
* @param unknownConsumer callback on unknown settings (consumer receives unknown key and its
* associated value)
* @param invalidConsumer callback on invalid settings (consumer receives invalid key, its
* associated value and an exception)
* @return a {@link Settings} instance with the unknown or invalid settings removed
*/
public Settings deleteUnknownOrInvalidSettings(
final Settings settings,
final Consumer<Map.Entry<String, String>> unknownConsumer,
final BiConsumer<Map.Entry<String, String>, IllegalArgumentException> invalidConsumer
) {
Settings.Builder builder = Settings.builder();
boolean changed = false;
for (String key : settings.keySet()) {
try {
Setting<?> setting = get(key);
if (setting != null) {
// will throw IllegalArgumentException on invalid setting
setting.get(settings);
builder.copy(key, settings);
} else {
if (isPrivateSetting(key)) {
// will throw IllegalArgumentException on invalid setting
builder.copy(key, settings);
} else {
changed = true;
unknownConsumer.accept(new Entry(key, settings));
}
}
} catch (IllegalArgumentException ex) {
changed = true;
invalidConsumer.accept(new Entry(key, settings), ex);
}
}
if (changed) {
return builder.build();
} else {
return settings;
}
}
private record Entry(String key, Settings settings) implements Map.Entry<String, String> {
@Override
public String getKey() {
return key;
}
@Override
public String getValue() {
return settings.get(key);
}
@Override
public String setValue(String value) {
throw new UnsupportedOperationException();
}
}
/**
* Returns <code>true</code> iff the setting is a private setting ie. it should be treated as valid even though it has no internal
* representation. Otherwise <code>false</code>
*/
// TODO this should be replaced by Setting.Property.HIDDEN or something like this.
public boolean isPrivateSetting(String key) {
return false;
}
}
| SettingUpdater |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/body/TypedMessageBodyHandler.java | {
"start": 929,
"end": 1141
} | interface ____<T> extends MessageBodyHandler<T>, TypedMessageBodyWriter<T>, TypedMessageBodyReader<T> {
/**
* @return The body type.
*/
@NonNull
Argument<T> getType();
}
| TypedMessageBodyHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FutureTransformAsyncTest.java | {
"start": 22689,
"end": 23774
} | class ____ {
private Executor executor;
ListenableFuture<String> test() {
ListenableFuture<Integer> future1 = Futures.immediateFuture(5);
ListenableFuture<Integer> future2 = Futures.immediateFuture(10);
ListenableFuture<String> future =
Futures.whenAllSucceed(future1, future2)
.call(
() -> {
int total = Futures.getDone(future1) + Futures.getDone(future2);
return "Sum = " + total;
},
executor);
return future;
}
}
""")
.doTest();
}
@Test
public void submitAsync_expressionLambda() {
refactoringHelper
.addInputLines(
"in/Test.java",
"""
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.concurrent.Executor;
| Test |
java | spring-projects__spring-framework | spring-beans/src/testFixtures/java/org/springframework/beans/testfixture/beans/DummyBean.java | {
"start": 718,
"end": 1474
} | class ____ {
private Object value;
private String name;
private int age;
private TestBean spouse;
public DummyBean(Object value) {
this.value = value;
}
public DummyBean(String name, int age) {
this.name = name;
this.age = age;
}
public DummyBean(int ageRef, String nameRef) {
this.name = nameRef;
this.age = ageRef;
}
public DummyBean(String name, TestBean spouse) {
this.name = name;
this.spouse = spouse;
}
public DummyBean(String name, Object value, int age) {
this.name = name;
this.value = value;
this.age = age;
}
public Object getValue() {
return value;
}
public String getName() {
return name;
}
public int getAge() {
return age;
}
public TestBean getSpouse() {
return spouse;
}
}
| DummyBean |
java | quarkusio__quarkus | integration-tests/kafka-devservices/src/main/java/io/quarkus/it/kafka/KafkaAdminManager.java | {
"start": 630,
"end": 2416
} | class ____ {
@ConfigProperty(name = "kafka.bootstrap.servers")
String bs;
private static AdminClient createAdmin(String kafkaBootstrapServers) {
Properties properties = new Properties();
properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
return AdminClient.create(properties);
}
AdminClient admin;
@PostConstruct
void create() {
admin = createAdmin(bs);
}
@PreDestroy
void cleanup() {
admin.close();
}
public int partitions(String topic) {
TopicDescription topicDescription;
try {
Map<String, TopicDescription> partitions = admin.describeTopics(Collections.singletonList(topic))
.allTopicNames().get(2000, TimeUnit.MILLISECONDS);
topicDescription = partitions.get(topic);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
throw new RuntimeException(e);
}
if (topicDescription == null) {
throw new IllegalArgumentException("Topic doesn't exist: " + topic);
}
return topicDescription.partitions().size();
}
int port() throws InterruptedException, ExecutionException {
return admin.describeCluster().controller().get().port();
}
String image() throws InterruptedException, ExecutionException {
// By observation, the red panda does not return anything for the supported features call
// It would be nice to have a more robust check, but hopefully this fragile check is good enough
boolean isRedPanda = admin.describeFeatures().featureMetadata().get().supportedFeatures().size() == 0;
return isRedPanda ? "redpanda" : "kafka-native";
}
}
| KafkaAdminManager |
java | spring-projects__spring-boot | module/spring-boot-opentelemetry/src/test/java/org/springframework/boot/opentelemetry/autoconfigure/OpenTelemetryResourceAttributesTests.java | {
"start": 1243,
"end": 10161
} | class ____ {
private final MockEnvironment environment = new MockEnvironment();
private final Map<String, String> environmentVariables = new LinkedHashMap<>();
private final Map<String, String> resourceAttributes = new LinkedHashMap<>();
@Test
void otelServiceNameShouldTakePrecedenceOverOtelResourceAttributes() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "service.name=ignored");
this.environmentVariables.put("OTEL_SERVICE_NAME", "otel-service");
assertThat(getAttributes()).hasSize(1).containsEntry("service.name", "otel-service");
}
@Test
void otelServiceNameWhenEmptyShouldTakePrecedenceOverOtelResourceAttributes() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "service.name=ignored");
this.environmentVariables.put("OTEL_SERVICE_NAME", "");
assertThat(getAttributes()).hasSize(1).containsEntry("service.name", "");
}
@Test
void otelResourceAttributes() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES",
", ,,key1=value1,key2= value2, key3=value3,key4=,=value5,key6,=,key7=%20spring+boot%20,key8=ś");
assertThat(getAttributes()).hasSize(7)
.containsEntry("key1", "value1")
.containsEntry("key2", "value2")
.containsEntry("key3", "value3")
.containsEntry("key4", "")
.containsEntry("key7", " spring+boot ")
.containsEntry("key8", "ś")
.containsEntry("service.name", "unknown_service");
}
@Test
void resourceAttributesShouldBeMergedWithEnvironmentVariablesAndTakePrecedence() {
this.resourceAttributes.put("service.group", "custom-group");
this.resourceAttributes.put("key2", "");
this.environmentVariables.put("OTEL_SERVICE_NAME", "custom-service");
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "key1=value1,key2=value2");
assertThat(getAttributes()).hasSize(4)
.containsEntry("service.name", "custom-service")
.containsEntry("service.group", "custom-group")
.containsEntry("key1", "value1")
.containsEntry("key2", "");
}
@Test
void invalidResourceAttributesShouldBeIgnored() {
this.resourceAttributes.put("", "empty-key");
this.resourceAttributes.put(null, "null-key");
this.resourceAttributes.put("null-value", null);
this.resourceAttributes.put("empty-value", "");
assertThat(getAttributes()).hasSize(2)
.containsEntry("service.name", "unknown_service")
.containsEntry("empty-value", "");
}
@Test
@SuppressWarnings("unchecked")
void systemGetEnvShouldBeUsedAsDefaultEnvFunction() {
OpenTelemetryResourceAttributes attributes = new OpenTelemetryResourceAttributes(this.environment, null);
Function<String, String> getEnv = assertThat(attributes).extracting("systemEnvironment")
.asInstanceOf(InstanceOfAssertFactories.type(Function.class))
.actual();
System.getenv().forEach((key, value) -> assertThat(getEnv.apply(key)).isEqualTo(value));
}
@Test
void otelResourceAttributeValuesShouldBePercentDecoded() {
PercentEscaper escaper = PercentEscaper.create();
String value = IntStream.range(32, 127)
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
.toString();
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "key=" + escaper.escape(value));
assertThat(getAttributes()).hasSize(2)
.containsEntry("service.name", "unknown_service")
.containsEntry("key", value);
}
@Test
void otelResourceAttributeValuesShouldBePercentDecodedWhenStringContainsNonAscii() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "key=%20\u015bp\u0159\u00ec\u0144\u0121%20");
assertThat(getAttributes()).hasSize(2)
.containsEntry("service.name", "unknown_service")
.containsEntry("key", " śpřìńġ ");
}
@Test
void otelResourceAttributeValuesShouldBePercentDecodedWhenMultiByteSequences() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "key=T%C5%8Dky%C5%8D");
assertThat(getAttributes()).hasSize(2)
.containsEntry("service.name", "unknown_service")
.containsEntry("key", "Tōkyō");
}
@Test
void illegalArgumentExceptionShouldBeThrownWhenDecodingIllegalHexCharPercentEncodedValue() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "key=abc%ß");
assertThatIllegalArgumentException().isThrownBy(this::getAttributes)
.withMessage("Incomplete trailing escape (%) pattern");
}
@Test
void replacementCharShouldBeUsedWhenDecodingNonUtf8Character() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "key=%a3%3e");
assertThat(getAttributes()).containsEntry("key", "\ufffd>");
}
@Test
void illegalArgumentExceptionShouldBeThrownWhenDecodingInvalidPercentEncodedValue() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "key=%");
assertThatIllegalArgumentException().isThrownBy(this::getAttributes)
.withMessage("Incomplete trailing escape (%) pattern");
}
@Test
void unknownServiceShouldBeUsedAsDefaultServiceName() {
assertThat(getAttributes()).hasSize(1).containsEntry("service.name", "unknown_service");
}
@Test
void springApplicationGroupNameShouldBeUsedAsDefaultServiceNamespace() {
this.environment.setProperty("spring.application.group", "spring-boot");
assertThat(getAttributes()).hasSize(2)
.containsEntry("service.name", "unknown_service")
.containsEntry("service.namespace", "spring-boot");
}
@Test
void springApplicationNameShouldBeUsedAsDefaultServiceName() {
this.environment.setProperty("spring.application.name", "spring-boot-app");
assertThat(getAttributes()).hasSize(1).containsEntry("service.name", "spring-boot-app");
}
@Test
void serviceNamespaceShouldNotBePresentByDefault() {
assertThat(getAttributes()).hasSize(1).doesNotContainKey("service.namespace");
}
@Test
void resourceAttributesShouldTakePrecedenceOverSpringApplicationName() {
this.resourceAttributes.put("service.name", "spring-boot");
this.environment.setProperty("spring.application.name", "spring-boot-app");
assertThat(getAttributes()).hasSize(1).containsEntry("service.name", "spring-boot");
}
@Test
void otelResourceAttributesShouldTakePrecedenceOverSpringApplicationName() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "service.name=spring-boot");
this.environment.setProperty("spring.application.name", "spring-boot-app");
assertThat(getAttributes()).hasSize(1).containsEntry("service.name", "spring-boot");
}
@Test
void otelServiceNameShouldTakePrecedenceOverSpringApplicationName() {
this.environmentVariables.put("OTEL_SERVICE_NAME", "spring-boot");
this.environment.setProperty("spring.application.name", "spring-boot-app");
assertThat(getAttributes()).hasSize(1).containsEntry("service.name", "spring-boot");
}
@Test
void resourceAttributesShouldTakePrecedenceOverSpringApplicationGroupName() {
this.resourceAttributes.put("service.group", "spring-boot-app");
this.environment.setProperty("spring.application.group", "spring-boot");
assertThat(getAttributes()).hasSize(3)
.containsEntry("service.name", "unknown_service")
.containsEntry("service.group", "spring-boot-app");
}
@Test
void resourceAttributesShouldTakePrecedenceOverApplicationGroupNameForPopulatingServiceNamespace() {
this.resourceAttributes.put("service.namespace", "spring-boot-app");
this.environment.setProperty("spring.application.group", "overridden");
assertThat(getAttributes()).hasSize(2)
.containsEntry("service.name", "unknown_service")
.containsEntry("service.namespace", "spring-boot-app");
}
@Test
void otelResourceAttributesShouldTakePrecedenceOverSpringApplicationGroupName() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "service.namespace=spring-boot");
this.environment.setProperty("spring.application.group", "spring-boot-app");
assertThat(getAttributes()).hasSize(2)
.containsEntry("service.name", "unknown_service")
.containsEntry("service.namespace", "spring-boot");
}
@Test
void otelResourceAttributesShouldTakePrecedenceOverSpringApplicationGroupNameForServiceNamespace() {
this.environmentVariables.put("OTEL_RESOURCE_ATTRIBUTES", "service.namespace=spring-boot");
this.environment.setProperty("spring.application.group", "overridden");
assertThat(getAttributes()).hasSize(2)
.containsEntry("service.name", "unknown_service")
.containsEntry("service.namespace", "spring-boot");
}
@Test
void shouldUseServiceGroupForServiceNamespaceIfServiceGroupIsSet() {
this.environment.setProperty("spring.application.group", "alpha");
assertThat(getAttributes()).containsEntry("service.namespace", "alpha");
}
@Test
void shouldNotSetServiceNamespaceIfServiceGroupIsNotSet() {
assertThat(getAttributes()).doesNotContainKey("service.namespace");
}
private Map<String, String> getAttributes() {
Map<String, String> attributes = new LinkedHashMap<>();
new OpenTelemetryResourceAttributes(this.environment, this.resourceAttributes, this.environmentVariables::get)
.applyTo(attributes::put);
return attributes;
}
}
| OpenTelemetryResourceAttributesTests |
java | google__gson | gson/src/test/java/com/google/gson/functional/TypeHierarchyAdapterTest.java | {
"start": 8028,
"end": 8094
} | class ____ extends Manager {
Employee assistant;
}
static | CEO |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/xml/StaxEventXMLReader.java | {
"start": 2249,
"end": 11224
} | class ____ extends AbstractStaxXMLReader {
private static final String DEFAULT_XML_VERSION = "1.0";
private final XMLEventReader reader;
private String xmlVersion = DEFAULT_XML_VERSION;
private @Nullable String encoding;
/**
* Constructs a new instance of the {@code StaxEventXmlReader} that reads from
* the given {@code XMLEventReader}. The supplied event reader must be in
* {@code XMLStreamConstants.START_DOCUMENT} or {@code XMLStreamConstants.START_ELEMENT} state.
* @param reader the {@code XMLEventReader} to read from
* @throws IllegalStateException if the reader is not at the start of a document or element
*/
StaxEventXMLReader(XMLEventReader reader) {
try {
XMLEvent event = reader.peek();
if (event != null && !(event.isStartDocument() || event.isStartElement())) {
throw new IllegalStateException("XMLEventReader not at start of document or element");
}
}
catch (XMLStreamException ex) {
throw new IllegalStateException("Could not read first element: " + ex.getMessage());
}
this.reader = reader;
}
@Override
protected void parseInternal() throws SAXException, XMLStreamException {
boolean documentStarted = false;
boolean documentEnded = false;
int elementDepth = 0;
while (this.reader.hasNext() && elementDepth >= 0) {
XMLEvent event = this.reader.nextEvent();
if (!event.isStartDocument() && !event.isEndDocument() && !documentStarted) {
handleStartDocument(event);
documentStarted = true;
}
switch (event.getEventType()) {
case XMLStreamConstants.START_DOCUMENT -> {
handleStartDocument(event);
documentStarted = true;
}
case XMLStreamConstants.START_ELEMENT -> {
elementDepth++;
handleStartElement(event.asStartElement());
}
case XMLStreamConstants.END_ELEMENT -> {
elementDepth--;
if (elementDepth >= 0) {
handleEndElement(event.asEndElement());
}
}
case XMLStreamConstants.PROCESSING_INSTRUCTION ->
handleProcessingInstruction((ProcessingInstruction) event);
case XMLStreamConstants.CHARACTERS, XMLStreamConstants.SPACE, XMLStreamConstants.CDATA ->
handleCharacters(event.asCharacters());
case XMLStreamConstants.END_DOCUMENT -> {
handleEndDocument();
documentEnded = true;
}
case XMLStreamConstants.NOTATION_DECLARATION -> handleNotationDeclaration((NotationDeclaration) event);
case XMLStreamConstants.ENTITY_DECLARATION -> handleEntityDeclaration((EntityDeclaration) event);
case XMLStreamConstants.COMMENT -> handleComment((Comment) event);
case XMLStreamConstants.DTD -> handleDtd((DTD) event);
case XMLStreamConstants.ENTITY_REFERENCE -> handleEntityReference((EntityReference) event);
}
}
if (documentStarted && !documentEnded) {
handleEndDocument();
}
}
private void handleStartDocument(final XMLEvent event) throws SAXException {
if (event.isStartDocument()) {
StartDocument startDocument = (StartDocument) event;
String xmlVersion = startDocument.getVersion();
if (StringUtils.hasLength(xmlVersion)) {
this.xmlVersion = xmlVersion;
}
if (startDocument.encodingSet()) {
this.encoding = startDocument.getCharacterEncodingScheme();
}
}
ContentHandler contentHandler = getContentHandler();
if (contentHandler != null) {
final Location location = event.getLocation();
contentHandler.setDocumentLocator(new Locator2() {
@Override
public int getColumnNumber() {
return (location != null ? location.getColumnNumber() : -1);
}
@Override
public int getLineNumber() {
return (location != null ? location.getLineNumber() : -1);
}
@Override
public @Nullable String getPublicId() {
return (location != null ? location.getPublicId() : null);
}
@Override
public @Nullable String getSystemId() {
return (location != null ? location.getSystemId() : null);
}
@Override
public String getXMLVersion() {
return xmlVersion;
}
@Override
public @Nullable String getEncoding() {
return encoding;
}
});
contentHandler.startDocument();
}
}
private void handleStartElement(StartElement startElement) throws SAXException {
if (getContentHandler() != null) {
QName qName = startElement.getName();
if (hasNamespacesFeature()) {
for (Iterator i = startElement.getNamespaces(); i.hasNext();) {
Namespace namespace = (Namespace) i.next();
startPrefixMapping(namespace.getPrefix(), namespace.getNamespaceURI());
}
for (Iterator i = startElement.getAttributes(); i.hasNext();){
Attribute attribute = (Attribute) i.next();
QName attributeName = attribute.getName();
startPrefixMapping(attributeName.getPrefix(), attributeName.getNamespaceURI());
}
getContentHandler().startElement(qName.getNamespaceURI(), qName.getLocalPart(), toQualifiedName(qName),
getAttributes(startElement));
}
else {
getContentHandler().startElement("", "", toQualifiedName(qName), getAttributes(startElement));
}
}
}
private void handleCharacters(Characters characters) throws SAXException {
char[] data = characters.getData().toCharArray();
if (getContentHandler() != null && characters.isIgnorableWhiteSpace()) {
getContentHandler().ignorableWhitespace(data, 0, data.length);
return;
}
if (characters.isCData() && getLexicalHandler() != null) {
getLexicalHandler().startCDATA();
}
if (getContentHandler() != null) {
getContentHandler().characters(data, 0, data.length);
}
if (characters.isCData() && getLexicalHandler() != null) {
getLexicalHandler().endCDATA();
}
}
private void handleEndElement(EndElement endElement) throws SAXException {
if (getContentHandler() != null) {
QName qName = endElement.getName();
if (hasNamespacesFeature()) {
getContentHandler().endElement(qName.getNamespaceURI(), qName.getLocalPart(), toQualifiedName(qName));
for (Iterator i = endElement.getNamespaces(); i.hasNext();) {
Namespace namespace = (Namespace) i.next();
endPrefixMapping(namespace.getPrefix());
}
}
else {
getContentHandler().endElement("", "", toQualifiedName(qName));
}
}
}
private void handleEndDocument() throws SAXException {
if (getContentHandler() != null) {
getContentHandler().endDocument();
}
}
private void handleNotationDeclaration(NotationDeclaration declaration) throws SAXException {
if (getDTDHandler() != null) {
getDTDHandler().notationDecl(declaration.getName(), declaration.getPublicId(), declaration.getSystemId());
}
}
private void handleEntityDeclaration(EntityDeclaration entityDeclaration) throws SAXException {
if (getDTDHandler() != null) {
getDTDHandler().unparsedEntityDecl(entityDeclaration.getName(), entityDeclaration.getPublicId(),
entityDeclaration.getSystemId(), entityDeclaration.getNotationName());
}
}
private void handleProcessingInstruction(ProcessingInstruction pi) throws SAXException {
if (getContentHandler() != null) {
getContentHandler().processingInstruction(pi.getTarget(), pi.getData());
}
}
private void handleComment(Comment comment) throws SAXException {
if (getLexicalHandler() != null) {
char[] ch = comment.getText().toCharArray();
getLexicalHandler().comment(ch, 0, ch.length);
}
}
private void handleDtd(DTD dtd) throws SAXException {
if (getLexicalHandler() != null) {
Location location = dtd.getLocation();
getLexicalHandler().startDTD(null, location.getPublicId(), location.getSystemId());
}
if (getLexicalHandler() != null) {
getLexicalHandler().endDTD();
}
}
private void handleEntityReference(EntityReference reference) throws SAXException {
if (getLexicalHandler() != null) {
getLexicalHandler().startEntity(reference.getName());
}
if (getLexicalHandler() != null) {
getLexicalHandler().endEntity(reference.getName());
}
}
private Attributes getAttributes(StartElement event) {
AttributesImpl attributes = new AttributesImpl();
for (Iterator i = event.getAttributes(); i.hasNext();) {
Attribute attribute = (Attribute) i.next();
QName qName = attribute.getName();
String namespace = qName.getNamespaceURI();
if (namespace == null || !hasNamespacesFeature()) {
namespace = "";
}
String type = attribute.getDTDType();
if (type == null) {
type = "CDATA";
}
attributes.addAttribute(namespace, qName.getLocalPart(), toQualifiedName(qName), type, attribute.getValue());
}
if (hasNamespacePrefixesFeature()) {
for (Iterator i = event.getNamespaces(); i.hasNext();) {
Namespace namespace = (Namespace) i.next();
String prefix = namespace.getPrefix();
String namespaceUri = namespace.getNamespaceURI();
String qName;
if (StringUtils.hasLength(prefix)) {
qName = "xmlns:" + prefix;
}
else {
qName = "xmlns";
}
attributes.addAttribute("", "", qName, "CDATA", namespaceUri);
}
}
return attributes;
}
}
| StaxEventXMLReader |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/abilities/source/FilterPushDownSpec.java | {
"start": 2548,
"end": 8668
} | class ____ extends SourceAbilitySpecBase {
public static final String FIELD_NAME_PREDICATES = "predicates";
@JsonProperty(FIELD_NAME_PREDICATES)
private final List<RexNode> predicates;
/**
* A flag which indicates all predicates are retained in the outer Filter operator.
*
* <p>This flog is only used for optimization phase, and should not be serialized.
*/
@JsonIgnore private final boolean allPredicatesRetained;
public FilterPushDownSpec(List<RexNode> predicates, boolean allPredicatesRetained) {
this.predicates = new ArrayList<>(checkNotNull(predicates));
this.allPredicatesRetained = allPredicatesRetained;
}
@JsonCreator
public FilterPushDownSpec(@JsonProperty(FIELD_NAME_PREDICATES) List<RexNode> predicates) {
this(predicates, true);
}
@JsonIgnore
public boolean isAllPredicatesRetained() {
return allPredicatesRetained;
}
public List<RexNode> getPredicates() {
return predicates;
}
@Override
public void apply(DynamicTableSource tableSource, SourceAbilityContext context) {
SupportsFilterPushDown.Result result = apply(predicates, tableSource, context);
if (result.getAcceptedFilters().size() != predicates.size()) {
throw new TableException("All predicates should be accepted here.");
}
}
public static SupportsFilterPushDown.Result apply(
List<RexNode> predicates,
DynamicTableSource tableSource,
SourceAbilityContext context) {
if (tableSource instanceof SupportsFilterPushDown) {
RexNodeToExpressionConverter converter =
new RexNodeToExpressionConverter(
new RexBuilder(context.getTypeFactory()),
context.getSourceRowType().getFieldNames().toArray(new String[0]),
context.getFunctionCatalog(),
context.getCatalogManager(),
Option.apply(
context.getTypeFactory()
.buildRelNodeRowType(context.getSourceRowType())));
List<Expression> filters =
predicates.stream()
.map(
p -> {
scala.Option<ResolvedExpression> expr = p.accept(converter);
if (expr.isDefined()) {
return expr.get();
} else {
throw new TableException(
String.format(
"%s can not be converted to Expression, please make sure %s can accept %s.",
p.toString(),
tableSource.getClass().getSimpleName(),
p.toString()));
}
})
.collect(Collectors.toList());
ExpressionResolver resolver =
ExpressionResolver.resolverFor(
context.getTableConfig(),
context.getClassLoader(),
name -> Optional.empty(),
context.getFunctionCatalog()
.asLookup(
str -> {
throw new TableException(
"We should not need to lookup any expressions at this point");
}),
context.getCatalogManager().getDataTypeFactory(),
(sqlExpression, inputRowType, outputType) -> {
throw new TableException(
"SQL expression parsing is not supported at this location.");
})
.build();
return ((SupportsFilterPushDown) tableSource).applyFilters(resolver.resolve(filters));
} else {
throw new TableException(
String.format(
"%s does not support SupportsFilterPushDown.",
tableSource.getClass().getName()));
}
}
@Override
public boolean needAdjustFieldReferenceAfterProjection() {
return true;
}
@Override
public String getDigests(SourceAbilityContext context) {
final List<String> expressionStrs = new ArrayList<>();
final RowType sourceRowType = context.getSourceRowType();
for (RexNode rexNode : predicates) {
expressionStrs.add(
FlinkRexUtil.getExpressionString(
rexNode,
JavaScalaConversionUtil.toScala(sourceRowType.getFieldNames())));
}
return String.format(
"filter=[%s]",
expressionStrs.stream()
.reduce((l, r) -> String.format("and(%s, %s)", l, r))
.orElse(""));
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
FilterPushDownSpec that = (FilterPushDownSpec) o;
return Objects.equals(predicates, that.predicates);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), predicates);
}
}
| FilterPushDownSpec |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ValueSerializer.java | {
"start": 16144,
"end": 16292
} | class ____ to be used as the marker for
* annotation {@link tools.jackson.databind.annotation.JsonSerialize}.
*/
public abstract static | is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/NativeQueryResultTypeAutoDiscoveryTest.java | {
"start": 20141,
"end": 20313
} | class ____ extends TestedEntity<String> {
public String getTestedProperty() {
return testedProperty;
}
}
@Entity(name = "nvarcharEntity")
public static | VarcharEntity |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/main/java/org/springframework/boot/webmvc/actuate/endpoint/web/AdditionalHealthEndpointPathsWebMvcHandlerMapping.java | {
"start": 1560,
"end": 3085
} | class ____ extends AbstractWebMvcEndpointHandlerMapping {
private final @Nullable ExposableWebEndpoint healthEndpoint;
private final Set<HealthEndpointGroup> groups;
public AdditionalHealthEndpointPathsWebMvcHandlerMapping(@Nullable ExposableWebEndpoint healthEndpoint,
Set<HealthEndpointGroup> groups) {
super(new EndpointMapping(""), asList(healthEndpoint), new EndpointMediaTypes(), false);
this.healthEndpoint = healthEndpoint;
this.groups = groups;
}
private static Collection<ExposableWebEndpoint> asList(@Nullable ExposableWebEndpoint healthEndpoint) {
return (healthEndpoint != null) ? Collections.singletonList(healthEndpoint) : Collections.emptyList();
}
@Override
protected void initHandlerMethods() {
if (this.healthEndpoint == null) {
return;
}
for (WebOperation operation : this.healthEndpoint.getOperations()) {
WebOperationRequestPredicate predicate = operation.getRequestPredicate();
String matchAllRemainingPathSegmentsVariable = predicate.getMatchAllRemainingPathSegmentsVariable();
if (matchAllRemainingPathSegmentsVariable != null) {
for (HealthEndpointGroup group : this.groups) {
AdditionalHealthEndpointPath additionalPath = group.getAdditionalPath();
if (additionalPath != null) {
registerMapping(this.healthEndpoint, predicate, operation, additionalPath.getValue());
}
}
}
}
}
@Override
protected LinksHandler getLinksHandler() {
return (request, response) -> null;
}
}
| AdditionalHealthEndpointPathsWebMvcHandlerMapping |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Functions.java | {
"start": 561,
"end": 795
} | class ____ {
public static boolean isAggregate(Expression e) {
return e instanceof AggregateFunction;
}
public static boolean isGrouping(Expression e) {
return e instanceof GroupingFunction;
}
}
| Functions |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java | {
"start": 16141,
"end": 17026
} | class ____ {
final Map<ShardId, ShardAllocationConfiguration> localShards;
final boolean active;
final String index;
Configuration(String index, Map<ShardId, ShardAllocationConfiguration> localShards) {
this.active = localShards.isEmpty() == false;
this.index = index;
this.localShards = Collections.unmodifiableMap(localShards);
}
/**
* Find out, if the supplied index matches the current watcher configuration and the
* current state is active
*
* @param index The name of the index to compare with
* @return false if watcher is not active or the passed index is not the watcher index
*/
public boolean isIndexAndActive(String index) {
return active && index.equals(this.index);
}
}
static final | Configuration |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/argumentselectiondefects/AssertEqualsArgumentOrderCheckerTest.java | {
"start": 6173,
"end": 6666
} | class ____ {
static void assertEquals(Object expected, Object actual) {}
;
void test(Object other1, Object other2) {
assertEquals(other1, other2);
}
}
""")
.doTest();
}
@Test
public void assertEqualsCheck_makesNoChange_whenArgumentExtendsThrowable() {
compilationHelper
.addSourceLines(
"ErrorProneTest.java",
"""
abstract | ErrorProneTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/NaturalIdInUninitializedAssociationTest.java | {
"start": 5101,
"end": 5361
} | class ____ {
@Id
private int id;
@NaturalId(mutable = false)
private String name;
public EntityImmutableNaturalId() {
}
public EntityImmutableNaturalId(int id, String name) {
this.id = id;
this.name = name;
}
}
}
| EntityImmutableNaturalId |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/features/InfrastructureFeatures.java | {
"start": 658,
"end": 2362
} | class ____ implements FeatureSpecification {
/*
* These features are auto-generated from the constants in RestApiVersion.
*
* When there's a new major version, CURRENT becomes N+1 and PREVIOUS becomes N.
* Because PREVIOUS is marked as assumed, this doesn't stop N+1 nodes from joining the cluster.
* A little table helps:
*
* Major | 9 | 10 | 11
* ---------|-----|---- |-----
* CURRENT | 9 | 10 | 11
* PREVIOUS | 8 | 9 | 10
*
* v9 knows about REST API 9 and 8. v10 knows about REST API 10 and 9.
* A v10 node can join a v9 cluster, as the ES_V_8 feature known by v9 is assumed.
* But the v9 nodes don't know about ES_V_10, so that feature isn't active
* on the v10 nodes until the cluster is fully upgraded,
* at which point the ES_V_8 feature also disappears from the cluster.
*
* One thing you must not do is check the PREVIOUS_VERSION feature existence on the cluster,
* as the answer will be wrong (v9 nodes will assume that v10 nodes have the v8 feature) - hence why it is private.
* That feature only exists here so that upgrades work to remove the feature from the cluster.
*/
public static final NodeFeature CURRENT_VERSION = new NodeFeature("ES_" + RestApiVersion.current());
private static final NodeFeature PREVIOUS_VERSION = new NodeFeature("ES_" + RestApiVersion.previous(), true);
@Override
public Set<NodeFeature> getFeatures() {
return Set.of(CURRENT_VERSION, PREVIOUS_VERSION);
}
@Override
public Set<NodeFeature> getTestFeatures() {
return Set.of(FeatureService.TEST_FEATURES_ENABLED);
}
}
| InfrastructureFeatures |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/cdi/CdiExtensionIntegrationTests.java | {
"start": 1355,
"end": 3029
} | class ____ {
private static SeContainer container;
private static Log LOGGER = LogFactory.getLog(CdiExtensionIntegrationTests.class);
@BeforeAll
static void setUpCdi() {
container = SeContainerInitializer.newInstance() //
.disableDiscovery() //
.addPackages(PersonRepository.class) //
.initialize();
LOGGER.debug("CDI container bootstrapped");
}
@AfterAll
static void tearDownCdi() {
container.close();
}
@Test // DATAJPA-319, DATAJPA-1180
@SuppressWarnings("rawtypes")
void foo() {
Set<Bean<?>> beans = container.getBeanManager().getBeans(PersonRepository.class);
assertThat(beans).hasSize(1);
assertThat(beans.iterator().next().getScope()).isEqualTo((Class) ApplicationScoped.class);
}
@Test // DATAJPA-136, DATAJPA-1180
void saveAndFindAll() {
RepositoryConsumer repositoryConsumer = container.select(RepositoryConsumer.class).get();
Person person = new Person();
repositoryConsumer.save(person);
repositoryConsumer.findAll();
}
@Test // DATAJPA-584, DATAJPA-1180
void returnOneFromCustomImpl() {
RepositoryConsumer repositoryConsumer = container.select(RepositoryConsumer.class).get();
assertThat(repositoryConsumer.returnOne()).isOne();
}
@Test // DATAJPA-584, DATAJPA-1180
void useQualifiedCustomizedUserRepo() {
RepositoryConsumer repositoryConsumer = container.select(RepositoryConsumer.class).get();
repositoryConsumer.doSomethingOnUserDB();
}
@Test // DATAJPA-1287
void useQualifiedFragmentUserRepo() {
RepositoryConsumer repositoryConsumer = container.select(RepositoryConsumer.class).get();
assertThat(repositoryConsumer.returnOneUserDB()).isOne();
}
}
| CdiExtensionIntegrationTests |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/CredentialsTestJob.java | {
"start": 1855,
"end": 2890
} | class ____ extends Configured implements Tool {
private static final int NUM_OF_KEYS = 10;
private static void checkSecrets(Credentials ts) {
if ( ts == null){
throw new RuntimeException("The credentials are not available");
// fail the test
}
for(int i=0; i<NUM_OF_KEYS; i++) {
String secretName = "alias"+i;
// get token storage and a key
byte[] secretValue = ts.getSecretKey(new Text(secretName));
if (secretValue == null){
throw new RuntimeException("The key "+ secretName + " is not available. ");
// fail the test
}
String secretValueStr = new String (secretValue, StandardCharsets.UTF_8);
System.out.println(secretValueStr);
if ( !("password"+i).equals(secretValueStr)){
throw new RuntimeException("The key "+ secretName +
" is not correct. Expected value is "+ ("password"+i) +
". Actual value is " + secretValueStr); // fail the test
}
}
}
public static | CredentialsTestJob |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.