language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneInlineJoinOnEmptyRightSide.java | {
"start": 597,
"end": 857
} | class ____ extends OptimizerRules.OptimizerRule<InlineJoin> {
@Override
protected LogicalPlan rule(InlineJoin plan) {
return plan.right() instanceof LocalRelation lr ? InlineJoin.inlineData(plan, lr) : plan;
}
}
| PruneInlineJoinOnEmptyRightSide |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/aot/samples/bean/override/EasyMockBeanJupiterTests.java | {
"start": 2180,
"end": 2274
} | class ____ {
@Bean
MessageService messageService() {
return () -> "prod";
}
}
}
| Config |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/server/header/ReferrerPolicyServerHttpHeadersWriter.java | {
"start": 2084,
"end": 2954
} | enum ____ {
NO_REFERRER("no-referrer"),
NO_REFERRER_WHEN_DOWNGRADE("no-referrer-when-downgrade"),
SAME_ORIGIN("same-origin"),
ORIGIN("origin"),
STRICT_ORIGIN("strict-origin"),
ORIGIN_WHEN_CROSS_ORIGIN("origin-when-cross-origin"),
STRICT_ORIGIN_WHEN_CROSS_ORIGIN("strict-origin-when-cross-origin"),
UNSAFE_URL("unsafe-url");
private static final Map<String, ReferrerPolicy> REFERRER_POLICIES;
static {
Map<String, ReferrerPolicy> referrerPolicies = new HashMap<>();
for (ReferrerPolicy referrerPolicy : values()) {
referrerPolicies.put(referrerPolicy.getPolicy(), referrerPolicy);
}
REFERRER_POLICIES = Collections.unmodifiableMap(referrerPolicies);
}
private final String policy;
ReferrerPolicy(String policy) {
this.policy = policy;
}
public String getPolicy() {
return this.policy;
}
}
}
| ReferrerPolicy |
java | apache__camel | components/camel-google/camel-google-sheets/src/test/java/org/apache/camel/component/google/sheets/SheetsSpreadsheetsValuesIT.java | {
"start": 2263,
"end": 2576
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(SheetsSpreadsheetsValuesIT.class);
private static final String PATH_PREFIX
= GoogleSheetsApiCollection.getCollection().getApiName(SheetsSpreadsheetsValuesApiMethod.class).getName();
@Nested
| SheetsSpreadsheetsValuesIT |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsInOutTransferExchangeTest.java | {
"start": 2102,
"end": 6822
} | class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
private static final Logger LOG = LoggerFactory.getLogger(JmsInOutTransferExchangeTest.class);
@EndpointInject("mock:transfer")
protected MockEndpoint transfer;
@EndpointInject("mock:result")
protected MockEndpoint result;
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Override
protected String getComponentName() {
return "activemq";
}
@Test
public void testTransferExchangeInOut() throws Exception {
transfer.expectedMessageCount(1);
result.expectedMessageCount(1);
template.send("direct:start", exchange -> {
LOG.debug("Preparing the exchange");
exchange.getIn().setBody(new SerializableRequestDto("Restless Camel"));
Map<String, Object> map = new HashMap<>();
map.put("boolean", Boolean.TRUE);
map.put("string", "hello");
map.put("long", 123L);
map.put("double", 1.23);
exchange.getIn().setHeaders(map);
exchange.setProperty("PropertyName", "PropertyValue");
LOG.debug("Done preparing the exchange");
});
LOG.debug("Asserting transfer");
transfer.assertIsSatisfied();
LOG.debug("Asserting result");
result.assertIsSatisfied();
MockEndpoint.assertIsSatisfied(context);
Exchange transferExchange = transfer.getExchanges().get(0);
Exchange exchange = createExchangeWithBody(null);
assertInstanceOf(JmsMessage.class, transferExchange.getIn());
JmsMessage transferMessage = transferExchange.getIn(JmsMessage.class);
ActiveMQObjectMessage transferActiveMQMessage = (ActiveMQObjectMessage) transferMessage.getJmsMessage();
assertInstanceOf(DefaultExchangeHolder.class, transferActiveMQMessage.getObject());
DefaultExchangeHolder exchangeHolder = (DefaultExchangeHolder) transferActiveMQMessage.getObject();
DefaultExchangeHolder.unmarshal(exchange, exchangeHolder);
assertNotNull(exchange.getIn().getBody(SerializableRequestDto.class));
assertEquals(Boolean.TRUE, exchange.getIn().getHeader("boolean", Boolean.class));
assertEquals((Long) 123L, exchange.getIn().getHeader("long", Long.class));
assertEquals((Double) 1.23, exchange.getIn().getHeader("double", Double.class));
assertEquals("hello", exchange.getIn().getHeader("string", String.class));
assertEquals("PropertyValue", exchange.getProperty("PropertyName"));
Exchange resultExchange = result.getExchanges().get(0);
assertInstanceOf(JmsMessage.class, resultExchange.getIn());
JmsMessage resultMessage = resultExchange.getIn(JmsMessage.class);
ActiveMQObjectMessage resultActiveMQMessage = (ActiveMQObjectMessage) resultMessage.getJmsMessage();
exchangeHolder = (DefaultExchangeHolder) resultActiveMQMessage.getObject();
exchange = createExchangeWithBody(null);
DefaultExchangeHolder.unmarshal(exchange, exchangeHolder);
assertNotNull(exchange.getIn().getBody(SerializableResponseDto.class));
assertEquals(Boolean.TRUE, exchange.getIn().getHeader("boolean", Boolean.class));
assertEquals((Long) 123L, exchange.getIn().getHeader("long", Long.class));
assertEquals((Double) 1.23, exchange.getIn().getHeader("double", Double.class));
assertEquals("hello", exchange.getIn().getHeader("string", String.class));
assertEquals("PropertyValue", exchange.getProperty("PropertyName"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start")
.to(ExchangePattern.InOut, "activemq:responseGenerator?transferExchange=true")
.to("mock:result");
from("activemq:responseGenerator?transferExchange=true")
.to("mock:transfer")
.process(exchange -> exchange.getIn().setBody(new SerializableResponseDto(true)));
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
| JmsInOutTransferExchangeTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KubernetesNamespacesComponentBuilderFactory.java | {
"start": 6170,
"end": 7346
} | class ____
extends AbstractComponentBuilder<KubernetesNamespacesComponent>
implements KubernetesNamespacesComponentBuilder {
@Override
protected KubernetesNamespacesComponent buildConcreteComponent() {
return new KubernetesNamespacesComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "kubernetesClient": ((KubernetesNamespacesComponent) component).setKubernetesClient((io.fabric8.kubernetes.client.KubernetesClient) value); return true;
case "bridgeErrorHandler": ((KubernetesNamespacesComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((KubernetesNamespacesComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((KubernetesNamespacesComponent) component).setAutowiredEnabled((boolean) value); return true;
default: return false;
}
}
}
} | KubernetesNamespacesComponentBuilderImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/collection/spi/BagSemantics.java | {
"start": 204,
"end": 294
} | interface ____<BE extends Collection<E>, E> extends CollectionSemantics<BE,E> {
}
| BagSemantics |
java | google__guice | extensions/assistedinject/test/com/google/inject/assistedinject/FactoryModuleBuilderTest.java | {
"start": 15397,
"end": 15564
} | interface ____<E> {
Foo<E> create(Bar bar);
}
@SuppressWarnings("unused")
@Inject
Foo(@Assisted Bar bar, Baz<E> baz) {}
}
public static | Factory |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1460/Issue1460Mapper.java | {
"start": 997,
"end": 1202
} | class ____<T> {
private final T source;
public Value(T source) {
this.source = source;
}
public T getSource() {
return source;
}
}
}
| Value |
java | spring-projects__spring-security | oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/endpoint/RestClientJwtBearerTokenResponseClientTests.java | {
"start": 2902,
"end": 14049
} | class ____ {
private RestClientJwtBearerTokenResponseClient tokenResponseClient;
private MockWebServer server;
private ClientRegistration.Builder clientRegistration;
private Jwt jwtAssertion;
@BeforeEach
public void setUp() throws IOException {
this.tokenResponseClient = new RestClientJwtBearerTokenResponseClient();
this.server = new MockWebServer();
this.server.start();
String tokenUri = this.server.url("/oauth2/token").toString();
this.clientRegistration = TestClientRegistrations.clientCredentials()
.clientId("client-1")
.clientSecret("secret")
.authorizationGrantType(AuthorizationGrantType.JWT_BEARER)
.tokenUri(tokenUri)
.scope("read", "write");
this.jwtAssertion = TestJwts.jwt().build();
}
@AfterEach
public void cleanUp() throws IOException {
this.server.shutdown();
}
@Test
public void setRestClientWhenNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.tokenResponseClient.setRestClient(null))
.withMessage("restClient cannot be null");
// @formatter:on
}
@Test
public void setHeadersConverterWhenNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.tokenResponseClient.setHeadersConverter(null))
.withMessage("headersConverter cannot be null");
// @formatter:on
}
@Test
public void addHeadersConverterWhenNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.tokenResponseClient.addHeadersConverter(null))
.withMessage("headersConverter cannot be null");
// @formatter:on
}
@Test
public void setParametersConverterWhenNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.tokenResponseClient.setParametersConverter(null))
.withMessage("parametersConverter cannot be null");
// @formatter:on
}
@Test
public void addParametersConverterWhenNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.tokenResponseClient.addParametersConverter(null))
.withMessage("parametersConverter cannot be null");
// @formatter:on
}
@Test
public void setParametersCustomizerWhenNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.tokenResponseClient.setParametersCustomizer(null))
.withMessage("parametersCustomizer cannot be null");
// @formatter:on
}
@Test
public void getTokenResponseWhenGrantRequestIsNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.tokenResponseClient.getTokenResponse(null))
.withMessage("grantRequest cannot be null");
// @formatter:on
}
@Test
public void getTokenResponseWhenSuccessResponseThenReturnAccessTokenResponse() throws Exception {
this.server.enqueue(MockResponses.json("access-token-response-read-write.json"));
Instant expiresAtBefore = Instant.now().plusSeconds(3600);
ClientRegistration clientRegistration = this.clientRegistration.build();
Set<String> scopes = clientRegistration.getScopes();
JwtBearerGrantRequest grantRequest = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
OAuth2AccessTokenResponse accessTokenResponse = this.tokenResponseClient.getTokenResponse(grantRequest);
assertThat(accessTokenResponse).isNotNull();
Instant expiresAtAfter = Instant.now().plusSeconds(3600);
RecordedRequest recordedRequest = this.server.takeRequest();
assertThat(recordedRequest.getMethod()).isEqualTo(HttpMethod.POST.toString());
assertThat(recordedRequest.getHeader(HttpHeaders.ACCEPT)).isEqualTo(MediaType.APPLICATION_JSON_VALUE);
assertThat(recordedRequest.getHeader(HttpHeaders.CONTENT_TYPE))
.isEqualTo(MediaType.APPLICATION_FORM_URLENCODED_VALUE);
String formParameters = recordedRequest.getBody().readUtf8();
// @formatter:off
assertThat(formParameters).contains(
param(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.JWT_BEARER.getValue()),
param(OAuth2ParameterNames.ASSERTION, this.jwtAssertion.getTokenValue()),
param(OAuth2ParameterNames.SCOPE, StringUtils.collectionToDelimitedString(scopes, " "))
);
// @formatter:on
assertThat(accessTokenResponse.getAccessToken().getTokenValue()).isEqualTo("access-token-1234");
assertThat(accessTokenResponse.getAccessToken().getTokenType()).isEqualTo(OAuth2AccessToken.TokenType.BEARER);
assertThat(accessTokenResponse.getAccessToken().getExpiresAt()).isBetween(expiresAtBefore, expiresAtAfter);
assertThat(accessTokenResponse.getAccessToken().getScopes()).containsExactlyInAnyOrder("read", "write");
assertThat(accessTokenResponse.getRefreshToken()).isNull();
}
@Test
public void getTokenResponseWhenAuthenticationClientSecretBasicThenAuthorizationHeaderIsSent() throws Exception {
this.server.enqueue(MockResponses.json("access-token-response.json"));
ClientRegistration clientRegistration = this.clientRegistration.build();
JwtBearerGrantRequest grantRequest = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
this.tokenResponseClient.getTokenResponse(grantRequest);
RecordedRequest recordedRequest = this.server.takeRequest();
assertThat(recordedRequest.getHeader(HttpHeaders.AUTHORIZATION)).startsWith("Basic ");
}
@Test
public void getTokenResponseWhenAuthenticationClientSecretPostThenFormParametersAreSent() throws Exception {
this.server.enqueue(MockResponses.json("access-token-response.json"));
ClientRegistration clientRegistration = this.clientRegistration
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_POST)
.build();
JwtBearerGrantRequest grantRequest = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
this.tokenResponseClient.getTokenResponse(grantRequest);
RecordedRequest recordedRequest = this.server.takeRequest();
String formParameters = recordedRequest.getBody().readUtf8();
// @formatter:off
assertThat(formParameters).contains(
param(OAuth2ParameterNames.CLIENT_ID, "client-1"),
param(OAuth2ParameterNames.CLIENT_SECRET, "secret")
);
// @formatter:on
}
@Test
public void getTokenResponseWhenSuccessResponseAndNotBearerTokenTypeThenThrowOAuth2AuthorizationException() {
this.server.enqueue(MockResponses.json("invalid-token-type-response.json"));
ClientRegistration clientRegistration = this.clientRegistration.build();
JwtBearerGrantRequest grantRequest = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
// @formatter:off
assertThatExceptionOfType(OAuth2AuthorizationException.class)
.isThrownBy(() -> this.tokenResponseClient.getTokenResponse(grantRequest))
.satisfies((ex) -> assertThat(ex.getError().getErrorCode()).isEqualTo("invalid_token_response"))
.withMessageContaining("[invalid_token_response] An error occurred while attempting to retrieve the OAuth 2.0 Access Token Response")
.havingRootCause().withMessage("tokenType cannot be null");
// @formatter:on
}
@Test
public void getTokenResponseWhenSuccessResponseIncludesScopeThenAccessTokenHasResponseScope() {
this.server.enqueue(MockResponses.json("access-token-response-read.json"));
ClientRegistration clientRegistration = this.clientRegistration.build();
JwtBearerGrantRequest grantRequest = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
OAuth2AccessTokenResponse accessTokenResponse = this.tokenResponseClient.getTokenResponse(grantRequest);
assertThat(accessTokenResponse).isNotNull();
assertThat(accessTokenResponse.getAccessToken().getScopes()).containsExactly("read");
}
@Test
public void getTokenResponseWhenSuccessResponseDoesNotIncludeScopeThenAccessTokenHasNoScope() {
this.server.enqueue(MockResponses.json("access-token-response.json"));
ClientRegistration clientRegistration = this.clientRegistration.build();
JwtBearerGrantRequest grantRequest = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
OAuth2AccessTokenResponse accessTokenResponse = this.tokenResponseClient.getTokenResponse(grantRequest);
assertThat(accessTokenResponse).isNotNull();
assertThat(accessTokenResponse.getAccessToken().getScopes()).isEmpty();
}
@Test
public void getTokenResponseWhenInvalidResponseThenThrowOAuth2AuthorizationException() {
this.server.enqueue(new MockResponse().setResponseCode(301));
ClientRegistration clientRegistration = this.clientRegistration.build();
JwtBearerGrantRequest request = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
// @formatter:off
assertThatExceptionOfType(OAuth2AuthorizationException.class)
.isThrownBy(() -> this.tokenResponseClient.getTokenResponse(request))
.satisfies((ex) -> assertThat(ex.getError().getErrorCode()).isEqualTo("invalid_token_response"))
.withMessage("[invalid_token_response] Empty OAuth 2.0 Access Token Response");
// @formatter:on
}
@Test
public void getTokenResponseWhenServerErrorResponseThenThrowOAuth2AuthorizationException() {
this.server.enqueue(MockResponses.json("server-error-response.json").setResponseCode(500));
ClientRegistration clientRegistration = this.clientRegistration.build();
JwtBearerGrantRequest request = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
// @formatter:off
assertThatExceptionOfType(OAuth2AuthorizationException.class)
.isThrownBy(() -> this.tokenResponseClient.getTokenResponse(request))
.satisfies((ex) -> assertThat(ex.getError().getErrorCode()).isEqualTo("invalid_token_response"))
.withMessageContaining("[invalid_token_response] An error occurred while attempting to retrieve the OAuth 2.0 Access Token Response");
// @formatter:on
}
@Test
public void getTokenResponseWhenErrorResponseThenThrowOAuth2AuthorizationException() {
this.server.enqueue(MockResponses.json("invalid-grant-response.json").setResponseCode(400));
ClientRegistration clientRegistration = this.clientRegistration.build();
JwtBearerGrantRequest request = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
// @formatter:off
assertThatExceptionOfType(OAuth2AuthorizationException.class)
.isThrownBy(() -> this.tokenResponseClient.getTokenResponse(request))
.satisfies((ex) -> assertThat(ex.getError().getErrorCode()).isEqualTo(OAuth2ErrorCodes.INVALID_GRANT))
.withMessage("[invalid_grant] Invalid grant");
// @formatter:on
}
@Test
public void getTokenResponseWhenCustomClientAuthenticationMethodThenIllegalArgument() {
ClientRegistration clientRegistration = this.clientRegistration
.clientAuthenticationMethod(new ClientAuthenticationMethod("basic"))
.build();
JwtBearerGrantRequest grantRequest = new JwtBearerGrantRequest(clientRegistration, this.jwtAssertion);
// @formatter:off
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> this.tokenResponseClient.getTokenResponse(grantRequest))
.withMessageContaining("This | RestClientJwtBearerTokenResponseClientTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/mutation/internal/ExpressionDomainResultProducer.java | {
"start": 768,
"end": 2160
} | class ____ implements DomainResultProducer<Object>, Expression {
private final Expression expression;
public ExpressionDomainResultProducer(Expression expression) {
this.expression = expression;
}
@Override
public DomainResult<Object> createDomainResult(String resultVariable, DomainResultCreationState creationState) {
final SqlSelection sqlSelection = resolveSqlSelection( creationState );
return new BasicResult<>(
sqlSelection.getValuesArrayPosition(),
resultVariable,
expression.getExpressionType().getSingleJdbcMapping(),
null,
false,
false
);
}
@Override
public void applySqlSelections(DomainResultCreationState creationState) {
resolveSqlSelection( creationState );
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
expression.accept( sqlTreeWalker );
}
@Override
public JdbcMappingContainer getExpressionType() {
return expression.getExpressionType();
}
private SqlSelection resolveSqlSelection(DomainResultCreationState creationState) {
final SqlAstCreationState sqlAstCreationState = creationState.getSqlAstCreationState();
return sqlAstCreationState.getSqlExpressionResolver().resolveSqlSelection(
expression,
expression.getExpressionType().getSingleJdbcMapping().getJdbcJavaType(),
null,
sqlAstCreationState.getCreationContext().getTypeConfiguration()
);
}
}
| ExpressionDomainResultProducer |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/pool/ha/selector/StickyRandomDataSourceSelector.java | {
"start": 1111,
"end": 3379
} | class ____ extends RandomDataSourceSelector {
private static final Log LOG = LogFactory.getLog(StickyRandomDataSourceSelector.class);
private ThreadLocal<StickyDataSourceHolder> holders = new ThreadLocal<StickyDataSourceHolder>();
private int expireSeconds = 5;
public StickyRandomDataSourceSelector(HighAvailableDataSource highAvailableDataSource) {
super(highAvailableDataSource);
}
@Override
public String getName() {
return DataSourceSelectorEnum.STICKY_RANDOM.getName();
}
@Override
public DataSource get() {
StickyDataSourceHolder holder = holders.get();
if (holder != null && isAvailable(holder)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Return the sticky DataSource " + holder.getDataSource().toString() + " directly.");
}
return holder.getDataSource();
}
LOG.debug("Return a random DataSource.");
DataSource dataSource = super.get();
holder = new StickyDataSourceHolder(dataSource);
holders.remove();
holders.set(holder);
return dataSource;
}
private boolean isAvailable(StickyDataSourceHolder holder) {
boolean flag = isValid(holder) && !isExpired(holder);
if (flag && holder.getDataSource() instanceof DruidDataSource) {
flag = ((DruidDataSource) holder.getDataSource()).getPoolingCount() > 0;
}
return flag;
}
private boolean isValid(StickyDataSourceHolder holder) {
boolean flag = holder.isValid() && !getBlacklist().contains(holder.getDataSource());
if (!(holder.getDataSource() instanceof DruidDataSource) || !flag) {
return flag;
}
DruidDataSource dataSource = (DruidDataSource) holder.getDataSource();
return flag && dataSource.getActiveCount() < dataSource.getMaxActive();
}
private boolean isExpired(StickyDataSourceHolder holder) {
return System.currentTimeMillis() - holder.getRetrievingTime() > expireSeconds * 1000L;
}
public int getExpireSeconds() {
return expireSeconds;
}
public void setExpireSeconds(int expireSeconds) {
this.expireSeconds = expireSeconds;
}
}
| StickyRandomDataSourceSelector |
java | quarkusio__quarkus | extensions/elasticsearch-java-client/deployment/src/main/java/io/quarkus/elasticsearch/javaclient/deployment/ElasticsearchJavaClientProcessor.java | {
"start": 569,
"end": 1524
} | class ____ {
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem(Feature.ELASTICSEARCH_JAVA_CLIENT);
}
@BuildStep
AdditionalBeanBuildItem build() {
return AdditionalBeanBuildItem.unremovableOf(ElasticsearchJavaClientProducer.class);
}
@BuildStep
ServiceProviderBuildItem serviceProvider() {
return new ServiceProviderBuildItem("jakarta.json.spi.JsonProvider",
"co.elastic.clients.json.jackson.JacksonJsonProvider");
}
@BuildStep
ReflectiveClassBuildItem jsonProvider() {
return ReflectiveClassBuildItem.builder("org.eclipse.parsson.JsonProviderImpl").build();
}
@BuildStep
NativeImageFeatureBuildItem enableElasticsearchJavaClientFeature() {
return new NativeImageFeatureBuildItem(
"io.quarkus.elasticsearch.javaclient.runtime.graalvm.ElasticsearchJavaClientFeature");
}
}
| ElasticsearchJavaClientProcessor |
java | apache__dubbo | dubbo-plugin/dubbo-mcp/src/main/java/org/apache/dubbo/mcp/util/TypeSchemaUtils.java | {
"start": 11573,
"end": 12992
} | class ____ {
private final String type;
private final String format;
private final String description;
private final String javaType;
private final List<String> enumValues;
private final TypeSchemaInfo items;
private final TypeSchemaInfo additionalProperties;
private TypeSchemaInfo(Builder builder) {
this.type = builder.type;
this.format = builder.format;
this.description = builder.description;
this.javaType = builder.javaType;
this.enumValues = builder.enumValues;
this.items = builder.items;
this.additionalProperties = builder.additionalProperties;
}
public static Builder builder() {
return new Builder();
}
public String getType() {
return type;
}
public String getFormat() {
return format;
}
public String getDescription() {
return description;
}
public String getJavaType() {
return javaType;
}
public List<String> getEnumValues() {
return enumValues;
}
public TypeSchemaInfo getItems() {
return items;
}
public TypeSchemaInfo getAdditionalProperties() {
return additionalProperties;
}
public static | TypeSchemaInfo |
java | quarkusio__quarkus | extensions/reactive-routes/deployment/src/test/java/io/quarkus/vertx/web/ConflictingRouteTest.java | {
"start": 401,
"end": 915
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyRoutes.class));
@Test
public void testRouteConflict() {
String neo = get("/conflict/neo").asString();
Assertions.assertEquals("neo", neo);
String me = get("/conflict/me").asString();
Assertions.assertEquals("/me called", me);
}
@ApplicationScoped
public static | ConflictingRouteTest |
java | spring-projects__spring-boot | buildSrc/src/main/java/org/springframework/boot/build/devtools/DocumentDevtoolsPropertyDefaults.java | {
"start": 1398,
"end": 3535
} | class ____ extends DefaultTask {
private FileCollection defaults;
public DocumentDevtoolsPropertyDefaults() {
getOutputFile().convention(getProject().getLayout()
.getBuildDirectory()
.file("generated/docs/using/devtools-property-defaults.adoc"));
}
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
public FileCollection getDefaults() {
return this.defaults;
}
public void setDefaults(FileCollection defaults) {
this.defaults = defaults;
}
@OutputFile
public abstract RegularFileProperty getOutputFile();
@TaskAction
void documentPropertyDefaults() throws IOException {
Map<String, String> propertyDefaults = loadPropertyDefaults();
documentPropertyDefaults(propertyDefaults);
}
private Map<String, String> loadPropertyDefaults() throws IOException, FileNotFoundException {
Properties properties = new Properties();
Map<String, String> propertyDefaults = new TreeMap<>();
for (File contribution : this.defaults.getFiles()) {
if (contribution.isFile()) {
try (JarFile jar = new JarFile(contribution)) {
ZipEntry entry = jar.getEntry("META-INF/spring-devtools.properties");
if (entry != null) {
properties.load(jar.getInputStream(entry));
}
}
}
else if (contribution.exists()) {
throw new IllegalStateException(
"Unexpected Devtools default properties contribution from '" + contribution + "'");
}
}
for (String name : properties.stringPropertyNames()) {
if (name.startsWith("defaults.")) {
propertyDefaults.put(name.substring("defaults.".length()), properties.getProperty(name));
}
}
return propertyDefaults;
}
private void documentPropertyDefaults(Map<String, String> properties) throws IOException {
try (PrintWriter writer = new PrintWriter(new FileWriter(getOutputFile().getAsFile().get()))) {
writer.println("[cols=\"3,1\"]");
writer.println("|===");
writer.println("| Name | Default Value");
properties.forEach((name, value) -> {
writer.println();
writer.printf("| `%s`%n", name);
writer.printf("| `%s`%n", value);
});
writer.println("|===");
}
}
}
| DocumentDevtoolsPropertyDefaults |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/StringUtilsContainsTest.java | {
"start": 1563,
"end": 23079
} | class ____ extends AbstractLangTest {
@Test
void testContains_Char() {
assertFalse(StringUtils.contains(null, ' '));
assertFalse(StringUtils.contains("", ' '));
assertFalse(StringUtils.contains("", null));
assertFalse(StringUtils.contains(null, null));
assertTrue(StringUtils.contains("abc", 'a'));
assertTrue(StringUtils.contains("abc", 'b'));
assertTrue(StringUtils.contains("abc", 'c'));
assertFalse(StringUtils.contains("abc", 'z'));
}
@Test
void testContains_String() {
assertFalse(StringUtils.contains(null, null));
assertFalse(StringUtils.contains(null, ""));
assertFalse(StringUtils.contains(null, "a"));
assertFalse(StringUtils.contains("", null));
assertTrue(StringUtils.contains("", ""));
assertFalse(StringUtils.contains("", "a"));
assertTrue(StringUtils.contains("abc", "a"));
assertTrue(StringUtils.contains("abc", "b"));
assertTrue(StringUtils.contains("abc", "c"));
assertTrue(StringUtils.contains("abc", "abc"));
assertFalse(StringUtils.contains("abc", "z"));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContains_StringWithBadSupplementaryChars() {
// Test edge case: 1/2 of a (broken) supplementary char
assertFalse(StringUtils.contains(CharUSuppCharLow, CharU20001));
assertFalse(StringUtils.contains(CharUSuppCharHigh, CharU20001));
assertFalse(StringUtils.contains(CharU20001, CharUSuppCharLow));
assertEquals(0, CharU20001.indexOf(CharUSuppCharHigh));
assertTrue(StringUtils.contains(CharU20001, CharUSuppCharHigh));
assertTrue(StringUtils.contains(CharU20001 + CharUSuppCharHigh + "a", "a"));
assertTrue(StringUtils.contains(CharU20001 + CharUSuppCharLow + "a", "a"));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContains_StringWithSupplementaryChars() {
assertTrue(StringUtils.contains(CharU20000 + CharU20001, CharU20000));
assertTrue(StringUtils.contains(CharU20000 + CharU20001, CharU20001));
assertTrue(StringUtils.contains(CharU20000, CharU20000));
assertFalse(StringUtils.contains(CharU20000, CharU20001));
}
@Test
void testContainsAny_StringCharArray() {
assertFalse(StringUtils.containsAny(null, (char[]) null));
assertFalse(StringUtils.containsAny(null, new char[0]));
assertFalse(StringUtils.containsAny(null, 'a', 'b'));
assertFalse(StringUtils.containsAny("", (char[]) null));
assertFalse(StringUtils.containsAny("", new char[0]));
assertFalse(StringUtils.containsAny("", 'a', 'b'));
assertFalse(StringUtils.containsAny("zzabyycdxx", (char[]) null));
assertFalse(StringUtils.containsAny("zzabyycdxx", new char[0]));
assertTrue(StringUtils.containsAny("zzabyycdxx", 'z', 'a'));
assertTrue(StringUtils.containsAny("zzabyycdxx", 'b', 'y'));
assertTrue(StringUtils.containsAny("zzabyycdxx", 'z', 'y'));
assertFalse(StringUtils.containsAny("ab", 'z'));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContainsAny_StringCharArrayWithBadSupplementaryChars() {
// Test edge case: 1/2 of a (broken) supplementary char
assertFalse(StringUtils.containsAny(CharUSuppCharLow, CharU20001.toCharArray()));
assertFalse(StringUtils.containsAny("abc" + CharUSuppCharLow + "xyz", CharU20001.toCharArray()));
assertEquals(-1, CharUSuppCharHigh.indexOf(CharU20001));
assertFalse(StringUtils.containsAny(CharUSuppCharHigh, CharU20001.toCharArray()));
assertFalse(StringUtils.containsAny(CharU20001, CharUSuppCharLow.toCharArray()));
assertEquals(0, CharU20001.indexOf(CharUSuppCharHigh));
assertTrue(StringUtils.containsAny(CharU20001, CharUSuppCharHigh.toCharArray()));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContainsAny_StringCharArrayWithSupplementaryChars() {
assertTrue(StringUtils.containsAny(CharU20000 + CharU20001, CharU20000.toCharArray()));
assertTrue(StringUtils.containsAny("a" + CharU20000 + CharU20001, "a".toCharArray()));
assertTrue(StringUtils.containsAny(CharU20000 + "a" + CharU20001, "a".toCharArray()));
assertTrue(StringUtils.containsAny(CharU20000 + CharU20001 + "a", "a".toCharArray()));
assertTrue(StringUtils.containsAny(CharU20000 + CharU20001, CharU20001.toCharArray()));
assertTrue(StringUtils.containsAny(CharU20000, CharU20000.toCharArray()));
// Sanity check:
assertEquals(-1, CharU20000.indexOf(CharU20001));
assertEquals(0, CharU20000.indexOf(CharU20001.charAt(0)));
assertEquals(-1, CharU20000.indexOf(CharU20001.charAt(1)));
// Test:
assertFalse(StringUtils.containsAny(CharU20000, CharU20001.toCharArray()));
assertFalse(StringUtils.containsAny(CharU20001, CharU20000.toCharArray()));
}
@Test
void testContainsAny_StringString() {
assertFalse(StringUtils.containsAny(null, (String) null));
assertFalse(StringUtils.containsAny(null, ""));
assertFalse(StringUtils.containsAny(null, "ab"));
assertFalse(StringUtils.containsAny("", (String) null));
assertFalse(StringUtils.containsAny("", ""));
assertFalse(StringUtils.containsAny("", "ab"));
assertFalse(StringUtils.containsAny("zzabyycdxx", (String) null));
assertFalse(StringUtils.containsAny("zzabyycdxx", ""));
assertTrue(StringUtils.containsAny("zzabyycdxx", "za"));
assertTrue(StringUtils.containsAny("zzabyycdxx", "by"));
assertTrue(StringUtils.containsAny("zzabyycdxx", "zy"));
assertFalse(StringUtils.containsAny("ab", "z"));
}
@Test
void testContainsAny_StringStringArray() {
assertFalse(StringUtils.containsAny(null, (String[]) null));
assertFalse(StringUtils.containsAny(null, new String[0]));
assertFalse(StringUtils.containsAny(null, new String[] { "hello" }));
assertFalse(StringUtils.containsAny("", (String[]) null));
assertFalse(StringUtils.containsAny("", new String[0]));
assertFalse(StringUtils.containsAny("", new String[] { "hello" }));
assertFalse(StringUtils.containsAny("hello, goodbye", (String[]) null));
assertFalse(StringUtils.containsAny("hello, goodbye", new String[0]));
assertTrue(StringUtils.containsAny("hello, goodbye", new String[] { "hello", "goodbye" }));
assertTrue(StringUtils.containsAny("hello, goodbye", new String[] { "hello", "Goodbye" }));
assertFalse(StringUtils.containsAny("hello, goodbye", new String[] { "Hello", "Goodbye" }));
assertFalse(StringUtils.containsAny("hello, goodbye", new String[] { "Hello", null }));
assertFalse(StringUtils.containsAny("hello, null", new String[] { "Hello", null }));
// Javadoc examples:
assertTrue(StringUtils.containsAny("abcd", "ab", null));
assertTrue(StringUtils.containsAny("abcd", "ab", "cd"));
assertTrue(StringUtils.containsAny("abc", "d", "abc"));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContainsAny_StringWithBadSupplementaryChars() {
// Test edge case: 1/2 of a (broken) supplementary char
assertFalse(StringUtils.containsAny(CharUSuppCharLow, CharU20001));
assertEquals(-1, CharUSuppCharHigh.indexOf(CharU20001));
assertFalse(StringUtils.containsAny(CharUSuppCharHigh, CharU20001));
assertFalse(StringUtils.containsAny(CharU20001, CharUSuppCharLow));
assertEquals(0, CharU20001.indexOf(CharUSuppCharHigh));
assertTrue(StringUtils.containsAny(CharU20001, CharUSuppCharHigh));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContainsAny_StringWithSupplementaryChars() {
assertTrue(StringUtils.containsAny(CharU20000 + CharU20001, CharU20000));
assertTrue(StringUtils.containsAny(CharU20000 + CharU20001, CharU20001));
assertTrue(StringUtils.containsAny(CharU20000, CharU20000));
// Sanity check:
assertEquals(-1, CharU20000.indexOf(CharU20001));
assertEquals(0, CharU20000.indexOf(CharU20001.charAt(0)));
assertEquals(-1, CharU20000.indexOf(CharU20001.charAt(1)));
// Test:
assertFalse(StringUtils.containsAny(CharU20000, CharU20001));
assertFalse(StringUtils.containsAny(CharU20001, CharU20000));
}
@Test
void testContainsAnyIgnoreCase_StringStringArray() {
assertFalse(StringUtils.containsAnyIgnoreCase(null, (String[]) null));
assertFalse(StringUtils.containsAnyIgnoreCase(null, new String[0]));
assertFalse(StringUtils.containsAnyIgnoreCase(null, new String[] { "hello" }));
assertFalse(StringUtils.containsAnyIgnoreCase("", (String[]) null));
assertFalse(StringUtils.containsAnyIgnoreCase("", new String[0]));
assertFalse(StringUtils.containsAnyIgnoreCase("", new String[] { "hello" }));
assertFalse(StringUtils.containsAnyIgnoreCase("hello, goodbye", (String[]) null));
assertFalse(StringUtils.containsAnyIgnoreCase("hello, goodbye", new String[0]));
assertTrue(StringUtils.containsAnyIgnoreCase("hello, goodbye", new String[] { "hello", "goodbye" }));
assertTrue(StringUtils.containsAnyIgnoreCase("hello, goodbye", new String[] { "hello", "Goodbye" }));
assertTrue(StringUtils.containsAnyIgnoreCase("hello, goodbye", new String[] { "Hello", "Goodbye" }));
assertTrue(StringUtils.containsAnyIgnoreCase("hello, goodbye", new String[] { "Hello", null }));
assertTrue(StringUtils.containsAnyIgnoreCase("hello, null", new String[] { "Hello", null }));
// Javadoc examples:
assertTrue(StringUtils.containsAnyIgnoreCase("abcd", "ab", null));
assertTrue(StringUtils.containsAnyIgnoreCase("abcd", "ab", "cd"));
assertTrue(StringUtils.containsAnyIgnoreCase("abc", "d", "abc"));
}
@Test
@DefaultLocale(language = "de", country = "DE")
@ReadsDefaultLocale
@WritesDefaultLocale
void testContainsIgnoreCase_LocaleIndependence() {
final Locale[] locales = { Locale.ENGLISH, new Locale("tr"), Locale.getDefault() };
final String[][] tdata = { { "i", "I" }, { "I", "i" }, { "\u03C2", "\u03C3" }, { "\u03A3", "\u03C2" }, { "\u03A3", "\u03C3" }, };
final String[][] fdata = { { "\u00DF", "SS" }, };
for (final Locale testLocale : locales) {
Locale.setDefault(testLocale);
for (int j = 0; j < tdata.length; j++) {
assertTrue(StringUtils.containsIgnoreCase(tdata[j][0], tdata[j][1]), Locale.getDefault() + ": " + j + " " + tdata[j][0] + " " + tdata[j][1]);
}
for (int j = 0; j < fdata.length; j++) {
assertFalse(StringUtils.containsIgnoreCase(fdata[j][0], fdata[j][1]), Locale.getDefault() + ": " + j + " " + fdata[j][0] + " " + fdata[j][1]);
}
}
}
@Test
void testContainsIgnoreCase_StringString() {
assertFalse(StringUtils.containsIgnoreCase(null, null));
// Null tests
assertFalse(StringUtils.containsIgnoreCase(null, ""));
assertFalse(StringUtils.containsIgnoreCase(null, "a"));
assertFalse(StringUtils.containsIgnoreCase(null, "abc"));
assertFalse(StringUtils.containsIgnoreCase("", null));
assertFalse(StringUtils.containsIgnoreCase("a", null));
assertFalse(StringUtils.containsIgnoreCase("abc", null));
// Match len = 0
assertTrue(StringUtils.containsIgnoreCase("", ""));
assertTrue(StringUtils.containsIgnoreCase("a", ""));
assertTrue(StringUtils.containsIgnoreCase("abc", ""));
// Match len = 1
assertFalse(StringUtils.containsIgnoreCase("", "a"));
assertTrue(StringUtils.containsIgnoreCase("a", "a"));
assertTrue(StringUtils.containsIgnoreCase("abc", "a"));
assertFalse(StringUtils.containsIgnoreCase("", "A"));
assertTrue(StringUtils.containsIgnoreCase("a", "A"));
assertTrue(StringUtils.containsIgnoreCase("abc", "A"));
// Match len > 1
assertFalse(StringUtils.containsIgnoreCase("", "abc"));
assertFalse(StringUtils.containsIgnoreCase("a", "abc"));
assertTrue(StringUtils.containsIgnoreCase("xabcz", "abc"));
assertFalse(StringUtils.containsIgnoreCase("", "ABC"));
assertFalse(StringUtils.containsIgnoreCase("a", "ABC"));
assertTrue(StringUtils.containsIgnoreCase("xabcz", "ABC"));
}
@Test
void testContainsNone_CharArray() {
final String str1 = "a";
final String str2 = "b";
final String str3 = "ab.";
final char[] chars1 = { 'b' };
final char[] chars2 = { '.' };
final char[] chars3 = { 'c', 'd' };
final char[] emptyChars = {};
assertTrue(StringUtils.containsNone(null, (char[]) null));
assertTrue(StringUtils.containsNone("", (char[]) null));
assertTrue(StringUtils.containsNone(null, emptyChars));
assertTrue(StringUtils.containsNone(str1, emptyChars));
assertTrue(StringUtils.containsNone("", emptyChars));
assertTrue(StringUtils.containsNone("", chars1));
assertTrue(StringUtils.containsNone(str1, chars1));
assertTrue(StringUtils.containsNone(str1, chars2));
assertTrue(StringUtils.containsNone(str1, chars3));
assertFalse(StringUtils.containsNone(str2, chars1));
assertTrue(StringUtils.containsNone(str2, chars2));
assertTrue(StringUtils.containsNone(str2, chars3));
assertFalse(StringUtils.containsNone(str3, chars1));
assertFalse(StringUtils.containsNone(str3, chars2));
assertTrue(StringUtils.containsNone(str3, chars3));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContainsNone_CharArrayWithBadSupplementaryChars() {
// Test edge case: 1/2 of a (broken) supplementary char
assertTrue(StringUtils.containsNone(CharUSuppCharLow, CharU20001.toCharArray()));
assertEquals(-1, CharUSuppCharHigh.indexOf(CharU20001));
assertTrue(StringUtils.containsNone(CharUSuppCharHigh, CharU20001.toCharArray()));
assertEquals(-1, CharU20001.indexOf(CharUSuppCharLow));
assertTrue(StringUtils.containsNone(CharU20001, CharUSuppCharLow.toCharArray()));
assertEquals(0, CharU20001.indexOf(CharUSuppCharHigh));
assertFalse(StringUtils.containsNone(CharU20001, CharUSuppCharHigh.toCharArray()));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContainsNone_CharArrayWithSupplementaryChars() {
assertFalse(StringUtils.containsNone(CharU20000 + CharU20001, CharU20000.toCharArray()));
assertFalse(StringUtils.containsNone(CharU20000 + CharU20001, CharU20001.toCharArray()));
assertFalse(StringUtils.containsNone(CharU20000, CharU20000.toCharArray()));
// Sanity check:
assertEquals(-1, CharU20000.indexOf(CharU20001));
assertEquals(0, CharU20000.indexOf(CharU20001.charAt(0)));
assertEquals(-1, CharU20000.indexOf(CharU20001.charAt(1)));
// Test:
assertTrue(StringUtils.containsNone(CharU20000, CharU20001.toCharArray()));
assertTrue(StringUtils.containsNone(CharU20001, CharU20000.toCharArray()));
}
@Test
void testContainsNone_String() {
final String str1 = "a";
final String str2 = "b";
final String str3 = "ab.";
final String chars1 = "b";
final String chars2 = ".";
final String chars3 = "cd";
assertTrue(StringUtils.containsNone(null, (String) null));
assertTrue(StringUtils.containsNone("", (String) null));
assertTrue(StringUtils.containsNone(null, ""));
assertTrue(StringUtils.containsNone(str1, ""));
assertTrue(StringUtils.containsNone("", ""));
assertTrue(StringUtils.containsNone("", chars1));
assertTrue(StringUtils.containsNone(str1, chars1));
assertTrue(StringUtils.containsNone(str1, chars2));
assertTrue(StringUtils.containsNone(str1, chars3));
assertFalse(StringUtils.containsNone(str2, chars1));
assertTrue(StringUtils.containsNone(str2, chars2));
assertTrue(StringUtils.containsNone(str2, chars3));
assertFalse(StringUtils.containsNone(str3, chars1));
assertFalse(StringUtils.containsNone(str3, chars2));
assertTrue(StringUtils.containsNone(str3, chars3));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContainsNone_StringWithBadSupplementaryChars() {
// Test edge case: 1/2 of a (broken) supplementary char
assertTrue(StringUtils.containsNone(CharUSuppCharLow, CharU20001));
assertEquals(-1, CharUSuppCharHigh.indexOf(CharU20001));
assertTrue(StringUtils.containsNone(CharUSuppCharHigh, CharU20001));
assertEquals(-1, CharU20001.indexOf(CharUSuppCharLow));
assertTrue(StringUtils.containsNone(CharU20001, CharUSuppCharLow));
assertEquals(0, CharU20001.indexOf(CharUSuppCharHigh));
assertFalse(StringUtils.containsNone(CharU20001, CharUSuppCharHigh));
}
/**
* See https://www.oracle.com/technical-resources/articles/javase/supplementary.html
*/
@Test
void testContainsNone_StringWithSupplementaryChars() {
assertFalse(StringUtils.containsNone(CharU20000 + CharU20001, CharU20000));
assertFalse(StringUtils.containsNone(CharU20000 + CharU20001, CharU20001));
assertFalse(StringUtils.containsNone(CharU20000, CharU20000));
// Sanity check:
assertEquals(-1, CharU20000.indexOf(CharU20001));
assertEquals(0, CharU20000.indexOf(CharU20001.charAt(0)));
assertEquals(-1, CharU20000.indexOf(CharU20001.charAt(1)));
// Test:
assertTrue(StringUtils.containsNone(CharU20000, CharU20001));
assertTrue(StringUtils.containsNone(CharU20001, CharU20000));
}
@Test
void testContainsOnly_CharArray() {
final String str1 = "a";
final String str2 = "b";
final String str3 = "ab";
final char[] chars1 = { 'b' };
final char[] chars2 = { 'a' };
final char[] chars3 = { 'a', 'b' };
final char[] emptyChars = {};
assertFalse(StringUtils.containsOnly(null, (char[]) null));
assertFalse(StringUtils.containsOnly("", (char[]) null));
assertFalse(StringUtils.containsOnly(null, emptyChars));
assertFalse(StringUtils.containsOnly(str1, emptyChars));
assertTrue(StringUtils.containsOnly("", emptyChars));
assertTrue(StringUtils.containsOnly("", chars1));
assertFalse(StringUtils.containsOnly(str1, chars1));
assertTrue(StringUtils.containsOnly(str1, chars2));
assertTrue(StringUtils.containsOnly(str1, chars3));
assertTrue(StringUtils.containsOnly(str2, chars1));
assertFalse(StringUtils.containsOnly(str2, chars2));
assertTrue(StringUtils.containsOnly(str2, chars3));
assertFalse(StringUtils.containsOnly(str3, chars1));
assertFalse(StringUtils.containsOnly(str3, chars2));
assertTrue(StringUtils.containsOnly(str3, chars3));
}
@Test
void testContainsOnly_String() {
final String str1 = "a";
final String str2 = "b";
final String str3 = "ab";
final String chars1 = "b";
final String chars2 = "a";
final String chars3 = "ab";
assertFalse(StringUtils.containsOnly(null, (String) null));
assertFalse(StringUtils.containsOnly("", (String) null));
assertFalse(StringUtils.containsOnly(null, ""));
assertFalse(StringUtils.containsOnly(str1, ""));
assertTrue(StringUtils.containsOnly("", ""));
assertTrue(StringUtils.containsOnly("", chars1));
assertFalse(StringUtils.containsOnly(str1, chars1));
assertTrue(StringUtils.containsOnly(str1, chars2));
assertTrue(StringUtils.containsOnly(str1, chars3));
assertTrue(StringUtils.containsOnly(str2, chars1));
assertFalse(StringUtils.containsOnly(str2, chars2));
assertTrue(StringUtils.containsOnly(str2, chars3));
assertFalse(StringUtils.containsOnly(str3, chars1));
assertFalse(StringUtils.containsOnly(str3, chars2));
assertTrue(StringUtils.containsOnly(str3, chars3));
}
@Test
void testContainsWhitespace() {
assertFalse(StringUtils.containsWhitespace(""));
assertTrue(StringUtils.containsWhitespace(" "));
assertFalse(StringUtils.containsWhitespace("a"));
assertTrue(StringUtils.containsWhitespace("a "));
assertTrue(StringUtils.containsWhitespace(" a"));
assertTrue(StringUtils.containsWhitespace("a\t"));
assertTrue(StringUtils.containsWhitespace("\n"));
}
}
| StringUtilsContainsTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/OptionalQueryParamResource.java | {
"start": 353,
"end": 1446
} | class ____ {
public static final String HELLO = "hello ";
public static final String NOBODY = "nobody";
public static final String AND = " and ";
@Path("/one")
@GET
public String sayHelloToValue(@QueryParam("name") final Optional<String> name) {
return HELLO + name.orElse(NOBODY);
}
@Path("/list")
@GET
public String sayHelloToList(@QueryParam("name") final Optional<List<String>> names) {
return doSayHelloToCollection(names);
}
@Path("/set")
@GET
public String sayHelloToSet(@QueryParam("name") final Optional<Set<String>> names) {
return doSayHelloToCollection(names);
}
@Path("/sortedset")
@GET
public String sayHelloToSortedSet(@QueryParam("name") final Optional<SortedSet<String>> names) {
return doSayHelloToCollection(names);
}
private String doSayHelloToCollection(final Optional<? extends Collection<String>> names) {
return HELLO + names.map(l -> l.stream().collect(Collectors.joining(AND)))
.orElse(NOBODY);
}
}
| OptionalQueryParamResource |
java | elastic__elasticsearch | x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/CustomDateFormatTestCase.java | {
"start": 1219,
"end": 4228
} | class ____ extends BaseRestSqlTestCase {
private static String[] customFormats = new String[] {
"HH:mm yyyy-MM-dd",
"HH:mm:ss yyyy-dd-MM",
"HH:mm:ss VV",
"HH:mm:ss VV z",
"yyyy-MM-dd'T'HH:mm:ss'T'VV'T'z" };
private static String[] nowFunctions = new String[] { "NOW()", "CURRENT_DATE()", "CURRENT_TIME()", "CURRENT_TIMESTAMP()" };
private static String[] operators = new String[] { " < ", " > ", " <= ", " >= ", " = ", " != " };
public void testCustomDateFormatsWithNowFunctions() throws IOException {
createIndex();
String[] docs = new String[customFormats.length];
String zID = randomZone().getId();
StringBuilder datesConditions = new StringBuilder();
for (int i = 0; i < customFormats.length; i++) {
String field = "date_" + i;
String format = DateTimeFormatter.ofPattern(customFormats[i], Locale.ROOT).format(DateUtils.nowWithMillisResolution());
docs[i] = org.elasticsearch.core.Strings.format("""
{"%s":"%s"}
""", field, format);
datesConditions.append(i > 0 ? " OR " : "").append(field + randomFrom(operators) + randomFrom(nowFunctions));
}
index(docs);
Request request = new Request("POST", RestSqlTestCase.SQL_QUERY_REST_ENDPOINT);
final String query = "SELECT COUNT(*) AS c FROM test WHERE " + datesConditions.toString();
request.setEntity(new StringEntity(query(query).mode(Mode.PLAIN).timeZone(zID).toString(), ContentType.APPLICATION_JSON));
Response response = client().performRequest(request);
String expectedJsonSnippet = """
{"columns":[{"name":"c","type":"long"}],"rows":[[""";
try (InputStream content = response.getEntity().getContent()) {
String actualJson = new BytesArray(content.readAllBytes()).utf8ToString();
// we just need to get a response that's not a date parsing error
assertTrue(actualJson.startsWith(expectedJsonSnippet));
}
}
private static void createIndex() throws IOException {
Request request = new Request("PUT", "/test");
XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject();
index.startObject("mappings");
{
index.startObject("properties");
{
for (int i = 0; i < customFormats.length; i++) {
String fieldName = "date_" + i;
index.startObject(fieldName);
{
index.field("type", "date");
index.field("format", customFormats[i]);
}
index.endObject();
}
index.endObject();
}
}
index.endObject();
index.endObject();
request.setJsonEntity(Strings.toString(index));
client().performRequest(request);
}
}
| CustomDateFormatTestCase |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/service/dump/processor/DumpAllProcessor.java | {
"start": 1929,
"end": 7980
} | class ____ implements NacosTaskProcessor {
public DumpAllProcessor(ConfigInfoPersistService configInfoPersistService) {
this.configInfoPersistService = configInfoPersistService;
}
@Override
@SuppressWarnings("PMD.MethodTooLongRule")
public boolean process(NacosTask task) {
if (!(task instanceof DumpAllTask)) {
DEFAULT_LOG.error(
"[all-dump-error] ,invalid task type {},DumpAllProcessor should process DumpAllTask type.",
task.getClass().getSimpleName());
return false;
}
DumpAllTask dumpAllTask = (DumpAllTask) task;
long currentMaxId = configInfoPersistService.findConfigMaxId();
long lastMaxId = 0;
ThreadPoolExecutor executorService = null;
if (dumpAllTask.isStartUp()) {
executorService = new ThreadPoolExecutor(Runtime.getRuntime().availableProcessors(),
Runtime.getRuntime().availableProcessors(), 60L, TimeUnit.SECONDS,
new LinkedBlockingQueue<>(PropertyUtil.getAllDumpPageSize() * 2),
r -> new Thread(r, "dump all executor"), new ThreadPoolExecutor.CallerRunsPolicy());
} else {
executorService = new ThreadPoolExecutor(1, 1, 60L, TimeUnit.SECONDS, new SynchronousQueue<>(),
r -> new Thread(r, "dump all executor"), new ThreadPoolExecutor.CallerRunsPolicy());
}
DEFAULT_LOG.info("start dump all config-info...");
while (lastMaxId < currentMaxId) {
long start = System.currentTimeMillis();
Page<ConfigInfoWrapper> page = configInfoPersistService.findAllConfigInfoFragment(lastMaxId,
PropertyUtil.getAllDumpPageSize(), dumpAllTask.isStartUp());
long dbTimeStamp = System.currentTimeMillis();
if (page == null || page.getPageItems() == null || page.getPageItems().isEmpty()) {
break;
}
for (ConfigInfoWrapper cf : page.getPageItems()) {
lastMaxId = Math.max(cf.getId(), lastMaxId);
if (StringUtils.isBlank(cf.getTenant())) {
continue;
}
//if not start up, page query will not return content, check md5 and lastModified first ,if changed ,get single content info to dump.
if (!dumpAllTask.isStartUp()) {
final String groupKey = GroupKey2.getKey(cf.getDataId(), cf.getGroup(), cf.getTenant());
boolean newLastModified = cf.getLastModified() > ConfigCacheService.getLastModifiedTs(groupKey);
//check md5 & update local disk cache.
String localContentMd5 = ConfigCacheService.getContentMd5(groupKey);
boolean md5Update = !localContentMd5.equals(cf.getMd5());
if (newLastModified || md5Update) {
LogUtil.DUMP_LOG.info("[dump-all] find change config {}, {}, md5={}", groupKey,
cf.getLastModified(), cf.getMd5());
cf = configInfoPersistService.findConfigInfo(cf.getDataId(), cf.getGroup(), cf.getTenant());
} else {
continue;
}
}
if (cf == null) {
continue;
}
if (cf.getDataId().equals(ClientIpWhiteList.CLIENT_IP_WHITELIST_METADATA)) {
ClientIpWhiteList.load(cf.getContent());
}
if (cf.getDataId().equals(SwitchService.SWITCH_META_DATA_ID)) {
SwitchService.load(cf.getContent());
}
final String content = cf.getContent();
final String dataId = cf.getDataId();
final String group = cf.getGroup();
final String tenant = cf.getTenant();
final long lastModified = cf.getLastModified();
final String type = cf.getType();
final String encryptedDataKey = cf.getEncryptedDataKey();
executorService.execute(() -> {
final String md5Utf8 = MD5Utils.md5Hex(content, ENCODE_UTF8);
boolean result = ConfigCacheService.dumpWithMd5(dataId, group, tenant, content, md5Utf8,
lastModified, type, encryptedDataKey);
if (result) {
LogUtil.DUMP_LOG.info("[dump-all-ok] {}, {}, length={},md5UTF8={}",
GroupKey2.getKey(dataId, group), lastModified, content.length(), md5Utf8);
} else {
LogUtil.DUMP_LOG.info("[dump-all-error] {}", GroupKey2.getKey(dataId, group));
}
});
}
long diskStamp = System.currentTimeMillis();
DEFAULT_LOG.info("[all-dump] submit all task for {} / {}, dbTime={},diskTime={}", lastMaxId, currentMaxId,
(dbTimeStamp - start), (diskStamp - dbTimeStamp));
}
//wait all task are finished and then shutdown executor.
try {
int unfinishedTaskCount = 0;
while ((unfinishedTaskCount = executorService.getQueue().size() + executorService.getActiveCount()) > 0) {
DEFAULT_LOG.info("[all-dump] wait {} dump tasks to be finished", unfinishedTaskCount);
Thread.sleep(1000L);
}
executorService.shutdown();
} catch (Exception e) {
DEFAULT_LOG.error("[all-dump] wait dump tasks to be finished error", e);
}
DEFAULT_LOG.info("success to dump all config-info。");
return true;
}
final ConfigInfoPersistService configInfoPersistService;
}
| DumpAllProcessor |
java | apache__camel | components/camel-google/camel-google-pubsub/src/test/java/org/apache/camel/component/google/pubsub/integration/MessageOrderingIT.java | {
"start": 1527,
"end": 4063
} | class ____ extends PubsubTestSupport {
private static final String TOPIC_NAME = "camel.input-topic";
private static final String SUBSCRIPTION_NAME = "camel.input-topic-subscription";
@EndpointInject("direct:in")
private Endpoint directIn;
@EndpointInject("google-pubsub:{{project.id}}:" + TOPIC_NAME
+ "?messageOrderingEnabled=true&pubsubEndpoint=us-east1-pubsub.googleapis.com:443")
private Endpoint pubsubTopic;
@EndpointInject("google-pubsub:{{project.id}}:" + SUBSCRIPTION_NAME)
private Endpoint pubsubSubscription;
@EndpointInject("mock:input")
private MockEndpoint inputMock;
@EndpointInject("mock:output")
private MockEndpoint outputMock;
@Produce("direct:in")
private ProducerTemplate producer;
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(directIn).routeId("directRoute")
.setHeader(ORDERING_KEY, constant("orderkey"))
.to(pubsubTopic)
.to(inputMock);
from(pubsubSubscription).routeId("subscriptionRoute")
.autoStartup(false)
.to(outputMock);
}
};
}
@Override
public void createTopicSubscription() {
TopicName inputTopicName = TopicName.of(PROJECT_ID, TOPIC_NAME);
ProjectSubscriptionName projectInputSubscriptionName = ProjectSubscriptionName.of(PROJECT_ID, SUBSCRIPTION_NAME);
Topic inputTopic = Topic.newBuilder().setName(inputTopicName.toString()).build();
Subscription inputSubscription = Subscription.newBuilder()
.setName(projectInputSubscriptionName.toString())
.setTopic(inputTopic.getName())
.setEnableMessageOrdering(true)
.build();
createTopicSubscriptionPair(inputTopic, inputSubscription);
}
@Test
void orderedMessageDeliveryTest() throws Exception {
List<String> bodyList = Arrays.asList("1", "2", "3", "4", "5", "6");
inputMock.expectedMessageCount(6);
outputMock.expectedMessageCount(6);
for (String string : bodyList) {
producer.sendBody(string);
}
inputMock.assertIsSatisfied();
context.getRouteController().startRoute("subscriptionRoute");
outputMock.expectedBodiesReceived(bodyList);
outputMock.assertIsSatisfied();
}
}
| MessageOrderingIT |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/db/jdbc/FactoryMethodConnectionSourceTest.java | {
"start": 5483,
"end": 5684
} | class ____ {
public static DataSource factoryMethod02() {
return (DataSource) holder.get();
}
}
@SuppressWarnings("unused")
protected static final | DataSourceFactory |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionEvaluationReportTests.java | {
"start": 12574,
"end": 13159
} | class ____ extends SpringBootCondition implements ConfigurationCondition {
private final ConfigurationPhase phase;
private final boolean match;
TestMatchCondition(ConfigurationPhase phase, boolean match) {
this.phase = phase;
this.match = match;
}
@Override
public ConfigurationPhase getConfigurationPhase() {
return this.phase;
}
@Override
public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) {
return new ConditionOutcome(this.match, ClassUtils.getShortName(getClass()));
}
}
static | TestMatchCondition |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/AnySetterTest.java | {
"start": 6602,
"end": 6774
} | class ____ {
public int id;
@JsonAnySetter
public JsonNode extraData = new ObjectNode(null);
}
// [databind#4316]
static | AnySetter3394Bean |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/dev/testing/JunitTestRunner.java | {
"start": 6174,
"end": 34999
} | class
____ static QuarkusClassLoader firstDeploymentClassLoader;
// private static ClassLoader classLoaderForLoadingTests;
public JunitTestRunner(Builder builder) {
this.runId = builder.runId;
this.moduleInfo = builder.moduleInfo;
this.testApplication = builder.testApplication;
this.classScanResult = builder.classScanResult;
this.testClassUsages = builder.testClassUsages;
this.listeners = builder.listeners;
this.additionalFilters = builder.additionalFilters;
this.testState = builder.testState;
this.includeTags = new HashSet<>(builder.includeTags);
this.excludeTags = new HashSet<>(builder.excludeTags);
this.include = builder.include;
this.exclude = builder.exclude;
this.specificSelection = builder.specificSelection;
this.includeEngines = builder.includeEngines;
this.excludeEngines = builder.excludeEngines;
this.failingTestsOnly = builder.failingTestsOnly;
this.testType = builder.testType;
}
public Runnable prepare() {
try {
long start = System.currentTimeMillis();
ClassLoader old = Thread.currentThread().getContextClassLoader();
QuarkusClassLoader tcl = testApplication.createDeploymentClassLoader();
deploymentClassLoader = tcl;
LogCapturingOutputFilter logHandler = new LogCapturingOutputFilter(testApplication, true, true,
TestSupport.instance()
.get()::isDisplayTestOutput);
Thread.currentThread().setContextClassLoader(tcl);
Set<UniqueId> allDiscoveredIds = new HashSet<>();
Set<UniqueId> dynamicIds = new HashSet<>();
DiscoveryResult quarkusTestClasses = discoverTestClasses();
Launcher launcher = LauncherFactory.create(LauncherConfig.builder().build());
LauncherDiscoveryRequestBuilder launchBuilder = LauncherDiscoveryRequestBuilder.request()
.selectors(quarkusTestClasses.testClasses.stream().map(DiscoverySelectors::selectClass)
.collect(Collectors.toList()));
launchBuilder.filters(new PostDiscoveryFilter() {
@Override
public FilterResult apply(TestDescriptor testDescriptor) {
allDiscoveredIds.add(testDescriptor.getUniqueId());
return FilterResult.included(null);
}
});
if (classScanResult != null) {
launchBuilder.filters(testClassUsages.getTestsToRun(classScanResult.getChangedClassNames(), testState));
}
if (!includeTags.isEmpty()) {
launchBuilder.filters(TagFilter.includeTags(new ArrayList<>(includeTags)));
} else if (!excludeTags.isEmpty()) {
launchBuilder.filters(TagFilter.excludeTags(new ArrayList<>(excludeTags)));
}
if (specificSelection != null) {
if (specificSelection.startsWith("maven:")) {
launchBuilder.filters(new MavenSpecificSelectionFilter(specificSelection.substring("maven:".length())));
} else if (specificSelection.startsWith("gradle:")) {
launchBuilder.filters(new GradleSpecificSelectionFilter(specificSelection.substring("gradle:".length())));
} else {
log.error("Unknown specific selection, ignoring: " + specificSelection);
}
} else if (include != null) {
launchBuilder.filters(new RegexFilter(false, include));
} else if (exclude != null) {
launchBuilder.filters(new RegexFilter(true, exclude));
}
if (!includeEngines.isEmpty()) {
launchBuilder.filters(EngineFilter.includeEngines(includeEngines));
} else if (!excludeEngines.isEmpty()) {
launchBuilder.filters(EngineFilter.excludeEngines(excludeEngines));
}
if (!additionalFilters.isEmpty()) {
launchBuilder.filters(additionalFilters.toArray(new PostDiscoveryFilter[0]));
}
if (failingTestsOnly) {
launchBuilder.filters(new CurrentlyFailingFilter());
}
LauncherDiscoveryRequest request = launchBuilder
.build();
TestPlan testPlan = launcher.discover(request);
long toRun = testPlan.countTestIdentifiers(TestIdentifier::isTest);
for (TestRunListener listener : listeners) {
listener.runStarted(toRun);
}
return new Runnable() {
@Override
public void run() {
final ClassLoader origCl = Thread.currentThread().getContextClassLoader();
try {
synchronized (JunitTestRunner.this) {
testsRunning = true;
}
log.debug("Starting test run with " + testPlan.countTestIdentifiers((s) -> true) + " tests");
QuarkusConsole.addOutputFilter(logHandler);
final Deque<Set<String>> touchedClasses = new LinkedBlockingDeque<>();
Map<TestIdentifier, Long> startTimes = new HashMap<>();
final AtomicReference<Set<String>> startupClasses = new AtomicReference<>();
TracingHandler.setTracingHandler(new TracingHandler.TraceListener() {
@Override
public void touched(String className) {
Set<String> set = touchedClasses.peek();
if (set != null) {
set.add(className);
}
}
@Override
public void quarkusStarting() {
startupClasses.set(touchedClasses.peek());
}
});
Map<String, Map<UniqueId, TestResult>> resultsByClass = new HashMap<>();
AtomicReference<TestIdentifier> currentNonDynamicTest = new AtomicReference<>();
Thread.currentThread().setContextClassLoader(tcl);
launcher.execute(testPlan, new TestExecutionListener() {
@Override
public void executionStarted(TestIdentifier testIdentifier) {
if (aborted) {
return;
}
boolean dynamic = dynamicIds.contains(UniqueId.parse(testIdentifier.getUniqueId()));
if (!dynamic) {
currentNonDynamicTest.set(testIdentifier);
}
startTimes.put(testIdentifier, System.currentTimeMillis());
String testClassName = "";
Class<?> testClass = getTestClassFromSource(testIdentifier.getSource());
if (testClass != null) {
testClassName = testClass.getName();
}
for (TestRunListener listener : listeners) {
listener.testStarted(testIdentifier, testClassName);
}
touchedClasses.push(Collections.synchronizedSet(new HashSet<>()));
}
@Override
public void executionSkipped(TestIdentifier testIdentifier, String reason) {
if (aborted) {
return;
}
touchedClasses.pop();
Class<?> testClass = getTestClassFromSource(testIdentifier.getSource());
String displayName = getDisplayNameFromIdentifier(testIdentifier, testClass);
UniqueId id = UniqueId.parse(testIdentifier.getUniqueId());
if (testClass != null) {
Map<UniqueId, TestResult> results = resultsByClass.computeIfAbsent(testClass.getName(),
s -> new HashMap<>());
TestResult result = new TestResult(displayName, testClass.getName(),
toTagList(testIdentifier),
id, TestExecutionResult.aborted(null),
logHandler.captureOutput(), testIdentifier.isTest(), runId, 0, true);
results.put(id, result);
if (result.isTest()) {
for (TestRunListener listener : listeners) {
listener.testComplete(result);
}
}
}
touchedClasses.push(Collections.synchronizedSet(new HashSet<>()));
}
@Override
public void dynamicTestRegistered(TestIdentifier testIdentifier) {
dynamicIds.add(UniqueId.parse(testIdentifier.getUniqueId()));
for (TestRunListener listener : listeners) {
listener.dynamicTestRegistered(testIdentifier);
}
}
@Override
public void executionFinished(TestIdentifier testIdentifier,
TestExecutionResult testExecutionResult) {
if (aborted) {
return;
}
boolean dynamic = dynamicIds.contains(UniqueId.parse(testIdentifier.getUniqueId()));
Set<String> touched = touchedClasses.pop();
Class<?> testClass = getTestClassFromSource(testIdentifier.getSource());
String displayName = getDisplayNameFromIdentifier(testIdentifier, testClass);
UniqueId id = UniqueId.parse(testIdentifier.getUniqueId());
if (testClass == null) {
return;
}
String testClassName = testClass.getName();
if (testExecutionResult.getStatus() != TestExecutionResult.Status.ABORTED) {
for (Set<String> i : touchedClasses) {
//also add the parent touched classes
touched.addAll(i);
}
if (startupClasses.get() != null) {
touched.addAll(startupClasses.get());
}
if (testIdentifier.getSource().map(ClassSource.class::isInstance).orElse(false)) {
testClassUsages.updateTestData(testClassName, touched);
} else {
testClassUsages.updateTestData(testClassName, id, touched);
}
}
Map<UniqueId, TestResult> results = resultsByClass.computeIfAbsent(testClassName,
s -> new HashMap<>());
TestResult result = new TestResult(displayName, testClassName,
toTagList(testIdentifier),
id, testExecutionResult,
logHandler.captureOutput(), testIdentifier.isTest(), runId,
System.currentTimeMillis() - startTimes.get(testIdentifier), true);
if (!results.containsKey(id)) {
//if a child has failed we may have already marked the parent failed
results.put(id, result);
}
if (result.isTest()) {
for (TestRunListener listener : listeners) {
listener.testComplete(result);
}
if (dynamic && testExecutionResult.getStatus() == TestExecutionResult.Status.FAILED) {
//if it is dynamic we fail the parent as well for re-runs
RuntimeException failure = new RuntimeException("A child test failed");
failure.setStackTrace(new StackTraceElement[0]);
results.put(id,
new TestResult(currentNonDynamicTest.get().getDisplayName(),
result.getTestClass(),
toTagList(testIdentifier),
currentNonDynamicTest.get().getUniqueIdObject(),
TestExecutionResult.failed(failure), List.of(), false, runId, 0,
false));
results.put(UniqueId.parse(currentNonDynamicTest.get().getUniqueId()), result);
} else if (testExecutionResult.getStatus() == TestExecutionResult.Status.FAILED) {
Throwable throwable = testExecutionResult.getThrowable().get();
trimStackTrace(testClass, throwable);
for (var i : throwable.getSuppressed()) {
trimStackTrace(testClass, i);
}
}
} else if (testExecutionResult.getStatus() == TestExecutionResult.Status.FAILED) {
//if a parent fails we fail the children
Set<TestIdentifier> children = testPlan.getChildren(testIdentifier);
for (TestIdentifier child : children) {
UniqueId childId = UniqueId.parse(child.getUniqueId());
result = new TestResult(child.getDisplayName(), testClassName,
toTagList(testIdentifier),
childId,
testExecutionResult,
logHandler.captureOutput(), child.isTest(), runId,
System.currentTimeMillis() - startTimes.get(testIdentifier), true);
results.put(childId, result);
if (child.isTest()) {
for (TestRunListener listener : listeners) {
listener.testStarted(child, testClassName);
listener.testComplete(result);
}
}
}
Throwable throwable = testExecutionResult.getThrowable().get();
trimStackTrace(testClass, throwable);
for (var i : throwable.getSuppressed()) {
trimStackTrace(testClass, i);
}
}
}
@Override
public void reportingEntryPublished(TestIdentifier testIdentifier, ReportEntry entry) {
}
});
if (aborted) {
return;
}
testState.updateResults(resultsByClass);
testState.pruneDeletedTests(allDiscoveredIds, dynamicIds);
if (classScanResult != null) {
testState.classesRemoved(classScanResult.getDeletedClassNames());
}
QuarkusConsole.removeOutputFilter(logHandler);
for (TestRunListener listener : listeners) {
listener.runComplete(new TestRunResults(runId, classScanResult, classScanResult == null, start,
System.currentTimeMillis(), toResultsMap(testState.getCurrentResults())));
}
} finally {
try {
TracingHandler.setTracingHandler(null);
QuarkusConsole.removeOutputFilter(logHandler);
Thread.currentThread().setContextClassLoader(old);
tcl.close();
try {
quarkusTestClasses.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
} finally {
Thread.currentThread().setContextClassLoader(origCl);
synchronized (JunitTestRunner.this) {
testsRunning = false;
if (aborted) {
JunitTestRunner.this.notifyAll();
}
}
}
}
}
};
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private static List<String> toTagList(TestIdentifier testIdentifier) {
return testIdentifier
.getTags()
.stream()
.map(TestTag::getName)
.sorted()
.toList();
}
private Class<?> getTestClassFromSource(Optional<TestSource> optionalTestSource) {
if (optionalTestSource.isPresent()) {
var testSource = optionalTestSource.get();
if (testSource instanceof ClassSource classSource) {
return classSource.getJavaClass();
} else if (testSource instanceof MethodSource methodSource) {
return methodSource.getJavaClass();
} else if (testSource.getClass().getName().equals(ARCHUNIT_FIELDSOURCE_FQCN)) {
try {
return (Class<?>) testSource.getClass().getMethod("getJavaClass").invoke(testSource);
} catch (ReflectiveOperationException e) {
log.warnf(e, "Failed to read javaClass reflectively from %s. ArchUnit >= 0.23.0 is required.", testSource);
}
}
}
return null;
}
private String getDisplayNameFromIdentifier(TestIdentifier testIdentifier, Class<?> testClass) {
if (testIdentifier.getSource().isPresent() && testClass != null) {
var testSource = testIdentifier.getSource().get();
if (testSource instanceof ClassSource) {
return testIdentifier.getDisplayName();
} else if (testSource instanceof MethodSource
|| testSource.getClass().getName().equals(ARCHUNIT_FIELDSOURCE_FQCN)) {
return testClass.getSimpleName() + "#" + testIdentifier.getDisplayName();
}
}
return testIdentifier.getDisplayName();
}
    /**
     * Shortens the stack traces of {@code throwable} and its whole cause chain so that
     * reported failures focus on the test code itself.
     * <p>
     * Frames are indexed most-recent-first, so scanning from the end of the array finds
     * the outermost occurrence of a class. Two trims are applied per throwable:
     * <ol>
     *   <li>everything below the outermost frame of {@code testClass} is dropped
     *       (the JUnit platform/runner frames that invoked the test), and</li>
     *   <li>everything above the outermost {@code io.restassured} frame is dropped
     *       (REST Assured internals at the top of the trace).</li>
     * </ol>
     * Either trim is a no-op when no matching frame exists. Mutates the throwables in place.
     */
    private void trimStackTrace(Class<?> testClass, Throwable throwable) {
        if (testClass != null) {
            //first we cut all the platform stuff out of the stack trace
            Throwable cause = throwable;
            while (cause != null) {
                StackTraceElement[] st = cause.getStackTrace();
                // scan from the bottom: the first match is the outermost test-class frame
                for (int i = st.length - 1; i >= 0; --i) {
                    StackTraceElement elem = st[i];
                    if (elem.getClassName().equals(testClass.getName())) {
                        // keep st[0..i]: the test frame and everything it called
                        StackTraceElement[] newst = new StackTraceElement[i + 1];
                        System.arraycopy(st, 0, newst, 0, i + 1);
                        st = newst;
                        break;
                    }
                }
                //now cut out all the restassured internals
                //TODO: this should be pluggable
                for (int i = st.length - 1; i >= 0; --i) {
                    StackTraceElement elem = st[i];
                    if (elem.getClassName().startsWith("io.restassured")) {
                        // keep st[i..end]: drop the REST Assured frames above the entry point
                        StackTraceElement[] newst = new StackTraceElement[st.length - i];
                        System.arraycopy(st, i, newst, 0, st.length - i);
                        st = newst;
                        break;
                    }
                }
                cause.setStackTrace(st);
                cause = cause.getCause();
            }
        }
    }
public synchronized void abort() {
for (TestRunListener listener : listeners) {
try {
listener.runAborted();
} catch (Throwable t) {
log.error("Failed to invoke test listener", t);
}
}
aborted = true;
while (testsRunning) {
try {
wait();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
private Map<String, TestClassResult> toResultsMap(
Map<String, Map<UniqueId, TestResult>> resultsByClass) {
Map<String, TestClassResult> resultMap = new HashMap<>();
Set<String> classes = new HashSet<>(resultsByClass.keySet());
for (String clazz : classes) {
List<TestResult> passing = new ArrayList<>();
List<TestResult> failing = new ArrayList<>();
List<TestResult> skipped = new ArrayList<>();
long time = 0;
for (TestResult i : Optional.ofNullable(resultsByClass.get(clazz)).orElse(Collections.emptyMap()).values()) {
if (i.getTestExecutionResult().getStatus() == TestExecutionResult.Status.FAILED) {
failing.add(i);
} else if (i.getTestExecutionResult().getStatus() == TestExecutionResult.Status.ABORTED) {
skipped.add(i);
} else {
passing.add(i);
}
if (i.getUniqueId().getLastSegment().getType().equals("class")) {
time = i.time;
}
}
resultMap.put(clazz, new TestClassResult(clazz, passing, failing, skipped, time));
}
return resultMap;
}
private DiscoveryResult discoverTestClasses() {
//maven has a lot of rules around this and is configurable
//for now this is out of scope, we are just going to do annotation based discovery
//we will need to fix this sooner rather than later though
// Set the system property that is used for QuarkusComponentTest
System.setProperty(TEST_DISCOVERY_PROPERTY, "true");
if (moduleInfo.getTest().isEmpty()) {
return DiscoveryResult.EMPTY;
}
//we also only run tests from the current module, which we can also revisit later
Indexer indexer = new Indexer();
try (Stream<Path> files = Files.walk(Paths.get(moduleInfo.getTest().get().getClassesPath()))) {
files.filter(s -> s.getFileName().toString().endsWith(".class")).forEach(s -> {
try (InputStream in = Files.newInputStream(s)) {
indexer.index(in);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
} catch (IOException e) {
throw new RuntimeException(e);
}
Index index = indexer.complete();
//we now have all the classes by name
//these tests we never run
Set<String> integrationTestClasses = new HashSet<>();
for (AnnotationInstance i : index.getAnnotations(QUARKUS_INTEGRATION_TEST)) {
DotName name = i.target().asClass().name();
integrationTestClasses.add(name.toString());
for (ClassInfo clazz : index.getAllKnownSubclasses(name)) {
integrationTestClasses.add(clazz.name().toString());
}
}
Set<String> quarkusTestClasses = new HashSet<>();
for (var a : Arrays.asList(QUARKUS_TEST, QUARKUS_MAIN_TEST)) {
for (AnnotationInstance i : index.getAnnotations(a)) {
DotName name = i.target()
.asClass()
.name();
quarkusTestClasses.add(name.toString());
for (ClassInfo clazz : index.getAllKnownSubclasses(name)) {
if (!integrationTestClasses.contains(clazz.name().toString())) {
quarkusTestClasses.add(clazz.name().toString());
}
}
}
}
Set<String> quarkusComponentTestClasses = new HashSet<>();
for (AnnotationInstance a : index.getAnnotations(QUARKUS_COMPONENT_TEST)) {
DotName name = a.target().asClass().name();
quarkusComponentTestClasses.add(name.toString());
for (ClassInfo subclass : index.getAllKnownSubclasses(name)) {
quarkusComponentTestClasses.add(subclass.name().toString());
}
}
for (ClassInfo clazz : index.getKnownUsers(QUARKUS_COMPONENT_TEST_EXTENSION)) {
DotName name = clazz.name();
quarkusComponentTestClasses.add(name.toString());
for (ClassInfo subclass : index.getAllKnownSubclasses(name)) {
quarkusComponentTestClasses.add(subclass.name().toString());
}
}
// The FacadeClassLoader approach of loading test classes with the classloader we will use to run them can only work for `@QuarkusTest` and not main or integration tests
// Most logic in the JUnitRunner counts main tests as quarkus tests, so do a (mildly irritating) special pass to get the ones which are strictly @QuarkusTest
Set<String> quarkusTestClassesForFacadeClassLoader = new HashSet<>();
for (AnnotationInstance i : index.getAnnotations(QUARKUS_TEST)) {
DotName name = i.target()
.asClass()
.name();
quarkusTestClassesForFacadeClassLoader.add(name.toString());
for (ClassInfo clazz : index.getAllKnownSubclasses(name)) {
if (!integrationTestClasses.contains(clazz.name()
.toString())) {
quarkusTestClassesForFacadeClassLoader.add(clazz.name()
.toString());
}
}
}
Map<String, String> profiles = new HashMap<>();
for (AnnotationInstance i : index.getAnnotations(TEST_PROFILE)) {
DotName name = i.target()
.asClass()
.name();
// We could do the value as a class, but it wouldn't be in the right classloader
profiles.put(name.toString(), i.value().asString());
}
Set<DotName> allTestAnnotations = collectTestAnnotations(index);
// Order matters here for nested tests
// We assume we have evaluated the parent of a | private |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/server/reactive/JettyCoreServerHttpResponse.java | {
"start": 1890,
"end": 5251
} | class ____ extends AbstractServerHttpResponse implements ZeroCopyHttpOutputMessage {
private final Response response;
	/**
	 * Creates an adapter over a Jetty core {@code Response}, exposing the native Jetty
	 * header map through Spring's {@code HttpHeaders} via {@code JettyHeadersAdapter}.
	 * Any {@code Set-Cookie} fields Jetty has already placed on the response are
	 * migrated into this class's cookie map so they are re-applied in
	 * {@link #applyCookies()} at commit time.
	 */
	public JettyCoreServerHttpResponse(Response response, JettyDataBufferFactory dataBufferFactory) {
		super(dataBufferFactory, new HttpHeaders(new JettyHeadersAdapter(response.getHeaders())));
		this.response = response;
		// Remove all existing cookies from the response and add them to the cookie map, to be added back later
		for (ListIterator<HttpField> it = this.response.getHeaders().listIterator(); it.hasNext();) {
			HttpField field = it.next();
			if (field instanceof HttpCookieUtils.SetCookieHttpField setCookieHttpField) {
				HttpCookie httpCookie = setCookieHttpField.getHttpCookie();
				// NOTE(review): getSameSite() is dereferenced unconditionally — if Jetty returns
				// null for cookies without a SameSite attribute this throws NPE; confirm the API contract
				ResponseCookie responseCookie = ResponseCookie.from(httpCookie.getName(), httpCookie.getValue())
						.httpOnly(httpCookie.isHttpOnly())
						.domain(httpCookie.getDomain())
						.maxAge(httpCookie.getMaxAge())
						.sameSite(httpCookie.getSameSite().name())
						.secure(httpCookie.isSecure())
						.partitioned(httpCookie.isPartitioned())
						.build();
				addCookie(responseCookie);
				it.remove();
			}
		}
	}
	@Override
	protected Mono<Void> writeWithInternal(Publisher<? extends DataBuffer> body) {
		// concatMap preserves buffer order and writes one buffer at a time
		return Flux.from(body)
				.concatMap(this::sendDataBuffer)
				.then();
	}
	@Override
	protected Mono<Void> writeAndFlushWithInternal(Publisher<? extends Publisher<? extends DataBuffer>> body) {
		// each inner publisher is written fully (in order) before the next one starts
		return Flux.from(body).concatMap(this::writeWithInternal).then();
	}
	@Override
	protected void applyStatusCode() {
		// propagate the status set on this adapter to the native Jetty response, if any
		HttpStatusCode status = getStatusCode();
		if (status != null){
			this.response.setStatus(status.value());
		}
	}
	@Override
	protected void applyHeaders() {
		// Intentionally empty: HttpHeaders wraps the native Jetty header map directly
		// (see constructor), so header writes have already reached the response.
	}
	@Override
	protected void applyCookies() {
		// write every accumulated ResponseCookie (including those captured in the
		// constructor) back onto the native response as Set-Cookie fields
		getCookies().values().stream()
				.flatMap(List::stream)
				.forEach(cookie -> Response.addCookie(this.response, new ResponseHttpCookie(cookie)));
	}
	@Override
	public Mono<Void> writeWith(Path file, long position, long count) {
		// zero-copy file send: Jetty copies the file region straight into the response;
		// completion/failure of the copy is surfaced through the Completable
		Callback.Completable callback = new Callback.Completable();
		Mono<Void> mono = Mono.fromFuture(callback);
		try {
			// first arg (ByteBufferPool) is null — presumably falls back to a default pool; TODO confirm
			Content.copy(Content.Source.from(null, file, position, count), this.response, callback);
		}
		catch (Throwable th) {
			// synchronous setup failure is routed through the same callback path
			callback.failed(th);
		}
		// commit the response (status/headers/cookies) before exposing the write result
		return doCommit(() -> mono);
	}
	/**
	 * Writes one {@code DataBuffer} to the Jetty response, chunk by chunk.
	 * <p>
	 * The buffer may expose several underlying {@code ByteBuffer}s; Jetty's
	 * {@code IteratingCallback} serializes the async writes so the next chunk is only
	 * submitted after the previous one completes. On both success and failure the
	 * iterator is closed and the buffer released exactly once. Deferred so nothing is
	 * read until subscription.
	 */
	private Mono<Void> sendDataBuffer(DataBuffer dataBuffer) {
		return Mono.defer(() -> {
			DataBuffer.ByteBufferIterator byteBufferIterator = dataBuffer.readableByteBuffers();
			Callback.Completable callback = new Callback.Completable();
			new IteratingCallback() {
				@Override
				protected Action process() {
					if (!byteBufferIterator.hasNext()) {
						return Action.SUCCEEDED;
					}
					// non-last write; 'this' is invoked when the write completes, driving the next iteration
					response.write(false, byteBufferIterator.next(), this);
					return Action.SCHEDULED;
				}
				@Override
				protected void onCompleteSuccess() {
					byteBufferIterator.close();
					DataBufferUtils.release(dataBuffer);
					callback.complete(null);
				}
				@Override
				protected void onCompleteFailure(Throwable cause) {
					byteBufferIterator.close();
					DataBufferUtils.release(dataBuffer);
					callback.failed(cause);
				}
			}.iterate();
			return Mono.fromFuture(callback);
		});
	}
	// Unchecked cast is the established contract of this accessor: the caller
	// states the expected native type via the type parameter.
	@SuppressWarnings("unchecked")
	@Override
	public <T> T getNativeResponse() {
		return (T) this.response;
	}
private static | JettyCoreServerHttpResponse |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/saga/NotSupportedSagaProcessor.java | {
"start": 1165,
"end": 2450
/**
 * Saga processor for the NOT_SUPPORTED propagation mode: the wrapped processor runs
 * outside of any saga. The current saga coordinator (if one exists) is detached from
 * the exchange for the duration of processing and restored afterwards.
 */
class ____ extends SagaProcessor {

    public NotSupportedSagaProcessor(CamelContext camelContext, Processor childProcessor, CamelSagaService sagaService,
                                     SagaCompletionMode completionMode, CamelSagaStep step) {
        super(camelContext, childProcessor, sagaService, completionMode, step);
        // NOT_SUPPORTED means "run outside the saga", so step/completion configuration is rejected up front
        if (!step.isEmpty()) {
            throw new IllegalArgumentException("Saga configuration is not allowed when propagation is set to NOT_SUPPORTED");
        }
        if (completionMode != null && completionMode != SagaCompletionMode.defaultCompletionMode()) {
            throw new IllegalArgumentException("CompletionMode cannot be specified when propagation is NOT_SUPPORTED");
        }
    }

    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        // Detach any active coordinator, run the child, then reattach before signalling done
        getCurrentSagaCoordinator(exchange).whenComplete((coordinator, ex) -> ifNotException(ex, exchange, callback, () -> {
            setCurrentSagaCoordinator(exchange, null);
            super.process(exchange, doneSync -> {
                // Restore existing coordinator
                setCurrentSagaCoordinator(exchange, coordinator);
                callback.done(false);
            });
        }));
        // always asynchronous: completion is reported via the callback above
        return false;
    }
}
| NotSupportedSagaProcessor |
java | apache__kafka | connect/api/src/test/java/org/apache/kafka/connect/data/SchemaBuilderTest.java | {
"start": 1228,
"end": 14940
/**
 * Unit tests for {@code SchemaBuilder}: each primitive builder is checked with and
 * without metadata, invalid default values must raise {@code SchemaBuilderException},
 * and struct/array/map builders are verified for field wiring, defaults and null
 * argument handling.
 */
class ____ {

    private static final String NAME = "name";
    private static final Integer VERSION = 2;
    private static final String DOC = "doc";
    // null parameter map — schemas built without parameters report null, not an empty map
    private static final Map<String, String> NO_PARAMS = null;

    @Test
    public void testInt8Builder() {
        Schema schema = SchemaBuilder.int8().build();
        assertTypeAndDefault(schema, Schema.Type.INT8, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.int8().name(NAME).optional().defaultValue((byte) 12)
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.INT8, true, (byte) 12);
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testInt8BuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.int8().defaultValue("invalid"));
    }

    @Test
    public void testInt16Builder() {
        Schema schema = SchemaBuilder.int16().build();
        assertTypeAndDefault(schema, Schema.Type.INT16, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.int16().name(NAME).optional().defaultValue((short) 12)
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.INT16, true, (short) 12);
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testInt16BuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.int16().defaultValue("invalid"));
    }

    @Test
    public void testInt32Builder() {
        Schema schema = SchemaBuilder.int32().build();
        assertTypeAndDefault(schema, Schema.Type.INT32, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.int32().name(NAME).optional().defaultValue(12)
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.INT32, true, 12);
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testInt32BuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.int32().defaultValue("invalid"));
    }

    @Test
    public void testInt64Builder() {
        Schema schema = SchemaBuilder.int64().build();
        assertTypeAndDefault(schema, Schema.Type.INT64, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.int64().name(NAME).optional().defaultValue((long) 12)
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.INT64, true, (long) 12);
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testInt64BuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.int64().defaultValue("invalid"));
    }

    @Test
    public void testFloatBuilder() {
        Schema schema = SchemaBuilder.float32().build();
        assertTypeAndDefault(schema, Schema.Type.FLOAT32, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.float32().name(NAME).optional().defaultValue(12.f)
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.FLOAT32, true, 12.f);
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testFloatBuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.float32().defaultValue("invalid"));
    }

    @Test
    public void testDoubleBuilder() {
        Schema schema = SchemaBuilder.float64().build();
        assertTypeAndDefault(schema, Schema.Type.FLOAT64, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.float64().name(NAME).optional().defaultValue(12.0)
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.FLOAT64, true, 12.0);
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testDoubleBuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.float64().defaultValue("invalid"));
    }

    @Test
    public void testBooleanBuilder() {
        Schema schema = SchemaBuilder.bool().build();
        assertTypeAndDefault(schema, Schema.Type.BOOLEAN, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.bool().name(NAME).optional().defaultValue(true)
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.BOOLEAN, true, true);
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testBooleanBuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.bool().defaultValue("invalid"));
    }

    @Test
    public void testStringBuilder() {
        Schema schema = SchemaBuilder.string().build();
        assertTypeAndDefault(schema, Schema.Type.STRING, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.string().name(NAME).optional().defaultValue("a default string")
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.STRING, true, "a default string");
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testStringBuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.string().defaultValue(true));
    }

    @Test
    public void testBytesBuilder() {
        Schema schema = SchemaBuilder.bytes().build();
        assertTypeAndDefault(schema, Schema.Type.BYTES, false, null);
        assertNoMetadata(schema);

        schema = SchemaBuilder.bytes().name(NAME).optional().defaultValue("a default byte array".getBytes())
                .version(VERSION).doc(DOC).build();
        assertTypeAndDefault(schema, Schema.Type.BYTES, true, "a default byte array".getBytes());
        assertMetadata(schema, NAME, VERSION, DOC, NO_PARAMS);
    }

    @Test
    public void testBytesBuilderInvalidDefault() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.bytes().defaultValue("a string, not bytes"));
    }

    @Test
    public void testParameters() {
        // parameters can be added one at a time or as a whole map; both must be preserved
        Map<String, String> expectedParameters = new HashMap<>();
        expectedParameters.put("foo", "val");
        expectedParameters.put("bar", "baz");

        Schema schema = SchemaBuilder.string().parameter("foo", "val").parameter("bar", "baz").build();
        assertTypeAndDefault(schema, Schema.Type.STRING, false, null);
        assertMetadata(schema, null, null, null, expectedParameters);

        schema = SchemaBuilder.string().parameters(expectedParameters).build();
        assertTypeAndDefault(schema, Schema.Type.STRING, false, null);
        assertMetadata(schema, null, null, null, expectedParameters);
    }

    @Test
    public void testStructBuilder() {
        // field index reflects insertion order
        Schema schema = SchemaBuilder.struct()
                .field("field1", Schema.INT8_SCHEMA)
                .field("field2", Schema.INT8_SCHEMA)
                .build();
        assertTypeAndDefault(schema, Schema.Type.STRUCT, false, null);
        assertEquals(2, schema.fields().size());
        assertEquals("field1", schema.fields().get(0).name());
        assertEquals(0, schema.fields().get(0).index());
        assertEquals(Schema.INT8_SCHEMA, schema.fields().get(0).schema());
        assertEquals("field2", schema.fields().get(1).name());
        assertEquals(1, schema.fields().get(1).index());
        assertEquals(Schema.INT8_SCHEMA, schema.fields().get(1).schema());
        assertNoMetadata(schema);
    }

    @Test
    public void testNonStructCantHaveFields() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.int8().field("field", SchemaBuilder.int8().build()));
    }

    @Test
    public void testArrayBuilder() {
        Schema schema = SchemaBuilder.array(Schema.INT8_SCHEMA).build();
        assertTypeAndDefault(schema, Schema.Type.ARRAY, false, null);
        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
        assertNoMetadata(schema);

        // Default value
        List<Byte> defArray = List.of((byte) 1, (byte) 2);
        schema = SchemaBuilder.array(Schema.INT8_SCHEMA).defaultValue(defArray).build();
        assertTypeAndDefault(schema, Schema.Type.ARRAY, false, defArray);
        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
        assertNoMetadata(schema);
    }

    @Test
    public void testArrayBuilderInvalidDefault() {
        // Array, but wrong embedded type
        assertThrows(SchemaBuilderException.class,
            () -> SchemaBuilder.array(Schema.INT8_SCHEMA).defaultValue(List.of("string")).build());
    }

    @Test
    public void testMapBuilder() {
        // SchemaBuilder should also pass the check
        Schema schema = SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA);
        assertTypeAndDefault(schema, Schema.Type.MAP, false, null);
        assertEquals(Schema.INT8_SCHEMA, schema.keySchema());
        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
        assertNoMetadata(schema);

        schema = SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA).build();
        assertTypeAndDefault(schema, Schema.Type.MAP, false, null);
        assertEquals(Schema.INT8_SCHEMA, schema.keySchema());
        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
        assertNoMetadata(schema);

        // Default value
        Map<Byte, Byte> defMap = Map.of((byte) 5, (byte) 10);
        schema = SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA)
                .defaultValue(defMap).build();
        assertTypeAndDefault(schema, Schema.Type.MAP, false, defMap);
        assertEquals(Schema.INT8_SCHEMA, schema.keySchema());
        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
        assertNoMetadata(schema);
    }

    @Test
    public void testMapBuilderInvalidDefault() {
        // Map, but wrong embedded type
        Map<Byte, String> defMap = Map.of((byte) 5, "foo");
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA)
                .defaultValue(defMap).build());
    }

    @Test
    public void testEmptyStruct() {
        // a struct with no fields is valid both as a builder and once built
        final SchemaBuilder emptyStructSchemaBuilder = SchemaBuilder.struct();
        assertEquals(0, emptyStructSchemaBuilder.fields().size());
        new Struct(emptyStructSchemaBuilder);

        final Schema emptyStructSchema = emptyStructSchemaBuilder.build();
        assertEquals(0, emptyStructSchema.fields().size());
        new Struct(emptyStructSchema);
    }

    @Test
    public void testDuplicateFields() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.struct()
                .name("testing")
                .field("id", SchemaBuilder.string().doc("").build())
                .field("id", SchemaBuilder.string().doc("").build())
                .build());
    }

    @Test
    public void testDefaultFieldsSameValueOverwriting() {
        // setting a metadata field to the same value twice is allowed
        final SchemaBuilder schemaBuilder = SchemaBuilder.string().name("testing").version(123);

        schemaBuilder.name("testing");
        schemaBuilder.version(123);

        assertEquals("testing", schemaBuilder.name());
    }

    @Test
    public void testDefaultFieldsDifferentValueOverwriting() {
        // setting a metadata field to a different value must fail
        final SchemaBuilder schemaBuilder = SchemaBuilder.string().name("testing").version(123);

        schemaBuilder.name("testing");
        assertThrows(SchemaBuilderException.class, () -> schemaBuilder.version(456));
    }

    @Test
    public void testFieldNameNull() {
        assertThrows(SchemaBuilderException.class,
            () -> SchemaBuilder.struct().field(null, Schema.STRING_SCHEMA).build());
    }

    @Test
    public void testFieldSchemaNull() {
        assertThrows(SchemaBuilderException.class,
            () -> SchemaBuilder.struct().field("fieldName", null).build());
    }

    @Test
    public void testArraySchemaNull() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.array(null).build());
    }

    @Test
    public void testMapKeySchemaNull() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.map(null, Schema.STRING_SCHEMA).build());
    }

    @Test
    public void testMapValueSchemaNull() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.map(Schema.STRING_SCHEMA, null).build());
    }

    @Test
    public void testTypeNotNull() {
        assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.type(null));
    }

    /**
     * Asserts the schema's type, optionality and default value. BYTES defaults are
     * compared via ByteBuffer wrappers because byte[] lacks a value-based equals.
     */
    private void assertTypeAndDefault(Schema schema, Schema.Type type, boolean optional, Object defaultValue) {
        assertEquals(type, schema.type());
        assertEquals(optional, schema.isOptional());
        if (type == Schema.Type.BYTES) {
            // byte[] is not comparable, need to wrap to check correctly
            if (defaultValue == null)
                assertNull(schema.defaultValue());
            else
                assertEquals(ByteBuffer.wrap((byte[]) defaultValue), ByteBuffer.wrap((byte[]) schema.defaultValue()));
        } else {
            assertEquals(defaultValue, schema.defaultValue());
        }
    }

    // Asserts all four metadata attributes (name/version/doc/parameters) at once.
    private void assertMetadata(Schema schema, String name, Integer version, String doc, Map<String, String> parameters) {
        assertEquals(name, schema.name());
        assertEquals(version, schema.version());
        assertEquals(doc, schema.doc());
        assertEquals(parameters, schema.parameters());
    }

    // Convenience: a schema built without metadata reports null for every attribute.
    private void assertNoMetadata(Schema schema) {
        assertMetadata(schema, null, null, null, null);
    }
}
| SchemaBuilderTest |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/lifecycle/beancreationeventlistener/OffendingMethodListener.java | {
"start": 292,
"end": 711
/**
 * Test fixture: a {@code BeanCreatedEventListener} with {@code @Inject} setter methods.
 * The static flags record whether the listener was constructed and whether it actually
 * ran — presumably so the surrounding spec can assert on listener lifecycle; confirm
 * against the test that reads them.
 */
class ____ implements BeanCreatedEventListener<B> {

    // set from the constructor when the container instantiates the listener
    static boolean initialized;
    // set when onCreated is invoked for a B bean
    static boolean executed;

    OffendingMethodListener() {
        initialized = true;
    }

    @Inject
    void setA(A a) {}

    @Inject
    void setC(Provider<C> cProvider) {}

    @Override
    public B onCreated(BeanCreatedEvent<B> event) {
        executed = true;
        // pass the bean through unchanged
        return event.getBean();
    }
}
| OffendingMethodListener |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValueSourceReaderTypeConversionTests.java | {
"start": 92310,
"end": 92667
/**
 * Test converter that parses a UTF-8 string {@code BytesRef} into an IP value
 * via {@code StringUtils.parseIP}.
 */
class ____ extends TestBytesRefToBytesRefConverter {

    TestStringToIPConverter(DriverContext driverContext) {
        super(driverContext);
    }

    @Override
    BytesRef convertByteRef(BytesRef bytesRef) {
        return StringUtils.parseIP(bytesRef.utf8ToString());
    }
}
/**
* Utility | TestStringToIPConverter |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java | {
"start": 2458,
"end": 8397
} | class ____ {
public static final Logger LOG =
LoggerFactory.getLogger(LocalDistributedCacheManager.class);
private List<String> localArchives = new ArrayList<String>();
private List<String> localFiles = new ArrayList<String>();
private List<String> localClasspaths = new ArrayList<String>();
private List<File> symlinksCreated = new ArrayList<File>();
private URLClassLoader classLoaderCreated = null;
private boolean setupCalled = false;
/**
* Set up the distributed cache by localizing the resources, and updating
* the configuration with references to the localized resources.
* @param conf
* @throws IOException
*/
public synchronized void setup(JobConf conf, JobID jobId) throws IOException {
File workDir = new File(System.getProperty("user.dir"));
// Generate YARN local resources objects corresponding to the distributed
// cache configuration
Map<String, LocalResource> localResources =
new LinkedHashMap<String, LocalResource>();
MRApps.setupDistributedCache(conf, localResources);
// Generating unique numbers for FSDownload.
// Find which resources are to be put on the local classpath
Map<String, Path> classpaths = new HashMap<String, Path>();
Path[] archiveClassPaths = JobContextImpl.getArchiveClassPaths(conf);
if (archiveClassPaths != null) {
for (Path p : archiveClassPaths) {
classpaths.put(p.toUri().getPath().toString(), p);
}
}
Path[] fileClassPaths = JobContextImpl.getFileClassPaths(conf);
if (fileClassPaths != null) {
for (Path p : fileClassPaths) {
classpaths.put(p.toUri().getPath().toString(), p);
}
}
// Localize the resources
LocalDirAllocator localDirAllocator =
new LocalDirAllocator(MRConfig.LOCAL_DIR);
FileContext localFSFileContext = FileContext.getLocalFSFileContext();
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
ExecutorService exec = null;
try {
ThreadFactory tf = new ThreadFactoryBuilder()
.setNameFormat("LocalDistributedCacheManager Downloader #%d")
.build();
exec = HadoopExecutors.newCachedThreadPool(tf);
Path destPath = localDirAllocator.getLocalPathForWrite(".", conf);
Map<LocalResource, Future<Path>> resourcesToPaths = Maps.newHashMap();
for (LocalResource resource : localResources.values()) {
Path destPathForDownload = new Path(destPath,
jobId.toString() + "_" + UUID.randomUUID().toString());
Callable<Path> download =
new FSDownload(localFSFileContext, ugi, conf, destPathForDownload,
resource);
Future<Path> future = exec.submit(download);
resourcesToPaths.put(resource, future);
}
for (Entry<String, LocalResource> entry : localResources.entrySet()) {
LocalResource resource = entry.getValue();
Path path;
try {
path = resourcesToPaths.get(resource).get();
} catch (InterruptedException e) {
throw new IOException(e);
} catch (ExecutionException e) {
throw new IOException(e);
}
String pathString = path.toUri().toString();
String link = entry.getKey();
String target = new File(path.toUri()).getPath();
symlink(workDir, target, link);
if (resource.getType() == LocalResourceType.ARCHIVE) {
localArchives.add(pathString);
} else if (resource.getType() == LocalResourceType.FILE) {
localFiles.add(pathString);
} else if (resource.getType() == LocalResourceType.PATTERN) {
//PATTERN is not currently used in local mode
throw new IllegalArgumentException("Resource type PATTERN is not " +
"implemented yet. " + resource.getResource());
}
Path resourcePath;
try {
resourcePath = resource.getResource().toPath();
} catch (URISyntaxException e) {
throw new IOException(e);
}
LOG.info(String.format("Localized %s as %s", resourcePath, path));
String cp = resourcePath.toUri().getPath();
if (classpaths.keySet().contains(cp)) {
localClasspaths.add(path.toUri().getPath().toString());
}
}
} finally {
if (exec != null) {
exec.shutdown();
}
}
// Update the configuration object with localized data.
if (!localArchives.isEmpty()) {
conf.set(MRJobConfig.CACHE_LOCALARCHIVES, StringUtils
.arrayToString(localArchives.toArray(new String[localArchives
.size()])));
}
if (!localFiles.isEmpty()) {
conf.set(MRJobConfig.CACHE_LOCALFILES, StringUtils
.arrayToString(localFiles.toArray(new String[localArchives
.size()])));
}
setupCalled = true;
}
/**
* Utility method for creating a symlink and warning on errors.
*
* If link is null, does nothing.
*/
private void symlink(File workDir, String target, String link)
throws IOException {
if (link != null) {
link = workDir.toString() + Path.SEPARATOR + link;
File flink = new File(link);
if (!flink.exists()) {
LOG.info(String.format("Creating symlink: %s <- %s", target, link));
if (0 != FileUtil.symLink(target, link)) {
LOG.warn(String.format("Failed to create symlink: %s <- %s", target,
link));
} else {
symlinksCreated.add(new File(link));
}
}
}
}
/**
* Are the resources that should be added to the classpath?
* Should be called after setup().
*
*/
public synchronized boolean hasLocalClasspaths() {
if (!setupCalled) {
throw new IllegalStateException(
"hasLocalClasspaths() should be called after setup()");
}
return !localClasspaths.isEmpty();
}
/**
* Creates a | LocalDistributedCacheManager |
java | apache__camel | test-infra/camel-test-infra-azure-common/src/main/java/org/apache/camel/test/infra/azure/common/services/AzureInfraService.java | {
"start": 1018,
"end": 1389
} | interface ____ extends InfrastructureService {
/**
* Gets the credentials for the test service
*
* @return
*/
AzureCredentialsHolder azureCredentials();
String accountName();
String accessKey();
String host();
int port();
default String credentialType() {
return "SHARED_KEY_CREDENTIAL";
}
}
| AzureInfraService |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/interceptor/RollbackRuleAttributeTests.java | {
"start": 5266,
"end": 5456
} | class ____ extends RuntimeException {
}
// Name intentionally starts with MyException (including package) but does
// NOT extend MyException.
@SuppressWarnings("serial")
static | MyException |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/orphan/onetomany/embedded/OneToManyInEmbeddedTest.java | {
"start": 2858,
"end": 3059
} | class ____ {
@Id
private int id;
public int getId() {
return id;
}
public ChildEntity() {
}
public ChildEntity(int id) {
this.id = id;
}
}
@Embeddable
public static | ChildEntity |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/EndpointMetadataGenerationTests.java | {
"start": 2296,
"end": 10534
} | class ____ extends AbstractMetadataGenerationTests {
@Test
void simpleEndpoint() {
ConfigurationMetadata metadata = compile(SimpleEndpoint.class);
assertThat(metadata).has(Metadata.withGroup("management.endpoint.simple").fromSource(SimpleEndpoint.class));
assertThat(metadata).has(access("simple", TestAccess.UNRESTRICTED));
assertThat(metadata).has(cacheTtl("simple"));
assertThat(metadata.getItems()).hasSize(3);
}
@Test
void enabledEndpoint() {
ConfigurationMetadata metadata = compile(EnabledEndpoint.class);
assertThat(metadata).has(Metadata.withGroup("management.endpoint.enabled").fromSource(EnabledEndpoint.class));
assertThat(metadata).has(access("enabled", TestAccess.UNRESTRICTED));
assertThat(metadata.getItems()).hasSize(2);
}
@Test
void noAccessEndpoint() {
ConfigurationMetadata metadata = compile(NoAccessEndpoint.class);
assertThat(metadata).has(Metadata.withGroup("management.endpoint.noaccess").fromSource(NoAccessEndpoint.class));
assertThat(metadata).has(access("noaccess", TestAccess.NONE));
assertThat(metadata.getItems()).hasSize(2);
}
@Test
void readOnlyAccessEndpoint() {
ConfigurationMetadata metadata = compile(ReadOnlyAccessEndpoint.class);
assertThat(metadata)
.has(Metadata.withGroup("management.endpoint.readonlyaccess").fromSource(ReadOnlyAccessEndpoint.class));
assertThat(metadata).has(access("readonlyaccess", TestAccess.READ_ONLY));
assertThat(metadata.getItems()).hasSize(2);
}
@Test
void unrestrictedAccessEndpoint() {
ConfigurationMetadata metadata = compile(UnrestrictedAccessEndpoint.class);
assertThat(metadata).has(Metadata.withGroup("management.endpoint.unrestrictedaccess")
.fromSource(UnrestrictedAccessEndpoint.class));
assertThat(metadata).has(access("unrestrictedaccess", TestAccess.UNRESTRICTED));
assertThat(metadata.getItems()).hasSize(2);
}
@Test
void customPropertiesEndpoint() {
ConfigurationMetadata metadata = compile(CustomPropertiesEndpoint.class);
assertThat(metadata)
.has(Metadata.withGroup("management.endpoint.customprops").fromSource(CustomPropertiesEndpoint.class));
assertThat(metadata).has(Metadata.withProperty("management.endpoint.customprops.name")
.ofType(String.class)
.withDefaultValue("test"));
assertThat(metadata).has(access("customprops", TestAccess.UNRESTRICTED));
assertThat(metadata).has(cacheTtl("customprops"));
assertThat(metadata.getItems()).hasSize(4);
}
@Test
void specificEndpoint() {
ConfigurationMetadata metadata = compile(SpecificEndpoint.class);
assertThat(metadata).has(Metadata.withGroup("management.endpoint.specific").fromSource(SpecificEndpoint.class));
assertThat(metadata).has(access("specific", TestAccess.READ_ONLY));
assertThat(metadata).has(cacheTtl("specific"));
assertThat(metadata.getItems()).hasSize(3);
}
@Test
void camelCaseEndpoint() {
ConfigurationMetadata metadata = compile(CamelCaseEndpoint.class);
assertThat(metadata)
.has(Metadata.withGroup("management.endpoint.pascal-case").fromSource(CamelCaseEndpoint.class));
assertThat(metadata).has(defaultAccess("PascalCase", "pascal-case", TestAccess.UNRESTRICTED));
assertThat(metadata.getItems()).hasSize(2);
}
@Test
void incrementalEndpointBuildChangeDefaultAccess() {
TestProject project = new TestProject(IncrementalEndpoint.class);
ConfigurationMetadata metadata = project.compile();
assertThat(metadata)
.has(Metadata.withGroup("management.endpoint.incremental").fromSource(IncrementalEndpoint.class));
assertThat(metadata).has(access("incremental", TestAccess.UNRESTRICTED));
assertThat(metadata).has(cacheTtl("incremental"));
assertThat(metadata.getItems()).hasSize(3);
project.replaceText(IncrementalEndpoint.class, "id = \"incremental\"",
"id = \"incremental\", defaultAccess = org.springframework.boot.configurationsample.TestAccess.NONE");
metadata = project.compile();
assertThat(metadata)
.has(Metadata.withGroup("management.endpoint.incremental").fromSource(IncrementalEndpoint.class));
assertThat(metadata).has(access("incremental", TestAccess.NONE));
assertThat(metadata).has(cacheTtl("incremental"));
assertThat(metadata.getItems()).hasSize(3);
}
@Test
void incrementalEndpointBuildChangeCacheFlag() {
TestProject project = new TestProject(IncrementalEndpoint.class);
ConfigurationMetadata metadata = project.compile();
assertThat(metadata)
.has(Metadata.withGroup("management.endpoint.incremental").fromSource(IncrementalEndpoint.class));
assertThat(metadata).has(access("incremental", TestAccess.UNRESTRICTED));
assertThat(metadata).has(cacheTtl("incremental"));
assertThat(metadata.getItems()).hasSize(3);
project.replaceText(IncrementalEndpoint.class, "@Nullable String param", "String param");
metadata = project.compile();
assertThat(metadata)
.has(Metadata.withGroup("management.endpoint.incremental").fromSource(IncrementalEndpoint.class));
assertThat(metadata).has(access("incremental", TestAccess.UNRESTRICTED));
assertThat(metadata.getItems()).hasSize(2);
}
@Test
void incrementalEndpointBuildChangeAccessOfSpecificEndpoint() {
TestProject project = new TestProject(SpecificEndpoint.class);
ConfigurationMetadata metadata = project.compile();
assertThat(metadata).has(Metadata.withGroup("management.endpoint.specific").fromSource(SpecificEndpoint.class));
assertThat(metadata).has(access("specific", TestAccess.READ_ONLY));
assertThat(metadata).has(cacheTtl("specific"));
assertThat(metadata.getItems()).hasSize(3);
project.replaceText(SpecificEndpoint.class, "defaultAccess = TestAccess.READ_ONLY",
"defaultAccess = TestAccess.NONE");
metadata = project.compile();
assertThat(metadata).has(Metadata.withGroup("management.endpoint.specific").fromSource(SpecificEndpoint.class));
assertThat(metadata).has(access("specific", TestAccess.NONE));
assertThat(metadata).has(cacheTtl("specific"));
assertThat(metadata.getItems()).hasSize(3);
}
@Test
void shouldTolerateEndpointWithSameId() {
ConfigurationMetadata metadata = compile(SimpleEndpoint.class, SimpleEndpoint2.class);
assertThat(metadata).has(Metadata.withGroup("management.endpoint.simple").fromSource(SimpleEndpoint.class));
assertThat(metadata).has(defaultAccess("simple", "simple", TestAccess.UNRESTRICTED));
assertThat(metadata).has(cacheTtl("simple"));
assertThat(metadata.getItems()).hasSize(3);
}
@Test
void shouldFailIfEndpointWithSameIdButWithConflictingEnabledByDefaultSetting() {
assertThatRuntimeException().isThrownBy(() -> compile(SimpleEndpoint.class, SimpleEndpoint3.class))
.havingRootCause()
.isInstanceOf(IllegalStateException.class)
.withMessage(
"Existing property 'management.endpoint.simple.access' from type org.springframework.boot.configurationsample.endpoint.SimpleEndpoint has a conflicting value. Existing value: unrestricted, new value from type org.springframework.boot.configurationsample.endpoint.SimpleEndpoint3: none");
}
@Test
void endpointWithNullableParameter() {
ConfigurationMetadata metadata = compile(NullableParameterEndpoint.class);
assertThat(metadata)
.has(Metadata.withGroup("management.endpoint.nullable").fromSource(NullableParameterEndpoint.class));
assertThat(metadata).has(access("nullable", TestAccess.UNRESTRICTED));
assertThat(metadata).has(cacheTtl("nullable"));
assertThat(metadata.getItems()).hasSize(3);
}
private Metadata.MetadataItemCondition access(String endpointId, TestAccess defaultValue) {
return defaultAccess(endpointId, endpointId, defaultValue);
}
private Metadata.MetadataItemCondition defaultAccess(String endpointId, String endpointSuffix,
TestAccess defaultValue) {
return Metadata.withAccess("management.endpoint." + endpointSuffix + ".access")
.withDefaultValue(defaultValue.name().toLowerCase(Locale.ENGLISH))
.withDescription("Permitted level of access for the %s endpoint.".formatted(endpointId));
}
private Metadata.MetadataItemCondition cacheTtl(String endpointId) {
return Metadata.withProperty("management.endpoint." + endpointId + ".cache.time-to-live")
.ofType(Duration.class)
.withDefaultValue("0ms")
.withDescription("Maximum time that a response can be cached.");
}
}
| EndpointMetadataGenerationTests |
java | processing__processing4 | core/src/processing/core/PApplet.java | {
"start": 1491,
"end": 2858
} | class ____ all sketches that use processing.core.
* <p/>
* The <A HREF="https://github.com/processing/processing/wiki/Window-Size-and-Full-Screen">
* Window Size and Full Screen</A> page on the Wiki has useful information
* about sizing, multiple displays, full screen, etc.
* <p/>
* Processing uses active mode rendering. All animation tasks happen on the
* "Processing Animation Thread". The setup() and draw() methods are handled
* by that thread, and events (like mouse movement and key presses, which are
* fired by the event dispatch thread or EDT) are queued to be safely handled
* at the end of draw().
* <p/>
* Starting with 3.0a6, blit operations are on the EDT, so as not to cause
* GUI problems with Swing and AWT. In the case of the default renderer, the
* sketch renders to an offscreen image, then the EDT is asked to bring that
* image to the screen.
* <p/>
* For code that needs to run on the EDT, use EventQueue.invokeLater(). When
* doing so, be careful to synchronize between that code and the Processing
* animation thread. That is, you can't call Processing methods from the EDT
* or at any random time from another thread. Use of a callback function or
* the registerXxx() methods in PApplet can help ensure that your code doesn't
* do something naughty.
* <p/>
* As of Processing 3.0, we have removed Applet as the base | for |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/listener/ExporterChangeListener.java | {
"start": 980,
"end": 1447
} | interface ____ {
/**
* This method is called when an Exporter object is exported.
*
* @param exporter The Exporter object that has been exported.
*/
void onExporterChangeExport(Exporter<?> exporter);
/**
* This method is called when an Exporter object is unexported.
*
* @param exporter The Exporter object that has been unexported.
*/
void onExporterChangeUnExport(Exporter<?> exporter);
}
| ExporterChangeListener |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxOverTime.java | {
"start": 1502,
"end": 4248
} | class ____ extends TimeSeriesAggregateFunction implements OptionalArgument {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
Expression.class,
"MaxOverTime",
MaxOverTime::new
);
@FunctionInfo(
returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "unsigned_long", "version" },
description = "Calculates the maximum over time value of a field.",
type = FunctionType.TIME_SERIES_AGGREGATE,
appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.PREVIEW, version = "9.2.0") },
preview = true,
examples = { @Example(file = "k8s-timeseries", tag = "max_over_time") }
)
public MaxOverTime(
Source source,
@Param(
name = "field",
type = {
"aggregate_metric_double",
"boolean",
"double",
"integer",
"long",
"date",
"date_nanos",
"ip",
"keyword",
"text",
"unsigned_long",
"version" }
) Expression field,
@Param(
name = "window",
type = { "time_duration" },
description = "the time window over which to compute the maximum",
optional = true
) Expression window
) {
this(source, field, Literal.TRUE, Objects.requireNonNullElse(window, NO_WINDOW));
}
public MaxOverTime(Source source, Expression field, Expression filter, Expression window) {
super(source, field, filter, window, emptyList());
}
private MaxOverTime(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
@Override
public MaxOverTime withFilter(Expression filter) {
return new MaxOverTime(source(), field(), filter, window());
}
@Override
protected NodeInfo<MaxOverTime> info() {
return NodeInfo.create(this, MaxOverTime::new, field(), filter(), window());
}
@Override
public MaxOverTime replaceChildren(List<Expression> newChildren) {
return new MaxOverTime(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2));
}
@Override
protected TypeResolution resolveType() {
return perTimeSeriesAggregation().resolveType();
}
@Override
public DataType dataType() {
return perTimeSeriesAggregation().dataType();
}
@Override
public Max perTimeSeriesAggregation() {
return new Max(source(), field(), filter(), window());
}
}
| MaxOverTime |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/stream/StreamReferencesArgs.java | {
"start": 684,
"end": 1572
} | interface ____<T> {
/**
* Defines DELREF reference policy for consumer groups when trimming.
* When trimming, removes all references from consumer groups’ PEL
*
* Requires <b>Redis 8.2.0 and higher.</b>
*
* @return arguments object
*/
T removeReferences();
/**
* Defines KEEPREF reference policy for consumer groups when trimming.
* When trimming, preserves references in consumer groups’ PEL
*
* Requires <b>Redis 8.2.0 and higher.</b>
*
* @return arguments object
*/
T keepReferences();
/**
* Defines ACKED reference policy for consumer groups when trimming.
* When trimming, only removes entries acknowledged by all consumer groups
*
* Requires <b>Redis 8.2.0 and higher.</b>
*
* @return arguments object
*/
T removeAcknowledgedOnly();
}
| StreamReferencesArgs |
java | elastic__elasticsearch | client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/search/SearchRequestExecutor.java | {
"start": 531,
"end": 602
} | interface ____ {
boolean search(String source);
}
| SearchRequestExecutor |
java | quarkusio__quarkus | integration-tests/test-extension/tests/src/test/java/io/quarkus/it/extension/FinalFieldReflectionInGraalITCase.java | {
"start": 231,
"end": 458
} | class ____ {
@Test
public void testFieldAndGetterReflectionOnEntityFromServlet() {
RestAssured.when().get("/core/reflection/final").then()
.body(is("OK"));
}
}
| FinalFieldReflectionInGraalITCase |
java | grpc__grpc-java | core/src/test/java/io/grpc/internal/FakeCallCredentials.java | {
"start": 795,
"end": 1217
} | class ____ extends CallCredentials {
private final Metadata headers;
public <T> FakeCallCredentials(Metadata.Key<T> key, T value) {
headers = new Metadata();
headers.put(key, value);
}
@Override
public void applyRequestMetadata(
CallCredentials.RequestInfo requestInfo,
Executor appExecutor,
CallCredentials.MetadataApplier applier) {
applier.apply(headers);
}
}
| FakeCallCredentials |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/merge/MapMergeTest.java | {
"start": 392,
"end": 834
} | class ____
{
@JsonMerge
public Map<String,Object> values;
protected MergedMap() {
values = new LinkedHashMap<>();
values.put("a", "x");
}
public MergedMap(String a, String b) {
values = new LinkedHashMap<>();
values.put(a, b);
}
public MergedMap(Map<String,Object> src) {
values = src;
}
}
static | MergedMap |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsDataInputStream.java | {
"start": 1570,
"end": 3716
} | class ____ extends FSDataInputStream {
public HdfsDataInputStream(DFSInputStream in) {
super(in);
}
public HdfsDataInputStream(CryptoInputStream in) {
super(in);
Preconditions.checkArgument(in.getWrappedStream() instanceof DFSInputStream,
"CryptoInputStream should wrap a DFSInputStream");
}
private DFSInputStream getDFSInputStream() {
if (in instanceof CryptoInputStream) {
return (DFSInputStream) ((CryptoInputStream) in).getWrappedStream();
}
return (DFSInputStream) in;
}
/**
* Get a reference to the wrapped output stream. We always want to return the
* actual underlying InputStream, even when we're using a CryptoStream. e.g.
* in the delegated methods below.
*
* @return the underlying output stream
*/
public InputStream getWrappedStream() {
return in;
}
/**
* Get the datanode from which the stream is currently reading.
*/
public DatanodeInfo getCurrentDatanode() {
return getDFSInputStream().getCurrentDatanode();
}
/**
* Get the block containing the target position.
*/
public ExtendedBlock getCurrentBlock() {
return getDFSInputStream().getCurrentBlock();
}
/**
* Get the collection of blocks that has already been located.
*/
public List<LocatedBlock> getAllBlocks() throws IOException {
return getDFSInputStream().getAllBlocks();
}
/**
* Get the visible length of the file. It will include the length of the last
* block even if that is in UnderConstruction state.
*
* @return The visible length of the file.
*/
public long getVisibleLength() {
return getDFSInputStream().getFileLength();
}
/**
* Get statistics about the reads which this DFSInputStream has done.
* Note that because HdfsDataInputStream is buffered, these stats may
* be higher than you would expect just by adding up the number of
* bytes read through HdfsDataInputStream.
*/
public ReadStatistics getReadStatistics() {
return getDFSInputStream().getReadStatistics();
}
public void clearReadStatistics() {
getDFSInputStream().clearReadStatistics();
}
}
| HdfsDataInputStream |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DigitalSignatureEndpointBuilderFactory.java | {
"start": 20404,
"end": 23319
} | interface ____ {
/**
* Crypto (JCE) (camel-crypto)
* Sign and verify exchanges using the Signature Service of the Java
* Cryptographic Extension (JCE).
*
* Category: security,transformation
* Since: 2.3
* Maven coordinates: org.apache.camel:camel-crypto
*
* @return the dsl builder for the headers' name.
*/
default DigitalSignatureHeaderNameBuilder crypto() {
return DigitalSignatureHeaderNameBuilder.INSTANCE;
}
/**
* Crypto (JCE) (camel-crypto)
* Sign and verify exchanges using the Signature Service of the Java
* Cryptographic Extension (JCE).
*
* Category: security,transformation
* Since: 2.3
* Maven coordinates: org.apache.camel:camel-crypto
*
* Syntax: <code>crypto:cryptoOperation:name</code>
*
* Path parameter: cryptoOperation (required)
* Set the Crypto operation from that supplied after the crypto scheme
* in the endpoint uri e.g. crypto:sign sets sign as the operation.
* There are 2 enums and the value can be one of: sign, verify
*
* Path parameter: name (required)
* The logical name of this operation.
*
* @param path cryptoOperation:name
* @return the dsl builder
*/
default DigitalSignatureEndpointBuilder crypto(String path) {
return DigitalSignatureEndpointBuilderFactory.endpointBuilder("crypto", path);
}
/**
* Crypto (JCE) (camel-crypto)
* Sign and verify exchanges using the Signature Service of the Java
* Cryptographic Extension (JCE).
*
* Category: security,transformation
* Since: 2.3
* Maven coordinates: org.apache.camel:camel-crypto
*
* Syntax: <code>crypto:cryptoOperation:name</code>
*
* Path parameter: cryptoOperation (required)
* Set the Crypto operation from that supplied after the crypto scheme
* in the endpoint uri e.g. crypto:sign sets sign as the operation.
* There are 2 enums and the value can be one of: sign, verify
*
* Path parameter: name (required)
* The logical name of this operation.
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path cryptoOperation:name
* @return the dsl builder
*/
default DigitalSignatureEndpointBuilder crypto(String componentName, String path) {
return DigitalSignatureEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the Crypto (JCE) component.
*/
public static | DigitalSignatureBuilders |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetNodesToAttributesRequest.java | {
"start": 1414,
"end": 2130
} | class ____ {
public static GetNodesToAttributesRequest newInstance(Set<String> hostNames) {
GetNodesToAttributesRequest request =
Records.newRecord(GetNodesToAttributesRequest.class);
request.setHostNames(hostNames);
return request;
}
/**
* Set hostnames for which mapping is required.
*
* @param hostnames Set of hostnames.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract void setHostNames(Set<String> hostnames);
/**
* Get hostnames for which mapping is required.
*
* @return Set of hostnames.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract Set<String> getHostNames();
}
| GetNodesToAttributesRequest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/util/functions/StreamingFunctionUtils.java | {
"start": 2205,
"end": 9065
} | class ____ {
@SuppressWarnings("unchecked")
public static <T> void setOutputType(
Function userFunction,
TypeInformation<T> outTypeInfo,
ExecutionConfig executionConfig) {
Preconditions.checkNotNull(outTypeInfo);
Preconditions.checkNotNull(executionConfig);
while (true) {
if (trySetOutputType(userFunction, outTypeInfo, executionConfig)) {
break;
}
// inspect if the user function is wrapped, then unwrap and try again if we can snapshot
// the inner function
if (userFunction instanceof WrappingFunction) {
userFunction = ((WrappingFunction<?>) userFunction).getWrappedFunction();
} else {
break;
}
}
}
@SuppressWarnings("unchecked")
private static <T> boolean trySetOutputType(
Function userFunction,
TypeInformation<T> outTypeInfo,
ExecutionConfig executionConfig) {
Preconditions.checkNotNull(outTypeInfo);
Preconditions.checkNotNull(executionConfig);
if (OutputTypeConfigurable.class.isAssignableFrom(userFunction.getClass())) {
((OutputTypeConfigurable<T>) userFunction).setOutputType(outTypeInfo, executionConfig);
return true;
}
return false;
}
public static void snapshotFunctionState(
StateSnapshotContext context, OperatorStateBackend backend, Function userFunction)
throws Exception {
Preconditions.checkNotNull(context);
Preconditions.checkNotNull(backend);
while (true) {
if (trySnapshotFunctionState(context, backend, userFunction)) {
break;
}
// inspect if the user function is wrapped, then unwrap and try again if we can snapshot
// the inner function
if (userFunction instanceof WrappingFunction) {
userFunction = ((WrappingFunction<?>) userFunction).getWrappedFunction();
} else {
break;
}
}
}
private static boolean trySnapshotFunctionState(
StateSnapshotContext context, OperatorStateBackend backend, Function userFunction)
throws Exception {
if (userFunction instanceof CheckpointedFunction) {
((CheckpointedFunction) userFunction).snapshotState(context);
return true;
}
if (userFunction instanceof ListCheckpointed) {
@SuppressWarnings("unchecked")
List<Serializable> partitionableState =
((ListCheckpointed<Serializable>) userFunction)
.snapshotState(
context.getCheckpointId(), context.getCheckpointTimestamp());
// We are using JavaSerializer from the flink-runtime module here. This is very naughty
// and
// we shouldn't be doing it because ideally nothing in the API modules/connector depends
// directly on flink-runtime. We are doing it here because we need to maintain backwards
// compatibility with old state and because we will have to rework/remove this code
// soon.
ListStateDescriptor<Serializable> listStateDescriptor =
new ListStateDescriptor<>(
DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME,
new JavaSerializer<>());
ListState<Serializable> listState = backend.getListState(listStateDescriptor);
if (null != partitionableState) {
try {
listState.update(partitionableState);
} catch (Exception e) {
listState.clear();
throw new Exception(
"Could not write partitionable state to operator " + "state backend.",
e);
}
} else {
listState.clear();
}
return true;
}
return false;
}
public static void restoreFunctionState(
StateInitializationContext context, Function userFunction) throws Exception {
Preconditions.checkNotNull(context);
while (true) {
if (tryRestoreFunction(context, userFunction)) {
break;
}
// inspect if the user function is wrapped, then unwrap and try again if we can restore
// the inner function
if (userFunction instanceof WrappingFunction) {
userFunction = ((WrappingFunction<?>) userFunction).getWrappedFunction();
} else {
break;
}
}
}
private static boolean tryRestoreFunction(
StateInitializationContext context, Function userFunction) throws Exception {
if (userFunction instanceof CheckpointedFunction) {
((CheckpointedFunction) userFunction).initializeState(context);
return true;
}
if (context.isRestored() && userFunction instanceof ListCheckpointed) {
@SuppressWarnings("unchecked")
ListCheckpointed<Serializable> listCheckpointedFun =
(ListCheckpointed<Serializable>) userFunction;
// We are using JavaSerializer from the flink-runtime module here. This is very naughty
// and
// we shouldn't be doing it because ideally nothing in the API modules/connector depends
// directly on flink-runtime. We are doing it here because we need to maintain backwards
// compatibility with old state and because we will have to rework/remove this code
// soon.
ListStateDescriptor<Serializable> listStateDescriptor =
new ListStateDescriptor<>(
DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME,
new JavaSerializer<>());
ListState<Serializable> listState =
context.getOperatorStateStore().getListState(listStateDescriptor);
List<Serializable> list = new ArrayList<>();
for (Serializable serializable : listState.get()) {
list.add(serializable);
}
try {
listCheckpointedFun.restoreState(list);
} catch (Exception e) {
throw new Exception("Failed to restore state to function: " + e.getMessage(), e);
}
return true;
}
return false;
}
/** Private constructor to prevent instantiation. */
private StreamingFunctionUtils() {
throw new RuntimeException();
}
}
| StreamingFunctionUtils |
java | apache__camel | tooling/camel-tooling-model/src/main/java/org/apache/camel/tooling/model/ReleaseModel.java | {
"start": 851,
"end": 1667
} | class ____ {
protected String version;
protected String date;
protected String eol;
protected String kind;
protected String jdk;
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getDate() {
return date;
}
public void setDate(String date) {
this.date = date;
}
public String getEol() {
return eol;
}
public void setEol(String eol) {
this.eol = eol;
}
public String getKind() {
return kind;
}
public void setKind(String kind) {
this.kind = kind;
}
public String getJdk() {
return jdk;
}
public void setJdk(String jdk) {
this.jdk = jdk;
}
}
| ReleaseModel |
java | quarkusio__quarkus | extensions/opentelemetry/runtime/src/test/java/io/quarkus/opentelemetry/runtime/tracing/DropTargetsSamplerTest.java | {
"start": 598,
"end": 3033
} | class ____ {
@Test
void testDropTargets() {
CountingSampler countingSampler = new CountingSampler();
var sut = new DropTargetsSampler(countingSampler, Set.of("/q/swagger-ui", "/q/swagger-ui*"));
assertEquals(SamplingResult.recordAndSample(), getShouldSample(sut, "/other"));
assertEquals(1, countingSampler.count.get());
assertEquals(SamplingResult.drop(), getShouldSample(sut, "/q/swagger-ui"));
assertEquals(1, countingSampler.count.get());
assertEquals(SamplingResult.drop(), getShouldSample(sut, "/q/swagger-ui/"));
assertEquals(1, countingSampler.count.get());
assertEquals(SamplingResult.drop(), getShouldSample(sut, "/q/swagger-ui/whatever"));
assertEquals(1, countingSampler.count.get());
assertEquals(SamplingResult.recordAndSample(), getShouldSample(sut, "/q/test"));
assertEquals(2, countingSampler.count.get());
}
@Test
void testDropTargetsWildcards() {
CountingSampler countingSampler = new CountingSampler();
var sut = new DropTargetsSampler(countingSampler, Set.of("/q/dev-ui", "/q/dev-ui/*"));
assertEquals(SamplingResult.recordAndSample(), getShouldSample(sut, "/other"));
assertEquals(1, countingSampler.count.get());
assertEquals(SamplingResult.recordAndSample(), getShouldSample(sut, "/q/dev-ui-test"));
assertEquals(2, countingSampler.count.get());
assertEquals(SamplingResult.drop(), getShouldSample(sut, "/q/dev-ui"));
assertEquals(2, countingSampler.count.get());
assertEquals(SamplingResult.drop(), getShouldSample(sut, "/q/dev-ui/"));
assertEquals(2, countingSampler.count.get());
assertEquals(SamplingResult.drop(), getShouldSample(sut, "/q/dev-ui/whatever"));
assertEquals(2, countingSampler.count.get());
assertEquals(SamplingResult.drop(), getShouldSample(sut, "/q/dev-ui/whatever/wherever/whenever"));
assertEquals(2, countingSampler.count.get());
assertEquals(SamplingResult.recordAndSample(), getShouldSample(sut, "/q/test"));
assertEquals(3, countingSampler.count.get());
}
private static SamplingResult getShouldSample(DropTargetsSampler sut, String target) {
return sut.shouldSample(null, null, null, SpanKind.SERVER,
Attributes.of(URL_PATH, target), null);
}
private static final | DropTargetsSamplerTest |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/cluster/remote/MemberReportHandler.java | {
"start": 1735,
"end": 2769
} | class ____ extends RequestHandler<MemberReportRequest, MemberReportResponse> {
private final ServerMemberManager memberManager;
public MemberReportHandler(ServerMemberManager memberManager) {
this.memberManager = memberManager;
}
@Override
@Secured(resource = "report", signType = SignType.SPECIFIED, apiType = ApiType.INNER_API)
public MemberReportResponse handle(MemberReportRequest request, RequestMeta meta) throws NacosException {
Member node = request.getNode();
if (!node.check()) {
MemberReportResponse result = new MemberReportResponse();
result.setErrorInfo(400, "Node information is illegal");
return result;
}
LoggerUtils.printIfDebugEnabled(Loggers.CLUSTER, "node state report, receive info : {}", node);
node.setState(NodeState.UP);
node.setFailAccessCnt(0);
memberManager.update(node);
return new MemberReportResponse(memberManager.getSelf());
}
}
| MemberReportHandler |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/event/GenericApplicationListenerAdapter.java | {
"start": 1408,
"end": 4241
} | class ____ implements GenericApplicationListener {
private static final Map<Class<?>, ResolvableType> eventTypeCache = new ConcurrentReferenceHashMap<>();
private final ApplicationListener<ApplicationEvent> delegate;
private final @Nullable ResolvableType declaredEventType;
/**
* Create a new GenericApplicationListener for the given delegate.
* @param delegate the delegate listener to be invoked
*/
@SuppressWarnings("unchecked")
public GenericApplicationListenerAdapter(ApplicationListener<?> delegate) {
Assert.notNull(delegate, "Delegate listener must not be null");
this.delegate = (ApplicationListener<ApplicationEvent>) delegate;
this.declaredEventType = resolveDeclaredEventType(this.delegate);
}
@Override
public void onApplicationEvent(ApplicationEvent event) {
this.delegate.onApplicationEvent(event);
}
@Override
@SuppressWarnings("unchecked")
public boolean supportsEventType(ResolvableType eventType) {
if (this.delegate instanceof GenericApplicationListener gal) {
return gal.supportsEventType(eventType);
}
else if (this.delegate instanceof SmartApplicationListener sal) {
Class<? extends ApplicationEvent> eventClass = (Class<? extends ApplicationEvent>) eventType.resolve();
return (eventClass != null && sal.supportsEventType(eventClass));
}
else {
return (this.declaredEventType == null || this.declaredEventType.isAssignableFrom(eventType));
}
}
@Override
public boolean supportsSourceType(@Nullable Class<?> sourceType) {
return (!(this.delegate instanceof SmartApplicationListener sal) || sal.supportsSourceType(sourceType));
}
@Override
public int getOrder() {
return (this.delegate instanceof Ordered ordered ? ordered.getOrder() : Ordered.LOWEST_PRECEDENCE);
}
@Override
public String getListenerId() {
return (this.delegate instanceof SmartApplicationListener sal ? sal.getListenerId() : "");
}
private static @Nullable ResolvableType resolveDeclaredEventType(ApplicationListener<ApplicationEvent> listener) {
ResolvableType declaredEventType = resolveDeclaredEventType(listener.getClass());
if (declaredEventType == null || declaredEventType.isAssignableFrom(ApplicationEvent.class)) {
Class<?> targetClass = AopUtils.getTargetClass(listener);
if (targetClass != listener.getClass()) {
declaredEventType = resolveDeclaredEventType(targetClass);
}
}
return declaredEventType;
}
static @Nullable ResolvableType resolveDeclaredEventType(Class<?> listenerType) {
ResolvableType eventType = eventTypeCache.get(listenerType);
if (eventType == null) {
eventType = ResolvableType.forClass(listenerType).as(ApplicationListener.class).getGeneric();
eventTypeCache.put(listenerType, eventType);
}
return (eventType != ResolvableType.NONE ? eventType : null);
}
}
| GenericApplicationListenerAdapter |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/LeastSquaresOnlineRegression.java | {
"start": 4374,
"end": 5694
} | class ____ {
private double count;
private final double[] stats;
RunningStatistics(int size) {
count = 0;
stats = new double[size];
}
void add(double[] values, double weight) {
assert values.length == stats.length
: "passed values for add are not of expected length; unable to update statistics for online least squares regression";
count += weight;
double alpha = weight / count;
double beta = 1 - alpha;
for (int i = 0; i < stats.length; i++) {
stats[i] = stats[i] * beta + alpha * values[i];
}
}
void remove(double[] values, double weight) {
assert values.length == stats.length
: "passed values for removal are not of expected length; unable to update statistics for online least squares regression";
count = Math.max(count - weight, 0);
if (count == 0) {
Arrays.fill(stats, 0);
return;
}
double alpha = weight / count;
double beta = 1 + alpha;
for (int i = 0; i < stats.length; i++) {
stats[i] = stats[i] * beta - alpha * values[i];
}
}
}
}
| RunningStatistics |
java | apache__camel | test-infra/camel-test-infra-keycloak/src/main/java/org/apache/camel/test/infra/keycloak/services/KeycloakLocalContainerInfraService.java | {
"start": 3082,
"end": 5686
} | class ____ extends GenericContainer<TestInfraKeycloakContainer> {
public TestInfraKeycloakContainer(boolean fixedPort) {
super(DockerImageName.parse(keycloakImage));
withExposedPorts(KEYCLOAK_PORT)
.withEnv("KEYCLOAK_ADMIN", DEFAULT_ADMIN_USERNAME)
.withEnv("KEYCLOAK_ADMIN_PASSWORD", DEFAULT_ADMIN_PASSWORD)
.withCommand("start-dev")
.waitingFor(Wait.forListeningPorts(KEYCLOAK_PORT))
.withStartupTimeout(Duration.ofMinutes(3L));
if (fixedPort) {
addFixedExposedPort(KEYCLOAK_PORT, KEYCLOAK_PORT);
}
}
}
return new TestInfraKeycloakContainer(ContainerEnvironmentUtil.isFixedPort(this.getClass()));
}
@Override
public void registerProperties() {
System.setProperty(KeycloakProperties.KEYCLOAK_SERVER_URL, getKeycloakServerUrl());
System.setProperty(KeycloakProperties.KEYCLOAK_REALM, getKeycloakRealm());
System.setProperty(KeycloakProperties.KEYCLOAK_USERNAME, getKeycloakUsername());
System.setProperty(KeycloakProperties.KEYCLOAK_PASSWORD, getKeycloakPassword());
}
@Override
public void initialize() {
LOG.info("Trying to start the Keycloak container");
container.start();
registerProperties();
LOG.info("Keycloak instance running at {}", getKeycloakServerUrl());
LOG.info("Keycloak admin console available at {}/admin", getKeycloakServerUrl());
}
@Override
public void shutdown() {
LOG.info("Stopping the Keycloak container");
container.stop();
}
@Override
public GenericContainer<?> getContainer() {
return container;
}
@Override
public String getKeycloakServerUrl() {
return String.format("http://%s:%d", container.getHost(), container.getMappedPort(KEYCLOAK_PORT));
}
@Override
public String getKeycloakRealm() {
return DEFAULT_REALM;
}
@Override
public String getKeycloakUsername() {
return DEFAULT_ADMIN_USERNAME;
}
@Override
public String getKeycloakPassword() {
return DEFAULT_ADMIN_PASSWORD;
}
@Override
public Keycloak getKeycloakAdminClient() {
return Keycloak.getInstance(
getKeycloakServerUrl(),
getKeycloakRealm(),
getKeycloakUsername(),
getKeycloakPassword(),
"admin-cli");
}
}
| TestInfraKeycloakContainer |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/query/KvStateServer.java | {
"start": 894,
"end": 1154
} | interface ____ the Queryable State Server running on each Task Manager in the cluster. This
* server is responsible for serving requests coming from the {@link KvStateClientProxy Queryable
* State Proxy} and requesting <b>locally</b> stored state.
*/
public | for |
java | quarkusio__quarkus | integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KnativeWithVolumesTest.java | {
"start": 640,
"end": 3734
} | class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
.setApplicationName("knative-with-volumes-properties")
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("knative-with-volumes.properties");
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("knative.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("knative.yml"))
.satisfies(p -> assertThat(p.toFile().listFiles()).hasSize(2));
List<HasMetadata> kubernetesList = DeserializationUtil
.deserializeAsList(kubernetesDir.resolve("knative.yml"));
assertThat(kubernetesList).filteredOn(i -> "Service".equals(i.getKind())).singleElement().satisfies(i -> {
assertThat(i).isInstanceOfSatisfying(Service.class, s -> {
assertThat(s.getSpec()).satisfies(spec -> {
assertThat(spec.getTemplate()).satisfies(template -> {
assertThat(template.getSpec()).satisfies(revisionSpec -> {
assertThat(revisionSpec.getVolumes()).haveAtLeastOne(new Condition<Volume>(
v -> v.getName().equals("client-crts")
&& v.getSecret().getSecretName().equals("clientcerts"),
"Has secret volume named client-crts referencing secret clientcerts"));
assertThat(revisionSpec.getVolumes()).haveAtLeastOne(new Condition<Volume>(
v -> v.getName().equals("client-cfg") && v.getConfigMap().getName().equals("clientconfig"),
"Has config-map named client-cfg referencing configmap clientconfig"));
assertThat(revisionSpec.getContainers()).hasSize(1).singleElement().satisfies(c -> {
assertThat(c.getVolumeMounts()).haveAtLeastOne(new Condition<VolumeMount>(
m -> m.getName().equals("client-crts"), "Has client-crts mount"));
assertThat(c.getVolumeMounts()).haveAtLeastOne(new Condition<VolumeMount>(
m -> m.getName().equals("client-cfg"), "Has client-cfg mount"));
assertThat(c.getPorts()).hasSize(1).singleElement().satisfies(p -> {
assertThat(p.getName()).isEqualTo("http1");
});
});
});
});
});
});
});
}
}
| KnativeWithVolumesTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/FinalApplicationStatus.java | {
"start": 1089,
"end": 1467
} | enum ____ {
/** Undefined state when either the application has not yet finished */
UNDEFINED,
/** Application which finished successfully. */
SUCCEEDED,
/** Application which failed. */
FAILED,
/** Application which was terminated by a user or admin. */
KILLED,
/** Application which has subtasks with multiple end states. */
ENDED
}
| FinalApplicationStatus |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/cdi/bcextensions/CustomQualifierTest.java | {
"start": 1270,
"end": 1864
} | class ____ implements BuildCompatibleExtension {
@Discovery
public void discovery(MetaAnnotations meta, ScannedClasses scan) {
scan.add(MyServiceFoo.class.getName());
scan.add(MyServiceBar.class.getName());
ClassConfig cfg = meta.addQualifier(MyAnnotation.class);
cfg.methods()
.stream()
.filter(it -> "value".equals(it.info().name()))
.forEach(it -> it.addAnnotation(Nonbinding.class));
}
}
// ---
@Retention(RetentionPolicy.RUNTIME)
@ | MyExtension |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableBufferTimed.java | {
"start": 7047,
"end": 10782
} | class ____<T, U extends Collection<? super T>>
extends QueueDrainObserver<T, U, U> implements Runnable, Disposable {
final Supplier<U> bufferSupplier;
final long timespan;
final long timeskip;
final TimeUnit unit;
final Worker w;
final List<U> buffers;
Disposable upstream;
BufferSkipBoundedObserver(Observer<? super U> actual,
Supplier<U> bufferSupplier, long timespan,
long timeskip, TimeUnit unit, Worker w) {
super(actual, new MpscLinkedQueue<>());
this.bufferSupplier = bufferSupplier;
this.timespan = timespan;
this.timeskip = timeskip;
this.unit = unit;
this.w = w;
this.buffers = new LinkedList<>();
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
final U b; // NOPMD
try {
b = Objects.requireNonNull(bufferSupplier.get(), "The buffer supplied is null");
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
d.dispose();
EmptyDisposable.error(e, downstream);
w.dispose();
return;
}
buffers.add(b);
downstream.onSubscribe(this);
w.schedulePeriodically(this, timeskip, timeskip, unit);
w.schedule(new RemoveFromBufferEmit(b), timespan, unit);
}
}
@Override
public void onNext(T t) {
synchronized (this) {
for (U b : buffers) {
b.add(t);
}
}
}
@Override
public void onError(Throwable t) {
done = true;
clear();
downstream.onError(t);
w.dispose();
}
@Override
public void onComplete() {
List<U> bs;
synchronized (this) {
bs = new ArrayList<>(buffers);
buffers.clear();
}
for (U b : bs) {
queue.offer(b);
}
done = true;
if (enter()) {
QueueDrainHelper.drainLoop(queue, downstream, false, w, this);
}
}
@Override
public void dispose() {
if (!cancelled) {
cancelled = true;
clear();
upstream.dispose();
w.dispose();
}
}
@Override
public boolean isDisposed() {
return cancelled;
}
void clear() {
synchronized (this) {
buffers.clear();
}
}
@Override
public void run() {
if (cancelled) {
return;
}
final U b; // NOPMD
try {
b = Objects.requireNonNull(bufferSupplier.get(), "The bufferSupplier returned a null buffer");
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
downstream.onError(e);
dispose();
return;
}
synchronized (this) {
if (cancelled) {
return;
}
buffers.add(b);
}
w.schedule(new RemoveFromBuffer(b), timespan, unit);
}
@Override
public void accept(Observer<? super U> a, U v) {
a.onNext(v);
}
final | BufferSkipBoundedObserver |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/main/java/org/apache/hadoop/mapred/uploader/FrameworkUploader.java | {
"start": 17849,
"end": 22380
} | class ____. Defaults to the default classpath.")
.hasArg().build());
opts.addOption(Option.builder("whitelist")
.desc(
"Regex specifying the full path of jars to include in the" +
" framework tarball. Default is a hardcoded set of jars" +
" considered necessary to include")
.hasArg().build());
opts.addOption(Option.builder("blacklist")
.desc(
"Regex specifying the full path of jars to exclude in the" +
" framework tarball. Default is a hardcoded set of jars" +
" considered unnecessary to include")
.hasArg().build());
opts.addOption(Option.builder("fs")
.desc(
"Target file system to upload to." +
" Example: hdfs://foo.com:8020")
.hasArg().build());
opts.addOption(Option.builder("target")
.desc(
"Target file to upload to with a reference name." +
" Example: /usr/mr-framework.tar.gz#mr-framework")
.hasArg().build());
opts.addOption(Option.builder("initialReplication")
.desc(
"Desired initial replication count. Default 3.")
.hasArg().build());
opts.addOption(Option.builder("finalReplication")
.desc(
"Desired final replication count. Default 10.")
.hasArg().build());
opts.addOption(Option.builder("acceptableReplication")
.desc(
"Desired acceptable replication count. Default 9.")
.hasArg().build());
opts.addOption(Option.builder("timeout")
.desc(
"Desired timeout for the acceptable" +
" replication in seconds. Default 10")
.hasArg().build());
opts.addOption(Option.builder("nosymlink")
.desc("Ignore symlinks into the same directory")
.build());
GenericOptionsParser parser = new GenericOptionsParser(opts, args);
if (parser.getCommandLine().hasOption("help") ||
parser.getCommandLine().hasOption("h")) {
printHelp(opts);
return false;
}
input = parser.getCommandLine().getOptionValue(
"input", System.getProperty("java.class.path"));
whitelist = parser.getCommandLine().getOptionValue(
"whitelist", DefaultJars.DEFAULT_MR_JARS);
blacklist = parser.getCommandLine().getOptionValue(
"blacklist", DefaultJars.DEFAULT_EXCLUDED_MR_JARS);
initialReplication =
Short.parseShort(parser.getCommandLine().getOptionValue(
"initialReplication", "3"));
finalReplication =
Short.parseShort(parser.getCommandLine().getOptionValue(
"finalReplication", "10"));
acceptableReplication =
Short.parseShort(
parser.getCommandLine().getOptionValue(
"acceptableReplication", "9"));
timeout =
Integer.parseInt(
parser.getCommandLine().getOptionValue("timeout", "10"));
if (parser.getCommandLine().hasOption("nosymlink")) {
ignoreSymlink = true;
}
String fs = parser.getCommandLine()
.getOptionValue("fs", () -> null);
String path = parser.getCommandLine().getOptionValue("target",
"/usr/lib/mr-framework.tar.gz#mr-framework");
boolean isFullPath =
path.startsWith("hdfs://") ||
path.startsWith("file://");
if (fs == null) {
fs = conf.getTrimmed(FS_DEFAULT_NAME_KEY);
if (fs == null && !isFullPath) {
LOG.error("No filesystem specified in either fs or target.");
printHelp(opts);
return false;
} else {
LOG.info(String.format(
"Target file system not specified. Using default %s", fs));
}
}
if (path.isEmpty()) {
LOG.error("Target directory not specified");
printHelp(opts);
return false;
}
StringBuilder absolutePath = new StringBuilder();
if (!isFullPath) {
absolutePath.append(fs);
absolutePath.append(path.startsWith("/") ? "" : "/");
}
absolutePath.append(path);
target = absolutePath.toString();
if (parser.getRemainingArgs().length > 0) {
LOG.warn("Unexpected parameters");
printHelp(opts);
return false;
}
return true;
}
/**
* Tool entry point.
* @param args arguments
* @throws IOException thrown on configuration errors
*/
public static void main(String[] args) throws IOException {
FrameworkUploader uploader = new FrameworkUploader();
if(uploader.parseArguments(args)) {
uploader.run();
}
}
}
| path |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/profile/ProfileActivationContext.java | {
"start": 1133,
"end": 2947
} | interface ____ {
/**
* Key of the property containing the project's packaging.
* Available in {@link #getUserProperties()}.
* @since 3.9
*/
String PROPERTY_NAME_PACKAGING = "packaging";
/**
* Gets the identifiers of those profiles that should be activated by explicit demand.
*
* @return The identifiers of those profiles to activate, never {@code null}.
*/
List<String> getActiveProfileIds();
/**
* Gets the identifiers of those profiles that should be deactivated by explicit demand.
*
* @return The identifiers of those profiles to deactivate, never {@code null}.
*/
List<String> getInactiveProfileIds();
/**
* Gets the system properties to use for interpolation and profile activation. The system properties are collected
* from the runtime environment like {@link System#getProperties()} and environment variables.
*
* @return The execution properties, never {@code null}.
*/
Map<String, String> getSystemProperties();
/**
* Gets the user properties to use for interpolation and profile activation. The user properties have been
* configured directly by the user on his discretion, e.g. via the {@code -Dkey=value} parameter on the command
* line.
*
* @return The user properties, never {@code null}.
*/
Map<String, String> getUserProperties();
/**
* Gets the base directory of the current project (if any).
*
* @return The base directory of the current project or {@code null} if none.
*/
File getProjectDirectory();
/**
* Gets current calculated project properties
*
* @return The project properties, never {@code null}.
*/
Map<String, String> getProjectProperties();
}
| ProfileActivationContext |
java | jhy__jsoup | src/main/java/org/jsoup/select/Evaluator.java | {
"start": 9321,
"end": 10006
} | class ____ extends AttributeKeyPair {
public AttributeWithValueEnding(String key, String value) {
super(key, value);
}
@Override
public boolean matches(Element root, Element element) {
return element.hasAttr(key) && lowerCase(element.attr(key)).endsWith(value); // value is lower case
}
@Override protected int cost() {
return 4;
}
@Override
public String toString() {
return String.format("[%s$=%s]", key, value);
}
}
/**
* Evaluator for attribute name/value matching (value containing)
*/
public static final | AttributeWithValueEnding |
java | quarkusio__quarkus | integration-tests/grpc-streaming/src/test/java/io/quarkus/grpc/example/streaming/StreamingServiceTestBase.java | {
"start": 684,
"end": 2946
} | class ____ {
protected static final Duration TIMEOUT = Duration.ofSeconds(5);
private Vertx _vertx;
private Channel channel;
protected Vertx vertx() {
return null;
}
protected void close(Vertx vertx) {
}
@BeforeEach
public void init() {
_vertx = vertx();
channel = GRPCTestUtils.channel(_vertx);
}
@AfterEach
public void cleanup() {
GRPCTestUtils.close(channel);
close(_vertx);
}
@Test
public void testSourceWithBlockingStub() {
Iterator<Item> iterator = StreamingGrpc.newBlockingStub(channel).source(Empty.newBuilder().build());
List<String> list = new ArrayList<>();
iterator.forEachRemaining(i -> list.add(i.getValue()));
assertThat(list).containsExactly("0", "1", "2", "3", "4", "5", "6", "7", "8", "9");
}
@Test
public void testSourceWithMutinyStub() {
Multi<Item> source = MutinyStreamingGrpc.newMutinyStub(channel).source(Empty.newBuilder().build());
List<String> list = source.map(Item::getValue).collect().asList().await().atMost(TIMEOUT);
assertThat(list).containsExactly("0", "1", "2", "3", "4", "5", "6", "7", "8", "9");
}
@Test
public void testSinkWithMutinyStub() {
Uni<Empty> done = MutinyStreamingGrpc.newMutinyStub(channel)
.sink(Multi.createFrom().ticks().every(Duration.ofMillis(2))
.select().first(5)
.map(l -> Item.newBuilder().setValue(l.toString()).build()));
done.await().atMost(TIMEOUT);
}
@Test
public void testPipeWithMutinyStub() {
Multi<Item> source = Multi.createFrom().ticks().every(Duration.ofMillis(2))
.select().first(5)
.map(l -> Item.newBuilder().setValue(l.toString()).build());
Multi<Item> results = MutinyStreamingGrpc.newMutinyStub(channel).pipe(source);
List<Long> items = results
.map(i -> Long.parseLong(i.getValue()))
.collect().asList().await().atMost(TIMEOUT);
// Resulting stream is: initial state (0), 0 + 0, 0 + 1, 1 + 2, 3 + 3, 6 + 4
assertThat(items).containsExactly(0L, 0L, 1L, 3L, 6L, 10L);
}
}
| StreamingServiceTestBase |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/logging/log4j2/Log4J2MetricsAutoConfiguration.java | {
"start": 2387,
"end": 2536
} | class ____ {
@Bean
@ConditionalOnMissingBean
Log4j2Metrics log4j2Metrics() {
return new Log4j2Metrics();
}
static | Log4J2MetricsAutoConfiguration |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ConstantOverflowTest.java | {
"start": 5789,
"end": 5999
} | class ____ {
void f() {
var x = 1 + Integer.MAX_VALUE;
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsReader.java | {
"start": 15825,
"end": 17485
} | class ____ extends FloatVectorValues {
private final FloatVectorValues rawVectorValues;
private final BinarizedByteVectorValues quantizedVectorValues;
BinarizedVectorValues(FloatVectorValues rawVectorValues, BinarizedByteVectorValues quantizedVectorValues) {
this.rawVectorValues = rawVectorValues;
this.quantizedVectorValues = quantizedVectorValues;
}
@Override
public int dimension() {
return rawVectorValues.dimension();
}
@Override
public int size() {
return rawVectorValues.size();
}
@Override
public float[] vectorValue(int ord) throws IOException {
return rawVectorValues.vectorValue(ord);
}
@Override
public BinarizedVectorValues copy() throws IOException {
return new BinarizedVectorValues(rawVectorValues.copy(), quantizedVectorValues.copy());
}
@Override
public Bits getAcceptOrds(Bits acceptDocs) {
return rawVectorValues.getAcceptOrds(acceptDocs);
}
@Override
public int ordToDoc(int ord) {
return rawVectorValues.ordToDoc(ord);
}
@Override
public DocIndexIterator iterator() {
return rawVectorValues.iterator();
}
@Override
public VectorScorer scorer(float[] query) throws IOException {
return quantizedVectorValues.scorer(query);
}
BinarizedByteVectorValues getQuantizedVectorValues() throws IOException {
return quantizedVectorValues;
}
}
}
| BinarizedVectorValues |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/integer_/IntegerAssert_isNegative_Test.java | {
"start": 890,
"end": 1206
} | class ____ extends IntegerAssertBaseTest {
@Override
protected IntegerAssert invoke_api_method() {
return assertions.isNegative();
}
@Override
protected void verify_internal_effects() {
verify(integers).assertIsNegative(getInfo(assertions), getActual(assertions));
}
}
| IntegerAssert_isNegative_Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/AlterTableQuoteSpecifiedSchemaTest.java | {
"start": 5092,
"end": 5234
} | class ____ {
@Id
public Integer id;
}
@Entity(name = "MyEntity")
@Table(name = "my_entity", schema = "my-schema")
public static | MyEntity |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/testng/web/ServletTestExecutionListenerTestNGIntegrationTests.java | {
"start": 1774,
"end": 2756
} | class ____ {
/* no beans required for this test */
}
@Autowired
private MockHttpServletRequest servletRequest;
/**
* Verifies bug fix for <a href="https://jira.spring.io/browse/SPR-11626">SPR-11626</a>.
*
* @see #ensureMocksAreReinjectedBetweenTests_2
*/
@Test
void ensureMocksAreReinjectedBetweenTests_1() {
assertInjectedServletRequestEqualsRequestInRequestContextHolder();
}
/**
* Verifies bug fix for <a href="https://jira.spring.io/browse/SPR-11626">SPR-11626</a>.
*
* @see #ensureMocksAreReinjectedBetweenTests_1
*/
@Test
void ensureMocksAreReinjectedBetweenTests_2() {
assertInjectedServletRequestEqualsRequestInRequestContextHolder();
}
private void assertInjectedServletRequestEqualsRequestInRequestContextHolder() {
assertThat(((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest())
.as("Injected ServletRequest must be stored in the RequestContextHolder")
.isEqualTo(servletRequest);
}
}
| Config |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2318/Issue2318Mapper.java | {
"start": 1126,
"end": 1367
} | class ____ {
private Holder holder;
public Holder getHolder() {
return holder;
}
public void setHolder(Holder holder) {
this.holder = holder;
}
public static | SourceParent |
java | spring-projects__spring-security | ldap/src/test/java/org/springframework/security/ldap/jackson2/PersonMixinTests.java | {
"start": 1540,
"end": 6159
} | class ____ {
private static final String USER_PASSWORD = "Password1234";
private static final String AUTHORITIES_ARRAYLIST_JSON = "[\"java.util.Collections$UnmodifiableRandomAccessList\", []]";
// @formatter:off
private static final String PERSON_JSON = "{"
+ "\"@class\": \"org.springframework.security.ldap.userdetails.Person\", "
+ "\"dn\": \"ignored=ignored\","
+ "\"username\": \"ghengis\","
+ "\"password\": \"" + USER_PASSWORD + "\","
+ "\"givenName\": \"Ghengis\","
+ "\"sn\": \"Khan\","
+ "\"cn\": [\"java.util.Arrays$ArrayList\",[\"Ghengis Khan\"]],"
+ "\"description\": \"Scary\","
+ "\"telephoneNumber\": \"+442075436521\","
+ "\"accountNonExpired\": true, "
+ "\"accountNonLocked\": true, "
+ "\"credentialsNonExpired\": true, "
+ "\"enabled\": true, "
+ "\"authorities\": " + AUTHORITIES_ARRAYLIST_JSON + ","
+ "\"graceLoginsRemaining\": " + Integer.MAX_VALUE + ","
+ "\"timeBeforeExpiration\": " + Integer.MAX_VALUE
+ "}";
// @formatter:on
private ObjectMapper mapper;
@BeforeEach
public void setup() {
ClassLoader loader = getClass().getClassLoader();
this.mapper = new ObjectMapper();
this.mapper.registerModules(SecurityJackson2Modules.getModules(loader));
}
@Test
public void serializeWhenMixinRegisteredThenSerializes() throws Exception {
PersonContextMapper mapper = new PersonContextMapper();
Person p = (Person) mapper.mapUserFromContext(createUserContext(), "ghengis", AuthorityUtils.NO_AUTHORITIES);
String json = this.mapper.writeValueAsString(p);
JSONAssert.assertEquals(PERSON_JSON, json, true);
}
@Test
public void serializeWhenEraseCredentialInvokedThenUserPasswordIsNull()
throws JsonProcessingException, JSONException {
PersonContextMapper mapper = new PersonContextMapper();
Person p = (Person) mapper.mapUserFromContext(createUserContext(), "ghengis", AuthorityUtils.NO_AUTHORITIES);
p.eraseCredentials();
String actualJson = this.mapper.writeValueAsString(p);
JSONAssert.assertEquals(PERSON_JSON.replaceAll("\"" + USER_PASSWORD + "\"", "null"), actualJson, true);
}
@Test
public void deserializeWhenMixinNotRegisteredThenThrowJsonProcessingException() {
assertThatExceptionOfType(JsonProcessingException.class)
.isThrownBy(() -> new ObjectMapper().readValue(PERSON_JSON, Person.class));
}
@Test
public void deserializeWhenMixinRegisteredThenDeserializes() throws Exception {
PersonContextMapper mapper = new PersonContextMapper();
Person expectedAuthentication = (Person) mapper.mapUserFromContext(createUserContext(), "ghengis",
AuthorityUtils.NO_AUTHORITIES);
Person authentication = this.mapper.readValue(PERSON_JSON, Person.class);
assertThat(authentication.getAuthorities()).containsExactlyElementsOf(expectedAuthentication.getAuthorities());
assertThat(authentication.getDn()).isEqualTo(expectedAuthentication.getDn());
assertThat(authentication.getDescription()).isEqualTo(expectedAuthentication.getDescription());
assertThat(authentication.getUsername()).isEqualTo(expectedAuthentication.getUsername());
assertThat(authentication.getPassword()).isEqualTo(expectedAuthentication.getPassword());
assertThat(authentication.getSn()).isEqualTo(expectedAuthentication.getSn());
assertThat(authentication.getGivenName()).isEqualTo(expectedAuthentication.getGivenName());
assertThat(authentication.getTelephoneNumber()).isEqualTo(expectedAuthentication.getTelephoneNumber());
assertThat(authentication.getGraceLoginsRemaining())
.isEqualTo(expectedAuthentication.getGraceLoginsRemaining());
assertThat(authentication.getTimeBeforeExpiration())
.isEqualTo(expectedAuthentication.getTimeBeforeExpiration());
assertThat(authentication.isAccountNonExpired()).isEqualTo(expectedAuthentication.isAccountNonExpired());
assertThat(authentication.isAccountNonLocked()).isEqualTo(expectedAuthentication.isAccountNonLocked());
assertThat(authentication.isEnabled()).isEqualTo(expectedAuthentication.isEnabled());
assertThat(authentication.isCredentialsNonExpired())
.isEqualTo(expectedAuthentication.isCredentialsNonExpired());
}
private DirContextAdapter createUserContext() {
DirContextAdapter ctx = new DirContextAdapter();
ctx.setDn(LdapNameBuilder.newInstance("ignored=ignored").build());
ctx.setAttributeValue("userPassword", USER_PASSWORD);
ctx.setAttributeValue("cn", "Ghengis Khan");
ctx.setAttributeValue("description", "Scary");
ctx.setAttributeValue("givenName", "Ghengis");
ctx.setAttributeValue("sn", "Khan");
ctx.setAttributeValue("telephoneNumber", "+442075436521");
return ctx;
}
}
| PersonMixinTests |
java | quarkusio__quarkus | extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTokenIntrospectionUserInfoCache.java | {
"start": 1021,
"end": 3946
} | class ____ implements TokenIntrospectionCache, UserInfoCache {
private static final Logger LOG = Logger.getLogger(DefaultTokenIntrospectionUserInfoCache.class);
private static final Uni<TokenIntrospection> NULL_INTROSPECTION_UNI = Uni.createFrom().nullItem();
private static final Uni<UserInfo> NULL_USERINFO_UNI = Uni.createFrom().nullItem();
final MemoryCache<CacheEntry> cache;
public DefaultTokenIntrospectionUserInfoCache(OidcConfig oidcConfig, Vertx vertx) {
cache = new MemoryCache<CacheEntry>(vertx, oidcConfig.tokenCache().cleanUpTimerInterval(),
oidcConfig.tokenCache().timeToLive(), oidcConfig.tokenCache().maxSize());
}
@Override
public Uni<Void> addIntrospection(String token, TokenIntrospection introspection, OidcTenantConfig oidcTenantConfig,
OidcRequestContext<Void> requestContext) {
CacheEntry entry = cache.get(token);
if (entry != null) {
entry.introspection = introspection;
} else {
cache.add(token, new CacheEntry(introspection));
}
return CodeAuthenticationMechanism.VOID_UNI;
}
@Override
public Uni<TokenIntrospection> getIntrospection(String token, OidcTenantConfig oidcConfig,
OidcRequestContext<TokenIntrospection> requestContext) {
CacheEntry entry = cache.get(token);
if (entry == null || entry.introspection == null) {
return NULL_INTROSPECTION_UNI;
}
if (isTokenExpired(entry.introspection.getLong(OidcConstants.INTROSPECTION_TOKEN_EXP), oidcConfig)) {
LOG.debug("Introspected token has expired, removing it from the token introspection cache");
cache.remove(token);
return NULL_INTROSPECTION_UNI;
}
return Uni.createFrom().item(entry.introspection);
}
private static boolean isTokenExpired(Long exp, OidcTenantConfig oidcConfig) {
final long lifespanGrace = oidcConfig != null ? oidcConfig.token().lifespanGrace().orElse(0) : 0;
return exp != null
&& System.currentTimeMillis() / 1000 > (exp + lifespanGrace);
}
@Override
public Uni<Void> addUserInfo(String token, UserInfo userInfo, OidcTenantConfig oidcTenantConfig,
OidcRequestContext<Void> requestContext) {
CacheEntry entry = cache.get(token);
if (entry != null) {
entry.userInfo = userInfo;
} else {
cache.add(token, new CacheEntry(userInfo));
}
return CodeAuthenticationMechanism.VOID_UNI;
}
@Override
public Uni<UserInfo> getUserInfo(String token, OidcTenantConfig oidcConfig,
OidcRequestContext<UserInfo> requestContext) {
CacheEntry entry = cache.get(token);
return entry == null ? NULL_USERINFO_UNI : Uni.createFrom().item(entry.userInfo);
}
private static | DefaultTokenIntrospectionUserInfoCache |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java | {
"start": 80422,
"end": 106564
} | class ____ implements SimpleDiffable<ClusterState.Custom>, ClusterState.Custom {
static final String EXCEPTION_MESSAGE = "simulated";
@Override
public String getWriteableName() {
return "broken";
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
@Override
public void writeTo(StreamOutput out) {
throw new ElasticsearchException(EXCEPTION_MESSAGE);
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return Collections.emptyIterator();
}
}
public void testClusterRecoversAfterExceptionDuringSerialization() {
try (Cluster cluster = new Cluster(randomIntBetween(2, 5))) {
cluster.runRandomly();
cluster.stabilise();
final ClusterNode leader1 = cluster.getAnyLeader();
logger.info("--> submitting broken task to [{}]", leader1);
final AtomicBoolean failed = new AtomicBoolean();
leader1.submitUpdateTask("broken-task", cs -> ClusterState.builder(cs).putCustom("broken", new BrokenCustom()).build(), (e) -> {
assertThat(e.getCause(), instanceOf(ElasticsearchException.class));
assertThat(e.getCause().getMessage(), equalTo(BrokenCustom.EXCEPTION_MESSAGE));
failed.set(true);
});
// allow for forking 3 times:
// - once onto the master-service thread
// - once to fork the publication in FakeThreadPoolMasterService
// - once to fork the publication listener back onto the master-service thread
cluster.runFor(3 * DEFAULT_DELAY_VARIABILITY + 1, "processing broken task");
assertTrue(failed.get());
cluster.stabilise();
final ClusterNode leader2 = cluster.getAnyLeader();
long finalValue = randomLong();
logger.info("--> submitting value [{}] to [{}]", finalValue, leader2);
leader2.submitValue(finalValue);
cluster.stabilise(DEFAULT_CLUSTER_STATE_UPDATE_DELAY);
for (final ClusterNode clusterNode : cluster.clusterNodes) {
final String nodeId = clusterNode.getId();
final ClusterState appliedState = clusterNode.getLastAppliedClusterState();
assertThat(nodeId + " has the applied value", value(appliedState), is(finalValue));
}
}
}
@TestLogging(
reason = "testing ClusterFormationFailureHelper logging",
value = "org.elasticsearch.cluster.coordination.ClusterFormationFailureHelper:WARN"
)
public void testLogsWarningPeriodicallyIfClusterNotFormed() {
testLogsWarningPeriodicallyIfClusterNotFormed(
"master not discovered or elected yet, an election requires at least 2 nodes with ids from [",
nodeId -> "*have only discovered non-quorum *" + nodeId + "*discovery will continue*"
);
}
protected void testLogsWarningPeriodicallyIfClusterNotFormed(String expectedMessageStart, UnaryOperator<String> discoveryMessageFn) {
final long warningDelayMillis;
final Settings settings;
if (randomBoolean()) {
settings = Settings.EMPTY;
warningDelayMillis = ClusterFormationFailureHelper.DISCOVERY_CLUSTER_FORMATION_WARNING_TIMEOUT_SETTING.get(settings).millis();
} else {
warningDelayMillis = randomLongBetween(1, 100000);
settings = Settings.builder()
.put(ClusterFormationFailureHelper.DISCOVERY_CLUSTER_FORMATION_WARNING_TIMEOUT_SETTING.getKey(), warningDelayMillis + "ms")
.build();
}
logger.info("--> emitting warnings every [{}ms]", warningDelayMillis);
try (Cluster cluster = new Cluster(3, true, settings)) {
cluster.runRandomly();
cluster.stabilise();
logger.info("--> disconnecting all nodes");
for (final ClusterNode clusterNode : cluster.clusterNodes) {
clusterNode.disconnect();
}
cluster.runFor(
defaultMillis(LEADER_CHECK_TIMEOUT_SETTING) // to wait for any in-flight check to time out
+ defaultMillis(LEADER_CHECK_INTERVAL_SETTING) // to wait for the next check to be sent
+ 2 * DEFAULT_DELAY_VARIABILITY, // to send the failing check and receive the disconnection response
"waiting for leader failure"
);
for (final ClusterNode clusterNode : cluster.clusterNodes) {
assertThat(clusterNode.getId() + " is CANDIDATE", clusterNode.coordinator.getMode(), is(CANDIDATE));
}
for (int i = scaledRandomIntBetween(1, 10); i >= 0; i--) {
try (var mockLog = MockLog.capture(ClusterFormationFailureHelper.class)) {
mockLog.addExpectation(new MockLog.LoggingExpectation() {
final Set<DiscoveryNode> nodesLogged = new HashSet<>();
@Override
public void match(LogEvent event) {
final String message = event.getMessage().getFormattedMessage();
assertThat(message, startsWith(expectedMessageStart));
final List<ClusterNode> matchingNodes = cluster.clusterNodes.stream()
.filter(
n -> event.getContextData()
.<String>getValue(DeterministicTaskQueue.NODE_ID_LOG_CONTEXT_KEY)
.equals(DeterministicTaskQueue.getNodeIdForLogContext(n.getLocalNode()))
)
.toList();
assertThat(matchingNodes, hasSize(1));
assertTrue(message, Regex.simpleMatch(discoveryMessageFn.apply(matchingNodes.getFirst().toString()), message));
nodesLogged.add(matchingNodes.getFirst().getLocalNode());
}
@Override
public void assertMatched() {
assertThat(
nodesLogged + " vs " + cluster.clusterNodes,
nodesLogged,
equalTo(cluster.clusterNodes.stream().map(ClusterNode::getLocalNode).collect(Collectors.toSet()))
);
}
});
cluster.runFor(warningDelayMillis + DEFAULT_DELAY_VARIABILITY, "waiting for warning to be emitted");
mockLog.assertAllExpectationsMatched();
}
}
for (ClusterNode clusterNode : cluster.clusterNodes) {
clusterNode.heal();
}
}
}
@TestLogging(
reason = "testing warning of a single-node cluster having discovery seed hosts",
value = "org.elasticsearch.cluster.coordination.Coordinator:WARN"
)
public void testLogsWarningPeriodicallyIfSingleNodeClusterHasSeedHosts() {
final long warningDelayMillis;
final Settings settings;
final String fakeSeedHost = buildNewFakeTransportAddress().toString();
if (randomBoolean()) {
settings = Settings.builder().putList(DISCOVERY_SEED_HOSTS_SETTING.getKey(), fakeSeedHost).build();
warningDelayMillis = Coordinator.SINGLE_NODE_CLUSTER_SEED_HOSTS_CHECK_INTERVAL_SETTING.get(settings).millis();
} else {
warningDelayMillis = randomLongBetween(1, 100000);
settings = Settings.builder()
.put(ClusterFormationFailureHelper.DISCOVERY_CLUSTER_FORMATION_WARNING_TIMEOUT_SETTING.getKey(), warningDelayMillis + "ms")
.putList(DISCOVERY_SEED_HOSTS_SETTING.getKey(), fakeSeedHost)
.build();
}
logger.info("--> emitting warnings every [{}ms]", warningDelayMillis);
try (Cluster cluster = new Cluster(1, true, settings)) {
cluster.runRandomly();
cluster.stabilise();
for (int i = scaledRandomIntBetween(1, 10); i >= 0; i--) {
try (var mockLog = MockLog.capture(Coordinator.class)) {
mockLog.addExpectation(new MockLog.LoggingExpectation() {
String loggedClusterUuid;
@Override
public void match(LogEvent event) {
final String message = event.getMessage().getFormattedMessage();
assertThat(
message,
allOf(
startsWith("This node is a fully-formed single-node cluster with cluster UUID"),
containsString(ReferenceDocs.FORMING_SINGLE_NODE_CLUSTERS.toString())
)
);
loggedClusterUuid = (String) event.getMessage().getParameters()[0];
}
@Override
public void assertMatched() {
final String clusterUuid = cluster.getAnyNode().getLastAppliedClusterState().metadata().clusterUUID();
assertThat(loggedClusterUuid + " vs " + clusterUuid, clusterUuid, equalTo(clusterUuid));
}
});
cluster.runFor(warningDelayMillis + DEFAULT_DELAY_VARIABILITY, "waiting for warning to be emitted");
mockLog.assertAllExpectationsMatched();
}
}
}
}
public void testInvariantWhenTwoNodeClusterBecomesSingleNodeCluster() {
try (Cluster cluster = new Cluster(2)) {
cluster.stabilise();
assertTrue(cluster.getAnyNodeExcept(cluster.getAnyLeader()).disconnect()); // Remove non-leader node
cluster.stabilise();
}
}
@TestLogging(
reason = "testing LagDetector and CoordinatorPublication logging",
value = "org.elasticsearch.cluster.coordination.LagDetector:DEBUG,"
+ "org.elasticsearch.cluster.coordination.Coordinator.CoordinatorPublication:INFO"
)
public void testLogsMessagesIfPublicationDelayed() {
try (Cluster cluster = new Cluster(between(3, 5))) {
cluster.runRandomly();
cluster.stabilise();
final ClusterNode brokenNode = cluster.getAnyNodeExcept(cluster.getAnyLeader());
try (var mockLog = MockLog.capture(Coordinator.CoordinatorPublication.class, LagDetector.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"publication info message",
Coordinator.CoordinatorPublication.class.getCanonicalName(),
Level.INFO,
"after [*] publication of cluster state version [*] is still waiting for "
+ brokenNode.getLocalNode()
+ " ["
+ Publication.PublicationTargetState.SENT_PUBLISH_REQUEST
+ ']'
)
);
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"publication warning",
Coordinator.CoordinatorPublication.class.getCanonicalName(),
Level.WARN,
"after [*] publication of cluster state version [*] is still waiting for "
+ brokenNode.getLocalNode()
+ " ["
+ Publication.PublicationTargetState.SENT_PUBLISH_REQUEST
+ ']'
)
);
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"lag warning",
LagDetector.class.getCanonicalName(),
Level.WARN,
"node ["
+ brokenNode
+ "] is lagging at cluster state version [*], "
+ "although publication of cluster state version [*] completed [*] ago"
)
);
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"hot threads from lagging node",
LagDetector.class.getCanonicalName(),
Level.DEBUG,
"hot threads from node ["
+ brokenNode.getLocalNode().descriptionWithoutAttributes()
+ "] lagging at version [*] despite commit of cluster state version [*]*"
)
);
// drop the publication messages to one node, but then restore connectivity so it remains in the cluster and does not fail
// health checks
brokenNode.blackhole();
cluster.deterministicTaskQueue.scheduleAt(
cluster.deterministicTaskQueue.getCurrentTimeMillis() + DEFAULT_CLUSTER_STATE_UPDATE_DELAY,
new Runnable() {
@Override
public void run() {
brokenNode.heal();
}
@Override
public String toString() {
return "healing " + brokenNode;
}
}
);
cluster.getAnyLeader().submitValue(randomLong());
cluster.runFor(
defaultMillis(PUBLISH_TIMEOUT_SETTING) + 2 * DEFAULT_DELAY_VARIABILITY + defaultMillis(
LagDetector.CLUSTER_FOLLOWER_LAG_TIMEOUT_SETTING
) + DEFAULT_DELAY_VARIABILITY + 2 * DEFAULT_DELAY_VARIABILITY,
"waiting for messages to be emitted"
);
mockLog.assertAllExpectationsMatched();
}
}
}
public void testDoesNotPerformElectionWhenRestartingFollower() {
try (Cluster cluster = new Cluster(randomIntBetween(2, 5), false, Settings.EMPTY)) {
cluster.runRandomly();
cluster.stabilise();
final ClusterNode leader = cluster.getAnyLeader();
final long expectedTerm = leader.coordinator.getCurrentTerm();
if (cluster.clusterNodes.stream().filter(n -> n.getLocalNode().isMasterNode()).count() == 2) {
// in the 2-node case, auto-shrinking the voting configuration is required to reduce the voting configuration down to just
// the leader, otherwise restarting the other master-eligible node triggers an election
leader.submitSetAutoShrinkVotingConfiguration(true);
cluster.stabilise(2 * DEFAULT_CLUSTER_STATE_UPDATE_DELAY); // 1st delay for the setting update, 2nd for the reconfiguration
}
for (final ClusterNode clusterNode : cluster.getAllNodesExcept(leader)) {
logger.info("--> restarting {}", clusterNode);
clusterNode.close();
cluster.clusterNodes.replaceAll(
cn -> cn == clusterNode ? cn.restartedNode(Function.identity(), Function.identity(), Settings.EMPTY) : cn
);
cluster.stabilise();
assertThat("term should not change", cluster.getAnyNode().coordinator.getCurrentTerm(), is(expectedTerm));
}
}
}
public void testImproveConfigurationPerformsVotingConfigExclusionStateCheck() {
try (Cluster cluster = new Cluster(1)) {
cluster.runRandomly();
cluster.stabilise();
final Coordinator coordinator = cluster.getAnyLeader().coordinator;
final ClusterState currentState = coordinator.getLastAcceptedState();
Set<CoordinationMetadata.VotingConfigExclusion> newVotingConfigExclusion1 = Set.of(
new CoordinationMetadata.VotingConfigExclusion(
"resolvableNodeId",
CoordinationMetadata.VotingConfigExclusion.MISSING_VALUE_MARKER
)
);
ClusterState newState1 = buildNewClusterStateWithVotingConfigExclusion(currentState, newVotingConfigExclusion1);
assertFalse(Coordinator.validVotingConfigExclusionState(newState1));
Set<CoordinationMetadata.VotingConfigExclusion> newVotingConfigExclusion2 = Set.of(
new CoordinationMetadata.VotingConfigExclusion(
CoordinationMetadata.VotingConfigExclusion.MISSING_VALUE_MARKER,
"resolvableNodeName"
)
);
ClusterState newState2 = buildNewClusterStateWithVotingConfigExclusion(currentState, newVotingConfigExclusion2);
assertFalse(Coordinator.validVotingConfigExclusionState(newState2));
}
}
public void testPeerFinderListener() throws Exception {
try (Cluster cluster = new Cluster(3, true, Settings.EMPTY)) {
cluster.runRandomly();
cluster.stabilise();
ClusterNode leader = cluster.getAnyLeader();
ClusterNode nodeWithListener = cluster.getAnyNodeExcept(leader);
AtomicBoolean listenerCalled = new AtomicBoolean(false);
nodeWithListener.coordinator.addPeerFinderListener(() -> listenerCalled.set(true));
assertFalse(listenerCalled.get());
leader.disconnect();
cluster.runFor(DEFAULT_STABILISATION_TIME, "Letting disconnect take effect");
cluster.stabilise();
assertTrue(cluster.clusterNodes.contains(nodeWithListener));
assertBusy(() -> assertTrue(listenerCalled.get()));
}
}
public void testElectionSchedulingAfterDiscoveryOutage() {
try (
Cluster cluster = new Cluster(
3,
true,
Settings.builder()
.put(ELECTION_MAX_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(10))
.put(ELECTION_BACK_OFF_TIME_SETTING.getKey(), TimeValue.timeValueMinutes(1))
.build()
)
) {
cluster.runRandomly();
// must allow extra time for stabilisation due to enthusiastic backoff settings
cluster.stabilise(DEFAULT_STABILISATION_TIME + TimeValue.timeValueMinutes(20).millis());
final long followerCheckMillis = defaultMillis(FOLLOWER_CHECK_INTERVAL_SETTING) + 2 * DEFAULT_DELAY_VARIABILITY;
final long leaderCheckMillis = defaultMillis(LEADER_CHECK_INTERVAL_SETTING) + 2 * DEFAULT_DELAY_VARIABILITY;
final long discoveryMillis = defaultMillis(DISCOVERY_FIND_PEERS_INTERVAL_SETTING) + 2 * DEFAULT_DELAY_VARIABILITY;
final long minOutageMillis =
// Allow long enough for the leader to stand down
followerCheckMillis * defaultInt(FOLLOWER_CHECK_RETRY_COUNT_SETTING) + DEFAULT_CLUSTER_STATE_UPDATE_DELAY
// and then for the followers to detect the leader failure
+ leaderCheckMillis * defaultInt(LEADER_CHECK_RETRY_COUNT_SETTING)
// and then long enough for discovery to stop working
+ discoveryMillis;
final var leader = cluster.getAnyLeader();
// This test is checking for a potential bug where an active election scheduler would remain active, repeatedly failing and
// backing off, while not even being able to discover a quorum of nodes. In that situation when the discovery problem is
// resolved it can take far too long for the next election to occur because of the backoff.
if (randomBoolean()) {
// STEP 1 (optional): get the cluster into a state where all the election schedulers are active:
logger.info("--> blocking key actions until cluster falls apart");
leader.addActionBlock(FollowersChecker.FOLLOWER_CHECK_ACTION_NAME);
for (ClusterNode clusterNode : cluster.clusterNodes) {
clusterNode.addActionBlock(StatefulPreVoteCollector.REQUEST_PRE_VOTE_ACTION_NAME);
}
cluster.runFor(
randomLongBetween(minOutageMillis, TimeValue.timeValueMinutes(2).millis()),
"simulate extended election failure"
);
assertTrue(
cluster.clusterNodes.stream()
.map(n -> n.coordinator)
.allMatch(c -> c.getMode() == CANDIDATE && c.electionSchedulerActive())
);
}
// STEP 2: now block discovery:
logger.info("--> blocking discovery");
for (ClusterNode clusterNode : cluster.clusterNodes) {
clusterNode.clearActionBlocks();
clusterNode.disconnect();
for (ClusterNode otherNode : cluster.clusterNodes) {
clusterNode.transportService.disconnectFromNode(otherNode.getLocalNode());
}
}
cluster.runFor(
randomLongBetween(minOutageMillis, TimeValue.timeValueHours(2).millis()),
"simulate extended discovery problems"
);
assertTrue(
cluster.clusterNodes.stream()
.map(n -> n.coordinator)
.allMatch(
c -> c.getMode() == CANDIDATE
&& c.electionSchedulerActive() == false
&& c.getFoundPeers().iterator().hasNext() == false
)
);
// STEP 3: now heal the discovery problem, fix elections (on one node only to avoid election clashes), and see that the cluster
// stabilises immediately:
logger.info("--> healing discovery and permitting elections on [{}]", leader);
for (ClusterNode clusterNode : cluster.clusterNodes) {
clusterNode.heal();
if (clusterNode != leader) {
clusterNode.addActionBlock(StatefulPreVoteCollector.REQUEST_PRE_VOTE_ACTION_NAME);
}
}
cluster.stabilise(
// Pinging all peers once should be enough to discover the other nodes
defaultMillis(DISCOVERY_FIND_PEERS_INTERVAL_SETTING)
// Then wait for an election to be scheduled
+ defaultMillis(ELECTION_INITIAL_TIMEOUT_SETTING)
// Allow two round-trips for pre-voting and voting
+ 4 * DEFAULT_DELAY_VARIABILITY
// Then a commit of the new leader's first cluster state
+ DEFAULT_CLUSTER_STATE_UPDATE_DELAY
// Then the remaining node may experience a disconnect (see comment in PeerFinder#closePeers) for which it is
// removed from the cluster, and its fault detection must also detect its removal
+ Math.max(
DEFAULT_CLUSTER_STATE_UPDATE_DELAY,
defaultMillis(LEADER_CHECK_TIMEOUT_SETTING) * LEADER_CHECK_RETRY_COUNT_SETTING.get(Settings.EMPTY)
)
// then it does another round of discovery
+ defaultMillis(DISCOVERY_FIND_PEERS_INTERVAL_SETTING)
// and finally it joins the master
+ DEFAULT_CLUSTER_STATE_UPDATE_DELAY
);
cluster.clusterNodes.forEach(ClusterNode::clearActionBlocks);
}
}
public void testElectionWithSlowPublication() {
final var delayedActions = new HashSet<>();
try (Cluster cluster = new Cluster(7, true, Settings.EMPTY) {
@Override
protected long transportDelayMillis(String actionName) {
return delayedActions.contains(actionName) ? between(1000, 2000) : 0;
}
}) {
cluster.runRandomly();
cluster.stabilise();
final var leader = cluster.getAnyLeader();
logger.info("--> marking leader [{}] as blackholed and adding action delays", leader);
delayedActions.add(PublicationTransportHandler.PUBLISH_STATE_ACTION_NAME);
delayedActions.add(FollowersChecker.FOLLOWER_CHECK_ACTION_NAME);
leader.blackhole();
cluster.stabilise();
delayedActions.clear();
}
}
private ClusterState buildNewClusterStateWithVotingConfigExclusion(
ClusterState currentState,
Set<CoordinationMetadata.VotingConfigExclusion> newVotingConfigExclusion
) {
DiscoveryNodes newNodes = DiscoveryNodes.builder(currentState.nodes())
.add(
DiscoveryNodeUtils.builder("resolvableNodeId")
.name("resolvableNodeName")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build()
)
.build();
CoordinationMetadata.Builder coordMetadataBuilder = CoordinationMetadata.builder(currentState.coordinationMetadata());
newVotingConfigExclusion.forEach(coordMetadataBuilder::addVotingConfigExclusion);
Metadata newMetadata = Metadata.builder(currentState.metadata()).coordinationMetadata(coordMetadataBuilder.build()).build();
return ClusterState.builder(currentState).nodes(newNodes).metadata(newMetadata).build();
}
}
| BrokenCustom |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/adapter/standard/ConvertingEncoderDecoderSupport.java | {
"start": 8412,
"end": 8810
} | class ____<T> extends ConvertingEncoderDecoderSupport<T, ByteBuffer>
implements Encoder.Binary<T> {
}
/**
* A binary {@link jakarta.websocket.Encoder.Binary jakarta.websocket.Encoder} that delegates
* to Spring's conversion service. See {@link ConvertingEncoderDecoderSupport} for details.
* @param <T> the type that this Decoder can convert from
*/
public abstract static | BinaryEncoder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CheckReturnValueTest.java | {
"start": 37816,
"end": 38208
} | class ____ {
void foo(Lib l) {
l.makeBarOrThrow();
}
}
""")
// The checker doesn't suggest CIRV, so it applies a different fix instead.
.addOutputLines(
"Test.java",
"""
import com.google.errorprone.annotations.CheckReturnValue;
@CheckReturnValue
| Test |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/PojoUtilsForNonPublicStaticTest.java | {
"start": 1265,
"end": 1576
} | class ____ {
private String name;
public NonPublicStaticData(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
| NonPublicStaticData |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/async/InternalAsyncUtil.java | {
"start": 1033,
"end": 1119
} | class ____ some async logging-related functionality.
* <p>
* Consider this | providing |
java | quarkusio__quarkus | extensions/elasticsearch-rest-client/deployment/src/test/java/io/quarkus/elasticsearch/restclient/lowlevel/runtime/ElasticsearchClientConfigTest.java | {
"start": 687,
"end": 1477
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class).addClasses(TestConfigurator.class, RestClientBuilderHelper.class)
.addAsResource(new StringAsset("quarkus.elasticsearch.hosts=elasticsearch:9200"),
"application.properties"));
@Inject
ElasticsearchConfig config;
@Test
public void testRestClientBuilderHelperWithElasticsearchClientConfig() {
RestClientBuilderHelper.createRestClientBuilder(config).build();
assertTrue(TestConfigurator.invoked);
}
@ElasticsearchClientConfig
@ApplicationScoped
public static | ElasticsearchClientConfigTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/PolymorphicPropsCreatorsTest.java | {
"start": 1545,
"end": 2140
} | class ____
{
protected final String opt;
protected AbstractRoot(String opt) {
this.opt = opt;
}
@JsonCreator
public static final AbstractRoot make(@JsonProperty("which") int which,
@JsonProperty("opt") String opt) {
if (1 == which) {
return new One(opt);
}
throw new RuntimeException("cannot instantiate " + which);
}
abstract public int getWhich();
public final String getOpt() {
return opt;
}
}
static final | AbstractRoot |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/NotifyCheckpointAbortedITCase.java | {
"start": 17259,
"end": 17830
} | class ____
implements StateBackendFactory<DeclineSinkFailingStateBackend> {
@Override
public DeclineSinkFailingStateBackend createFromConfig(
ReadableConfig config, ClassLoader classLoader)
throws IllegalConfigurationException, IOException {
return new DeclineSinkFailingStateBackend(checkpointPath);
}
}
/**
* The state backend to create {@link DeclineSinkFailingOperatorStateBackend} at {@link
* DeclineSink}.
*/
private static | DeclineSinkFailingStateBackendFactory |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/AsyncSnapshotCallable.java | {
"start": 2098,
"end": 2365
} | class ____ {@link
* #cleanupProvidedResources()}. The implementation of this method should release all provided
* resources that have been passed into the snapshot from the synchronous part of the snapshot.
*
* @param <T> type of the result.
*/
public abstract | calls |
java | quarkusio__quarkus | integration-tests/picocli-native/src/main/java/io/quarkus/it/picocli/BaseTestCommand.java | {
"start": 113,
"end": 316
} | class ____ {
@CommandLine.Option(names = { "-f", "--files" }, description = "Some files.")
private List<File> files;
public List<File> getFiles() {
return files;
}
}
| BaseTestCommand |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableMergeDelayErrorTest.java | {
"start": 17480,
"end": 19806
} | class ____ extends DefaultSubscriber<String> {
volatile Throwable e;
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
this.e = e;
}
@Override
public void onNext(String args) {
}
}
@Test
public void errorInParentFlowable() {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1), Flowable.just(2))
.startWithItem(Flowable.<Integer> error(new RuntimeException()))
).subscribe(ts);
ts.awaitDone(5, TimeUnit.SECONDS);
ts.assertTerminated();
ts.assertValues(1, 2);
assertEquals(1, ts.errors().size());
}
@Test
public void errorInParentFlowableDelayed() throws Exception {
for (int i = 0; i < 50; i++) {
final TestASynchronous1sDelayedFlowable f1 = new TestASynchronous1sDelayedFlowable();
final TestASynchronous1sDelayedFlowable f2 = new TestASynchronous1sDelayedFlowable();
Flowable<Flowable<String>> parentFlowable = Flowable.unsafeCreate(new Publisher<Flowable<String>>() {
@Override
public void subscribe(Subscriber<? super Flowable<String>> op) {
op.onSubscribe(new BooleanSubscription());
op.onNext(Flowable.unsafeCreate(f1));
op.onNext(Flowable.unsafeCreate(f2));
op.onError(new NullPointerException("throwing exception in parent"));
}
});
stringSubscriber = TestHelper.mockSubscriber();
TestSubscriberEx<String> ts = new TestSubscriberEx<>(stringSubscriber);
Flowable<String> m = Flowable.mergeDelayError(parentFlowable);
m.subscribe(ts);
System.out.println("testErrorInParentFlowableDelayed | " + i);
ts.awaitDone(2000, TimeUnit.MILLISECONDS);
ts.assertTerminated();
verify(stringSubscriber, times(2)).onNext("hello");
verify(stringSubscriber, times(1)).onError(any(NullPointerException.class));
verify(stringSubscriber, never()).onComplete();
}
}
private static | CaptureObserver |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapRunnable.java | {
"start": 1015,
"end": 1289
} | interface ____ {@link Mapper}s.
*
* <p>Custom implementations of <code>MapRunnable</code> can exert greater
* control on map processing e.g. multi-threaded, asynchronous mappers etc.</p>
*
* @see Mapper
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public | for |
java | google__guava | android/guava-testlib/src/com/google/common/testing/NullPointerTester.java | {
"start": 3256,
"end": 7683
} | class ____ {
private final ClassToInstanceMap<Object> defaults = MutableClassToInstanceMap.create();
private final List<Member> ignoredMembers = new ArrayList<>();
private ExceptionTypePolicy policy = ExceptionTypePolicy.NPE_OR_UOE;
/*
* Requiring desugaring for guava-*testlib* is likely safe, at least for the reflection-based
* NullPointerTester. But if you are a user who is reading this because this change caused you
* trouble, please let us know: https://github.com/google/guava/issues/new
*/
@IgnoreJRERequirement
public NullPointerTester() {
try {
/*
* Converter.apply has a non-nullable parameter type but doesn't throw for null arguments. For
* more information, see the comments in that class.
*
* We already know that that's how it behaves, and subclasses of Converter can't change that
* behavior. So there's no sense in making all subclass authors exclude the method from any
* NullPointerTester tests that they have.
*/
ignoredMembers.add(Converter.class.getMethod("apply", Object.class));
} catch (NoSuchMethodException shouldBeImpossible) {
// Fine: If it doesn't exist, then there's no chance that we're going to be asked to test it.
}
/*
* These methods "should" call checkNotNull. However, I'm wary of accidentally introducing
* anything that might slow down execution on such a hot path. Given that the methods are only
* package-private, I feel OK with just not testing them for NPE.
*
* Note that testing casValue is particularly dangerous because it uses Unsafe under some
* versions of Java, and apparently Unsafe can cause SIGSEGV instead of NPE—almost as if it's
* not safe.
*/
concat(
stream(AbstractFuture.class.getDeclaredMethods()),
stream(requireNonNull(AbstractFuture.class.getSuperclass()).getDeclaredMethods()))
.filter(
m ->
m.getName().equals("getDoneValue")
|| m.getName().equals("casValue")
|| m.getName().equals("casListeners")
|| m.getName().equals("gasListeners"))
.forEach(ignoredMembers::add);
}
/**
* Sets a default value that can be used for any parameter of type {@code type}. Returns this
* object.
*/
@CanIgnoreReturnValue
public <T> NullPointerTester setDefault(Class<T> type, T value) {
defaults.putInstance(type, checkNotNull(value));
return this;
}
/**
* Ignore {@code method} in the tests that follow. Returns this object.
*
* @since 13.0
*/
@CanIgnoreReturnValue
public NullPointerTester ignore(Method method) {
ignoredMembers.add(checkNotNull(method));
return this;
}
/**
* Ignore {@code constructor} in the tests that follow. Returns this object.
*
* @since 22.0
*/
@CanIgnoreReturnValue
public NullPointerTester ignore(Constructor<?> constructor) {
ignoredMembers.add(checkNotNull(constructor));
return this;
}
/**
* Runs {@link #testConstructor} on every constructor in class {@code c} that has at least {@code
* minimalVisibility}.
*/
public void testConstructors(Class<?> c, Visibility minimalVisibility) {
for (Constructor<?> constructor : c.getDeclaredConstructors()) {
if (minimalVisibility.isVisible(constructor) && !isIgnored(constructor)) {
testConstructor(constructor);
}
}
}
/** Runs {@link #testConstructor} on every public constructor in class {@code c}. */
public void testAllPublicConstructors(Class<?> c) {
testConstructors(c, Visibility.PUBLIC);
}
/**
* Runs {@link #testMethod} on every static method of class {@code c} that has at least {@code
* minimalVisibility}, including those "inherited" from superclasses of the same package.
*/
public void testStaticMethods(Class<?> c, Visibility minimalVisibility) {
for (Method method : minimalVisibility.getStaticMethods(c)) {
if (!isIgnored(method)) {
testMethod(null, method);
}
}
}
/**
* Runs {@link #testMethod} on every public static method of class {@code c}, including those
* "inherited" from superclasses of the same package.
*/
public void testAllPublicStaticMethods(Class<?> c) {
testStaticMethods(c, Visibility.PUBLIC);
}
/**
* Runs {@link #testMethod} on every instance method of the | NullPointerTester |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/cfg/Environment.java | {
"start": 3018,
"end": 4481
} | class ____</td>
* </tr>
* <tr>
* <td>{@value #MAX_FETCH_DEPTH}</td>
* <td>maximum depth of outer join fetching</td>
* </tr>
* <tr>
* <td>{@value #STATEMENT_BATCH_SIZE}</td>
* <td>enable use of JDBC2 batch API for drivers which support it</td>
* </tr>
* <tr>
* <td>{@value #STATEMENT_FETCH_SIZE}</td>
* <td>set the JDBC fetch size</td>
* </tr>
* <tr>
* <td>{@value #USE_GET_GENERATED_KEYS}</td>
* <td>enable use of JDBC3 {@link java.sql.PreparedStatement#getGeneratedKeys()}
* to retrieve natively generated keys after insert. Requires JDBC3+ driver and
* JRE1.4+</td>
* </tr>
* <tr>
* <td>{@value #HBM2DDL_AUTO}</td>
* <td>enable auto DDL export</td>
* </tr>
* <tr>
* <td>{@value #DEFAULT_SCHEMA}</td>
* <td>use given schema name for unqualified tables (always optional)</td>
* </tr>
* <tr>
* <td>{@value #DEFAULT_CATALOG}</td>
* <td>use given catalog name for unqualified tables (always optional)</td>
* </tr>
* <tr>
* <td>{@value #JTA_PLATFORM}</td>
* <td>name of {@link org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform}
* implementation</td>
* </tr>
* </table>
*
* @see org.hibernate.SessionFactory
*
* @apiNote This is really considered an internal contract, but leaving in place in this
* package as many applications use it historically. However, consider migrating to use
* {@link AvailableSettings} instead.
*
* @author Gavin King
*/
@Internal
public final | name |
java | redisson__redisson | redisson-quarkus/redisson-quarkus-30/cdi/integration-tests/src/main/java/org/redisson/quarkus/client/it/QuarkusRedissonClientResource.java | {
"start": 1762,
"end": 3481
} | class ____ {
@Inject
RedissonClient redisson;
@GET
@Path("/map")
public String map() {
RMap<String, Integer> m = redisson.getMap("test");
m.put("1", 2);
return m.get("1").toString();
}
@GET
@Path("/remoteService")
public String remoteService() {
RRemoteService t = redisson.getRemoteService("test");
t.register(RemService.class, new RemoteServiceImpl());
RemService rs = t.get(RemService.class);
return rs.executeMe();
}
@GET
@Path("/pingAll")
public String pingAll() {
redisson.getRedisNodes(RedisNodes.SINGLE).pingAll();
return "OK";
}
@GET
@Path("/executeTask")
public String executeTask() throws ExecutionException, InterruptedException {
RScheduledExecutorService t = redisson.getExecutorService("test");
t.registerWorkers(WorkerOptions.defaults());
RExecutorFuture<String> r = t.submit(new Task());
return r.get();
}
@GET
@Path("/bucket")
public Uni<String> getBucket(){
RBucketReactive<String> bucket = redisson.reactive().getBucket("test-bucket", new StringCodec());
return Uni.createFrom().future(bucket.set("world").toFuture())
.flatMap( unused -> Uni.createFrom().future(bucket.get().toFuture()));
}
@GET
@Path("/delBucket")
public Uni<Boolean> deleteBucket(){
RBucketReactive<String> bucket = redisson.reactive().getBucket("test-bucket", new StringCodec());
return Uni.createFrom().future(bucket.set("world").toFuture())
.flatMap( unused -> Uni.createFrom().future(bucket.delete().toFuture()));
}
}
| QuarkusRedissonClientResource |
java | netty__netty | codec-classes-quic/src/main/java/io/netty/handler/codec/quic/InsecureQuicTokenHandler.java | {
"start": 1034,
"end": 2967
} | class ____ implements QuicTokenHandler {
private static final String SERVER_NAME = "netty";
private static final byte[] SERVER_NAME_BYTES = SERVER_NAME.getBytes(CharsetUtil.US_ASCII);
private static final ByteBuf SERVER_NAME_BUFFER = Unpooled.unreleasableBuffer(
Unpooled.wrappedBuffer(SERVER_NAME_BYTES)).asReadOnly();
// Just package-private for unit tests
static final int MAX_TOKEN_LEN = Quic.MAX_CONN_ID_LEN +
NetUtil.LOCALHOST6.getAddress().length + SERVER_NAME_BYTES.length;
private InsecureQuicTokenHandler() {
Quic.ensureAvailability();
}
public static final InsecureQuicTokenHandler INSTANCE = new InsecureQuicTokenHandler();
@Override
public boolean writeToken(ByteBuf out, ByteBuf dcid, InetSocketAddress address) {
byte[] addr = address.getAddress().getAddress();
out.writeBytes(SERVER_NAME_BYTES)
.writeBytes(addr)
.writeBytes(dcid, dcid.readerIndex(), dcid.readableBytes());
return true;
}
@Override
public int validateToken(ByteBuf token, InetSocketAddress address) {
final byte[] addr = address.getAddress().getAddress();
int minLength = SERVER_NAME_BYTES.length + address.getAddress().getAddress().length;
if (token.readableBytes() <= SERVER_NAME_BYTES.length + addr.length) {
return -1;
}
if (!SERVER_NAME_BUFFER.equals(token.slice(0, SERVER_NAME_BYTES.length))) {
return -1;
}
ByteBuf addressBuffer = Unpooled.wrappedBuffer(addr);
try {
if (!addressBuffer.equals(token.slice(SERVER_NAME_BYTES.length, addr.length))) {
return -1;
}
} finally {
addressBuffer.release();
}
return minLength;
}
@Override
public int maxTokenLength() {
return MAX_TOKEN_LEN;
}
}
| InsecureQuicTokenHandler |
java | apache__camel | tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/AbstractGenerateConfigurerMojo.java | {
"start": 2161,
"end": 2213
} | class ____ configurer generator.
*/
public abstract | for |
java | google__guice | core/test/com/google/inject/TypeListenerTest.java | {
"start": 26490,
"end": 26845
} | class ____ {
int guiceInjected = 0;
int userInjected = 0;
int listenersNotified = 0;
@Inject
void guiceInjected() {
guiceInjected++;
}
void assertAllCounts(int expected) {
assertEquals(expected, guiceInjected);
assertEquals(expected, userInjected);
assertEquals(expected, listenersNotified);
}
}
}
| D |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/tasks/TwoInputStreamTask.java | {
"start": 1895,
"end": 5174
} | class ____<IN1, IN2, OUT> extends AbstractTwoInputStreamTask<IN1, IN2, OUT> {
@Nullable private CheckpointBarrierHandler checkpointBarrierHandler;
public TwoInputStreamTask(Environment env) throws Exception {
super(env);
}
@Override
protected Optional<CheckpointBarrierHandler> getCheckpointBarrierHandler() {
return Optional.ofNullable(checkpointBarrierHandler);
}
@SuppressWarnings("unchecked")
@Override
protected void createInputProcessor(
List<IndexedInputGate> inputGates1,
List<IndexedInputGate> inputGates2,
Function<Integer, StreamPartitioner<?>> gatePartitioners) {
// create an input instance for each input
checkpointBarrierHandler =
InputProcessorUtil.createCheckpointBarrierHandler(
this,
getJobConfiguration(),
configuration,
getCheckpointCoordinator(),
getTaskNameWithSubtaskAndId(),
new List[] {inputGates1, inputGates2},
Collections.emptyList(),
mainMailboxExecutor,
systemTimerService);
CheckpointedInputGate[] checkpointedInputGates =
InputProcessorUtil.createCheckpointedMultipleInputGate(
mainMailboxExecutor,
new List[] {inputGates1, inputGates2},
getEnvironment().getMetricGroup().getIOMetricGroup(),
checkpointBarrierHandler,
configuration);
checkState(checkpointedInputGates.length == 2);
inputProcessor =
StreamTwoInputProcessorFactory.create(
this,
checkpointedInputGates,
getEnvironment().getIOManager(),
getEnvironment().getMemoryManager(),
getEnvironment().getMetricGroup().getIOMetricGroup(),
mainOperator,
input1WatermarkGauge,
input2WatermarkGauge,
operatorChain,
getConfiguration(),
getEnvironment().getTaskManagerInfo().getConfiguration(),
getJobConfiguration(),
getExecutionConfig(),
getUserCodeClassLoader(),
setupNumRecordsInCounter(mainOperator),
getEnvironment().getTaskStateManager().getInputRescalingDescriptor(),
gatePartitioners,
getEnvironment().getTaskInfo(),
getCanEmitBatchOfRecords());
}
// This is needed for StreamMultipleInputProcessor#processInput to preserve the existing
// behavior of choosing an input every time a record is emitted. This behavior is good for
// fairness between input consumption. But it can reduce throughput due to added control
// flow cost on the per-record code path.
@Override
public CanEmitBatchOfRecordsChecker getCanEmitBatchOfRecords() {
return () -> false;
}
}
| TwoInputStreamTask |
java | quarkusio__quarkus | extensions/reactive-routes/deployment/src/main/java/io/quarkus/vertx/web/deployment/AnnotatedRouteFilterBuildItem.java | {
"start": 226,
"end": 833
} | class ____ extends MultiBuildItem {
private final BeanInfo bean;
private final AnnotationInstance routeFilter;
private final MethodInfo method;
public AnnotatedRouteFilterBuildItem(BeanInfo bean, MethodInfo method, AnnotationInstance routeFilter) {
this.bean = bean;
this.method = method;
this.routeFilter = routeFilter;
}
public BeanInfo getBean() {
return bean;
}
public MethodInfo getMethod() {
return method;
}
public AnnotationInstance getRouteFilter() {
return routeFilter;
}
}
| AnnotatedRouteFilterBuildItem |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/reflect/ConstructorUtils.java | {
"start": 7610,
"end": 7918
} | class ____ the right constructor from the types of the arguments.
*
* <p>
* This locates and calls a constructor. The constructor signature must match the argument types by assignment compatibility.
* </p>
*
* @param <T> the type to be constructed.
* @param cls the | inferring |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/ql/NamedNativeQueryWithGenericsTest.java | {
"start": 1286,
"end": 2063
} | class ____ {
@Test
public void testNamedNativeQuery(SessionFactoryScope scope) {
scope.inTransaction( session -> {
List myEntities = session.getNamedNativeQuery( "MyEntity.findMyEntity" )
.addEntity( "ame", MyEntity.class )
.getResultList();
assertNotNull( myEntities );
} );
}
@NamedNativeQueries(value = {
@NamedNativeQuery(
name = "MyEntity.findMyEntity",
query = "WITH all_my_entities AS " +
"(SELECT me.* FROM my_entity me) " +
"SELECT {ame.*} FROM all_my_entities ame")
})
@Entity(name = "my_entity")
@Table(name = "my_entity")
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@DiscriminatorColumn(discriminatorType = DiscriminatorType.STRING, name = "entity_type")
public static | NamedNativeQueryWithGenericsTest |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/LinkedCaseInsensitiveMap.java | {
"start": 13252,
"end": 13393
} | class ____ extends EntryIterator<String> {
@Override
public String next() {
return nextEntry().getKey();
}
}
private | KeySetIterator |
java | apache__camel | components/camel-beanio/src/main/java/org/apache/camel/dataformat/beanio/BeanIOHelper.java | {
"start": 1029,
"end": 2536
} | class ____ {
private BeanIOHelper() {
// utility class
}
public static BeanReaderErrorHandler getOrCreateBeanReaderErrorHandler(
BeanIOConfiguration configuration, Exchange exchange,
List<Object> results, BeanIOIterator iterator)
throws Exception {
BeanReaderErrorHandler answer = null;
if (ObjectHelper.isNotEmpty(configuration.getBeanReaderErrorHandlerType())) {
Class<?> clazz = exchange.getContext().getClassResolver()
.resolveMandatoryClass(configuration.getBeanReaderErrorHandlerType());
Object instance = exchange.getContext().getInjector().newInstance(clazz);
answer = (BeanReaderErrorHandler) instance;
}
if (answer == null && ObjectHelper.isNotEmpty(configuration.getBeanReaderErrorHandler())) {
answer = configuration.getBeanReaderErrorHandler();
}
if (answer == null) {
answer = new BeanIOErrorHandler();
}
// if the error handler extends BeanIOErrorHandler then its prototype scoped
// and then inject the current exchange and init
if (answer instanceof BeanIOErrorHandler) {
BeanIOErrorHandler eh = (BeanIOErrorHandler) answer;
eh.setConfiguration(configuration);
eh.setExchange(exchange);
eh.setResults(results);
eh.setIterator(iterator);
eh.init();
}
return answer;
}
}
| BeanIOHelper |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/sync/ForStSyncMapState.java | {
"start": 27824,
"end": 30581
} | class ____ implements StateSnapshotTransformer<byte[]> {
private static final byte[] NULL_VALUE;
private static final byte NON_NULL_VALUE_PREFIX;
static {
DataOutputSerializer dov = new DataOutputSerializer(1);
try {
dov.writeBoolean(true);
NULL_VALUE = dov.getCopyOfBuffer();
dov.clear();
dov.writeBoolean(false);
NON_NULL_VALUE_PREFIX = dov.getSharedBuffer()[0];
} catch (IOException e) {
throw new FlinkRuntimeException(
"Failed to serialize boolean flag of map user null value", e);
}
}
private final StateSnapshotTransformer<byte[]> elementTransformer;
private final DataInputDeserializer div;
StateSnapshotTransformerWrapper(StateSnapshotTransformer<byte[]> originalTransformer) {
this.elementTransformer = originalTransformer;
this.div = new DataInputDeserializer();
}
@Override
@Nullable
public byte[] filterOrTransform(@Nullable byte[] value) {
if (value == null || isNull(value)) {
return NULL_VALUE;
} else {
// we have to skip the first byte indicating null user value
// TODO: optimization here could be to work with slices and not byte arrays
// and copy slice sub-array only when needed
byte[] woNullByte = Arrays.copyOfRange(value, 1, value.length);
byte[] filteredValue = elementTransformer.filterOrTransform(woNullByte);
if (filteredValue == null) {
filteredValue = NULL_VALUE;
} else if (filteredValue != woNullByte) {
filteredValue = prependWithNonNullByte(filteredValue, value);
} else {
filteredValue = value;
}
return filteredValue;
}
}
private boolean isNull(byte[] value) {
try {
div.setBuffer(value, 0, 1);
return div.readBoolean();
} catch (IOException e) {
throw new FlinkRuntimeException(
"Failed to deserialize boolean flag of map user null value", e);
}
}
private static byte[] prependWithNonNullByte(byte[] value, byte[] reuse) {
int len = 1 + value.length;
byte[] result = reuse.length == len ? reuse : new byte[len];
result[0] = NON_NULL_VALUE_PREFIX;
System.arraycopy(value, 0, result, 1, value.length);
return result;
}
}
}
| StateSnapshotTransformerWrapper |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java | {
"start": 649,
"end": 1605
} | class ____ implements AggregatorFunctionSupplier {
public MinBooleanAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return MinBooleanAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return MinBooleanGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public MinBooleanAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return MinBooleanAggregatorFunction.create(driverContext, channels);
}
@Override
public MinBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
List<Integer> channels) {
return MinBooleanGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return "min of booleans";
}
}
| MinBooleanAggregatorFunctionSupplier |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/context/ApplicationPidFileWriter.java | {
"start": 6669,
"end": 6846
} | interface ____ {
@Nullable String getValue(SpringApplicationEvent event);
}
/**
* {@link Property} obtained from Spring's {@link Environment}.
*/
private static | Property |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/GenericTypeResolverTests.java | {
"start": 14268,
"end": 14342
} | interface ____<V> extends Repository<V, Long> {
}
static | IdFixingRepository |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/authentication/event/AuthenticationFailureLockedEvent.java | {
"start": 982,
"end": 1297
} | class ____ extends AbstractAuthenticationFailureEvent {
@Serial
private static final long serialVersionUID = -5126110096093568463L;
public AuthenticationFailureLockedEvent(Authentication authentication, AuthenticationException exception) {
super(authentication, exception);
}
}
| AuthenticationFailureLockedEvent |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.