language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/joda/JodaTest_1_LocalDateTime.java | {
"start": 231,
"end": 2388
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
VO vo = new VO();
vo.setDate(LocalDateTime.now());
String text = JSON.toJSONString(vo);
VO vo1 = JSON.parseObject(text, VO.class);
Assert.assertEquals(JSON.toJSONString(vo.getDate()), JSON.toJSONString(vo1.getDate()));
}
/**
* 方法描述: 测试LocalDateTime 转化时间戳等 操作
* 问题点1、 LocalDateTime 进来的值无法确定其时区,所以此处统一按着系统时区走。
* 问题点2、 如果设置 SerializerFeature.WriteDateUseDateFormat 时按着 "yyyy-MM-dd HH:mm:ss" 进行格式化
* 问题点3: 如果设置 SerializerFeature.UseISO8601DateFormat 时按着ISO8601的标准 "yyyy-MM-dd'T'HH:mm:ss"进行格式化
* 问题点4:
* 1)格式化LocalDateTime时, 默认格式成 时间戳格式,
* 2)如设置WriteDateUseDateFormat 按 "yyyy-MM-dd HH:mm:ss" 进行格式化
* 3)如设置UseISO8601DateFormat 按ISO8601的标准 "yyyy-MM-dd'T'HH:mm:ss"进行格式化
* 4)如设置WriteDateUseDateFormat、UseISO8601DateFormat 同时设置,则按ISO8601的标准 "yyyy-MM-dd'T'HH:mm:ss"进行格式化
* @author wuqiong 2017/11/22 15:08
*/
public void test_toJsonString_ofLong()throws Exception {
VO vo = new VO();
vo.setDate(LocalDateTime.now());
VO vo1 = JSON.parseObject("{\"date\":1511334591189}", VO.class);
String text2 = JSON.toJSONString(vo, SerializerFeature.WriteDateUseDateFormat);
System.out.println(text2);//{"date":"2017-11-22 15:09:51"}
VO vo2 = JSON.parseObject(text2, VO.class);
String text3 = JSON.toJSONString(vo, SerializerFeature.UseISO8601DateFormat);
System.out.println(text3);//{"date":"2017-11-22T15:09:51"}
VO vo3 = JSON.parseObject(text3, VO.class);
String text4 = JSON.toJSONString(vo, SerializerFeature.UseISO8601DateFormat, SerializerFeature.WriteDateUseDateFormat);
System.out.println(text4);//{"date":"2017-11-22T15:09:51"}
VO vo4 = JSON.parseObject(text4, VO.class);
}
public void test_for_issue_1() throws Exception {
String text = "{\"date\":\"2018-08-03 22:38:33.145\"}";
VO vo1 = JSON.parseObject(text, VO.class);
assertNotNull(vo1.date);
}
public static | JodaTest_1_LocalDateTime |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/collection/erroneous/ErroneousCollectionNoKeyMappingFound.java | {
"start": 431,
"end": 681
} | interface ____ {
ErroneousCollectionNoKeyMappingFound INSTANCE =
Mappers.getMapper( ErroneousCollectionNoKeyMappingFound.class );
Map<NoProperties, String> map(Map<WithProperties, String> source);
}
| ErroneousCollectionNoKeyMappingFound |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/args/OnCloseInvalidArgumentTest.java | {
"start": 409,
"end": 820
} | class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(Endpoint.class);
})
.setExpectedException(WebSocketException.class);
@Test
void testInvalidArgument() {
fail();
}
@WebSocket(path = "/end")
public static | OnCloseInvalidArgumentTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/decorators/generics/DecoratorWithTypeVariableTest.java | {
"start": 1663,
"end": 2026
} | class ____<K, V> {
final K key;
final V value;
public MyParameterizedType(K key, V value) {
this.key = key;
this.value = value;
}
@Override
public String toString() {
return "key=" + key.toString() + ", value=" + value.toString();
}
}
public | MyParameterizedType |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/PanacheEntityBase.java | {
"start": 1251,
"end": 1565
} | class ____ extra operations (eg. CriteriaQueries)
*
* @return the {@link EntityManager} for this entity class
*/
@GenerateBridge
public static EntityManager getEntityManager() {
throw implementationInjectionMissing();
}
/**
* Returns the {@link Session} for this entity | for |
java | grpc__grpc-java | interop-testing/src/main/java/io/grpc/testing/integration/SoakClient.java | {
"start": 1965,
"end": 2344
} | class ____ {
public SoakIterationResult(long latencyMs, Status status) {
this.latencyMs = latencyMs;
this.status = status;
}
public long getLatencyMs() {
return latencyMs;
}
public Status getStatus() {
return status;
}
private long latencyMs = -1;
private Status status = Status.OK;
}
private static | SoakIterationResult |
java | apache__rocketmq | broker/src/main/java/org/apache/rocketmq/broker/longpolling/PollingHeader.java | {
"start": 1016,
"end": 2216
} | class ____ {
private final String consumerGroup;
private final String topic;
private final int queueId;
private final long bornTime;
private final long pollTime;
public PollingHeader(PopMessageRequestHeader requestHeader) {
this.consumerGroup = requestHeader.getConsumerGroup();
this.topic = requestHeader.getTopic();
this.queueId = requestHeader.getQueueId();
this.bornTime = requestHeader.getBornTime();
this.pollTime = requestHeader.getPollTime();
}
public PollingHeader(NotificationRequestHeader requestHeader) {
this.consumerGroup = requestHeader.getConsumerGroup();
this.topic = requestHeader.getTopic();
this.queueId = requestHeader.getQueueId();
this.bornTime = requestHeader.getBornTime();
this.pollTime = requestHeader.getPollTime();
}
public String getConsumerGroup() {
return consumerGroup;
}
public String getTopic() {
return topic;
}
public int getQueueId() {
return queueId;
}
public long getBornTime() {
return bornTime;
}
public long getPollTime() {
return pollTime;
}
}
| PollingHeader |
java | spring-projects__spring-boot | module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/UserDetailsServiceAutoConfigurationTests.java | {
"start": 12745,
"end": 13040
} | class ____ {
@Bean
AuthenticationProvider myAuthenticationProvider() {
return new TestingAuthenticationProvider();
}
}
@Configuration(proxyBeanMethods = false)
@EnableWebSecurity
@EnableConfigurationProperties(SecurityProperties.class)
static | TestAuthenticationProviderConfiguration |
java | apache__spark | sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaSimpleWritableDataSource.java | {
"start": 10304,
"end": 11117
} | class ____ implements DataWriter<InternalRow> {
private final FileSystem fs;
private final Path file;
private final FSDataOutputStream out;
JavaCSVDataWriter(FileSystem fs, Path file) throws IOException {
this.fs = fs;
this.file = file;
out = fs.create(file);
}
@Override
public void write(InternalRow record) throws IOException {
out.writeBytes(String.format("%d,%d\n", record.getInt(0), record.getInt(1)));
}
@Override
public WriterCommitMessage commit() throws IOException {
out.close();
return null;
}
@Override
public void abort() throws IOException {
try {
out.close();
} finally {
fs.delete(file, false);
}
}
@Override
public void close() {
}
}
}
| JavaCSVDataWriter |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/catalog/DefaultResolvedCatalogModel.java | {
"start": 1268,
"end": 4616
} | class ____ implements ResolvedCatalogModel, CatalogModel {
private final CatalogModel origin;
private final ResolvedSchema resolvedInputSchema;
private final ResolvedSchema resolvedOutputSchema;
public DefaultResolvedCatalogModel(
CatalogModel origin,
ResolvedSchema resolvedInputSchema,
ResolvedSchema resolvedOutputSchema) {
this.origin =
Preconditions.checkNotNull(origin, "Original catalog model must not be null.");
this.resolvedInputSchema =
Preconditions.checkNotNull(
resolvedInputSchema, "Resolved input schema must not be null.");
this.resolvedOutputSchema =
Preconditions.checkNotNull(
resolvedOutputSchema, "Resolved output schema must not be null.");
}
@Override
public CatalogModel getOrigin() {
return origin;
}
@Override
public ResolvedSchema getResolvedInputSchema() {
return resolvedInputSchema;
}
@Override
public ResolvedSchema getResolvedOutputSchema() {
return resolvedOutputSchema;
}
public Map<String, String> toProperties(SqlFactory sqlFactory) {
return CatalogPropertiesUtil.serializeResolvedCatalogModel(this, sqlFactory);
}
// --------------------------------------------------------------------------------------------
// Delegations to original CatalogModel
// --------------------------------------------------------------------------------------------
@Override
public Map<String, String> getOptions() {
return origin.getOptions();
}
@Override
public Schema getInputSchema() {
return origin.getInputSchema();
}
@Override
public Schema getOutputSchema() {
return origin.getOutputSchema();
}
@Override
public String getComment() {
return origin.getComment();
}
@Override
public ResolvedCatalogModel copy() {
return new DefaultResolvedCatalogModel(
origin.copy(), resolvedInputSchema, resolvedOutputSchema);
}
@Override
public ResolvedCatalogModel copy(Map<String, String> modelOptions) {
return new DefaultResolvedCatalogModel(
origin.copy(modelOptions), resolvedInputSchema, resolvedOutputSchema);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DefaultResolvedCatalogModel that = (DefaultResolvedCatalogModel) o;
return Objects.equals(origin, that.origin)
&& Objects.equals(resolvedInputSchema, that.resolvedInputSchema)
&& Objects.equals(resolvedOutputSchema, that.resolvedOutputSchema);
}
@Override
public int hashCode() {
return Objects.hash(origin, resolvedInputSchema, resolvedOutputSchema);
}
@Override
public String toString() {
return "ResolvedCatalogModel{"
+ "origin="
+ origin
+ ", resolvedInputSchema="
+ resolvedInputSchema
+ ", resolvedOutputSchema="
+ resolvedOutputSchema
+ '}';
}
}
| DefaultResolvedCatalogModel |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/NitriteEndpointBuilderFactory.java | {
"start": 17058,
"end": 20588
} | interface ____
extends
NitriteEndpointConsumerBuilder,
NitriteEndpointProducerBuilder {
default AdvancedNitriteEndpointBuilder advanced() {
return (AdvancedNitriteEndpointBuilder) this;
}
/**
* Name of Nitrite collection. Cannot be used in combination with
* repositoryClass option.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param collection the value to set
* @return the dsl builder
*/
default NitriteEndpointBuilder collection(String collection) {
doSetProperty("collection", collection);
return this;
}
/**
* Class of Nitrite ObjectRepository. Cannot be used in combination with
* collection option.
*
* The option is a: <code>java.lang.Class<java.lang.Object></code>
* type.
*
* Group: common
*
* @param repositoryClass the value to set
* @return the dsl builder
*/
default NitriteEndpointBuilder repositoryClass(Class<java.lang.Object> repositoryClass) {
doSetProperty("repositoryClass", repositoryClass);
return this;
}
/**
* Class of Nitrite ObjectRepository. Cannot be used in combination with
* collection option.
*
* The option will be converted to a
* <code>java.lang.Class<java.lang.Object></code> type.
*
* Group: common
*
* @param repositoryClass the value to set
* @return the dsl builder
*/
default NitriteEndpointBuilder repositoryClass(String repositoryClass) {
doSetProperty("repositoryClass", repositoryClass);
return this;
}
/**
* Optional name of ObjectRepository. Can be only used in combination
* with repositoryClass, otherwise have no effect.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param repositoryName the value to set
* @return the dsl builder
*/
default NitriteEndpointBuilder repositoryName(String repositoryName) {
doSetProperty("repositoryName", repositoryName);
return this;
}
/**
* Password for Nitrite database. Required, if option username
* specified.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default NitriteEndpointBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* Username for Nitrite database. Database is not secured if option not
* specified.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default NitriteEndpointBuilder username(String username) {
doSetProperty("username", username);
return this;
}
}
/**
* Advanced builder for endpoint for the Nitrite component.
*/
public | NitriteEndpointBuilder |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/service/repository/ConfigRowMapperInjector.java | {
"start": 20179,
"end": 21159
} | class ____ implements RowMapper<ConfigInfo4Tag> {
@Override
public ConfigInfo4Tag mapRow(ResultSet rs, int rowNum) throws SQLException {
ConfigInfo4Tag info = new ConfigInfo4Tag();
info.setDataId(rs.getString("data_id"));
info.setGroup(rs.getString("group_id"));
info.setTenant(rs.getString("tenant_id"));
info.setTag(rs.getString("tag_id"));
info.setAppName(rs.getString("app_name"));
try {
info.setContent(rs.getString("content"));
} catch (SQLException ignore) {
}
try {
info.setId(rs.getLong("id"));
} catch (SQLException ignore) {
}
try {
info.setMd5(rs.getString("md5"));
} catch (SQLException ignore) {
}
return info;
}
}
public static final | ConfigInfo4TagRowMapper |
java | apache__camel | components/camel-dns/src/generated/java/org/apache/camel/component/dns/cloud/DnsServiceDiscoveryFactoryConfigurer.java | {
"start": 734,
"end": 2265
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.dns.cloud.DnsServiceDiscoveryFactory target = (org.apache.camel.component.dns.cloud.DnsServiceDiscoveryFactory) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "domain": target.setDomain(property(camelContext, java.lang.String.class, value)); return true;
case "proto": target.setProto(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "domain": return java.lang.String.class;
case "proto": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.dns.cloud.DnsServiceDiscoveryFactory target = (org.apache.camel.component.dns.cloud.DnsServiceDiscoveryFactory) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "domain": return target.getDomain();
case "proto": return target.getProto();
default: return null;
}
}
}
| DnsServiceDiscoveryFactoryConfigurer |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/security/annotation/HttpAuthenticationMechanism.java | {
"start": 843,
"end": 1263
} | interface ____ {
/**
* {@link io.quarkus.vertx.http.runtime.security.HttpAuthenticationMechanism} scheme as returned by
* {@link HttpCredentialTransport#getAuthenticationScheme()}.
* Custom mechanisms can set this name inside
* {@link io.quarkus.vertx.http.runtime.security.HttpAuthenticationMechanism#getCredentialTransport(RoutingContext)}.
*/
String value();
}
| HttpAuthenticationMechanism |
java | elastic__elasticsearch | benchmarks/src/main/java/org/elasticsearch/benchmark/bytes/PagedBytesReferenceReadLongBenchmark.java | {
"start": 1570,
"end": 2613
} | class ____ {
@Param(value = { "1" })
private int dataMb;
private BytesReference pagedBytes;
private StreamInput streamInput;
@Setup
public void initResults() throws IOException {
final BytesStreamOutput tmp = new BytesStreamOutput();
final long bytes = ByteSizeValue.of(dataMb, ByteSizeUnit.MB).getBytes();
for (int i = 0; i < bytes / 8; i++) {
tmp.writeLong(i);
}
pagedBytes = tmp.bytes();
if (pagedBytes instanceof PagedBytesReference == false) {
throw new AssertionError("expected PagedBytesReference but saw [" + pagedBytes.getClass() + "]");
}
this.streamInput = pagedBytes.streamInput();
}
@Benchmark
public long readLong() throws IOException {
long res = 0L;
streamInput.reset();
final int reads = pagedBytes.length() / 8;
for (int i = 0; i < reads; i++) {
res = res ^ streamInput.readLong();
}
return res;
}
}
| PagedBytesReferenceReadLongBenchmark |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/hooks/MasterHooksTest.java | {
"start": 7475,
"end": 7672
} | class ____ implements Executor {
Runnable command;
@Override
public void execute(@Nonnull Runnable command) {
this.command = command;
}
}
}
| TestExecutor |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/howto/testing/slicetests/MySecurityConfiguration.java | {
"start": 995,
"end": 1216
} | class ____ {
@Bean
public SecurityFilterChain securityFilterChain(HttpSecurity http) {
http.authorizeHttpRequests((requests) -> requests.anyRequest().authenticated());
return http.build();
}
}
| MySecurityConfiguration |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/oidc/authentication/OidcLogoutAuthenticationProvider.java | {
"start": 2841,
"end": 10211
} | class ____ implements AuthenticationProvider {
private static final OAuth2TokenType ID_TOKEN_TOKEN_TYPE = new OAuth2TokenType(OidcParameterNames.ID_TOKEN);
private final Log logger = LogFactory.getLog(getClass());
private final RegisteredClientRepository registeredClientRepository;
private final OAuth2AuthorizationService authorizationService;
private final SessionRegistry sessionRegistry;
private Consumer<OidcLogoutAuthenticationContext> authenticationValidator = new OidcLogoutAuthenticationValidator();
/**
* Constructs an {@code OidcLogoutAuthenticationProvider} using the provided
* parameters.
* @param registeredClientRepository the repository of registered clients
* @param authorizationService the authorization service
* @param sessionRegistry the {@link SessionRegistry} used to track OpenID Connect
* sessions
*/
public OidcLogoutAuthenticationProvider(RegisteredClientRepository registeredClientRepository,
OAuth2AuthorizationService authorizationService, SessionRegistry sessionRegistry) {
Assert.notNull(registeredClientRepository, "registeredClientRepository cannot be null");
Assert.notNull(authorizationService, "authorizationService cannot be null");
Assert.notNull(sessionRegistry, "sessionRegistry cannot be null");
this.registeredClientRepository = registeredClientRepository;
this.authorizationService = authorizationService;
this.sessionRegistry = sessionRegistry;
}
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
OidcLogoutAuthenticationToken oidcLogoutAuthentication = (OidcLogoutAuthenticationToken) authentication;
OAuth2Authorization authorization = this.authorizationService
.findByToken(oidcLogoutAuthentication.getIdTokenHint(), ID_TOKEN_TOKEN_TYPE);
if (authorization == null) {
throwError(OAuth2ErrorCodes.INVALID_TOKEN, "id_token_hint");
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved authorization with ID Token");
}
OAuth2Authorization.Token<OidcIdToken> authorizedIdToken = authorization.getToken(OidcIdToken.class);
if (authorizedIdToken.isInvalidated() || authorizedIdToken.isBeforeUse()) {
// Expired ID Token should be accepted
throwError(OAuth2ErrorCodes.INVALID_TOKEN, "id_token_hint");
}
RegisteredClient registeredClient = this.registeredClientRepository
.findById(authorization.getRegisteredClientId());
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved registered client");
}
OidcIdToken idToken = authorizedIdToken.getToken();
// Validate client identity
List<String> audClaim = idToken.getAudience();
if (CollectionUtils.isEmpty(audClaim) || !audClaim.contains(registeredClient.getClientId())) {
throwError(OAuth2ErrorCodes.INVALID_TOKEN, IdTokenClaimNames.AUD);
}
if (StringUtils.hasText(oidcLogoutAuthentication.getClientId())
&& !oidcLogoutAuthentication.getClientId().equals(registeredClient.getClientId())) {
throwError(OAuth2ErrorCodes.INVALID_REQUEST, OAuth2ParameterNames.CLIENT_ID);
}
OidcLogoutAuthenticationContext context = OidcLogoutAuthenticationContext.with(oidcLogoutAuthentication)
.registeredClient(registeredClient)
.build();
this.authenticationValidator.accept(context);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Validated logout request parameters");
}
// Validate user identity
if (oidcLogoutAuthentication.isPrincipalAuthenticated()) {
Authentication currentUserPrincipal = (Authentication) oidcLogoutAuthentication.getPrincipal();
Authentication authorizedUserPrincipal = authorization.getAttribute(Principal.class.getName());
if (!StringUtils.hasText(idToken.getSubject())
|| !currentUserPrincipal.getName().equals(authorizedUserPrincipal.getName())) {
throwError(OAuth2ErrorCodes.INVALID_TOKEN, IdTokenClaimNames.SUB);
}
// Check for active session
if (StringUtils.hasText(oidcLogoutAuthentication.getSessionId())) {
SessionInformation sessionInformation = findSessionInformation(currentUserPrincipal,
oidcLogoutAuthentication.getSessionId());
if (sessionInformation != null) {
String sessionIdHash;
try {
sessionIdHash = createHash(sessionInformation.getSessionId());
}
catch (NoSuchAlgorithmException ex) {
OAuth2Error error = new OAuth2Error(OAuth2ErrorCodes.SERVER_ERROR,
"Failed to compute hash for Session ID.", null);
throw new OAuth2AuthenticationException(error);
}
String sidClaim = idToken.getClaim("sid");
if (!StringUtils.hasText(sidClaim) || !sidClaim.equals(sessionIdHash)) {
throwError(OAuth2ErrorCodes.INVALID_TOKEN, "sid");
}
}
}
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Authenticated logout request");
}
return new OidcLogoutAuthenticationToken(idToken, (Authentication) oidcLogoutAuthentication.getPrincipal(),
oidcLogoutAuthentication.getSessionId(), oidcLogoutAuthentication.getClientId(),
oidcLogoutAuthentication.getPostLogoutRedirectUri(), oidcLogoutAuthentication.getState());
}
@Override
public boolean supports(Class<?> authentication) {
return OidcLogoutAuthenticationToken.class.isAssignableFrom(authentication);
}
/**
* Sets the {@code Consumer} providing access to the
* {@link OidcLogoutAuthenticationContext} and is responsible for validating specific
* OpenID Connect RP-Initiated Logout Request parameters associated in the
* {@link OidcLogoutAuthenticationToken}. The default authentication validator is
* {@link OidcLogoutAuthenticationValidator}.
*
* <p>
* <b>NOTE:</b> The authentication validator MUST throw
* {@link OAuth2AuthenticationException} if validation fails.
* @param authenticationValidator the {@code Consumer} providing access to the
* {@link OidcLogoutAuthenticationContext} and is responsible for validating specific
* OpenID Connect RP-Initiated Logout Request parameters
*/
public void setAuthenticationValidator(Consumer<OidcLogoutAuthenticationContext> authenticationValidator) {
Assert.notNull(authenticationValidator, "authenticationValidator cannot be null");
this.authenticationValidator = authenticationValidator;
}
private SessionInformation findSessionInformation(Authentication principal, String sessionId) {
List<SessionInformation> sessions = this.sessionRegistry.getAllSessions(principal.getPrincipal(), true);
SessionInformation sessionInformation = null;
if (!CollectionUtils.isEmpty(sessions)) {
for (SessionInformation session : sessions) {
if (session.getSessionId().equals(sessionId)) {
sessionInformation = session;
break;
}
}
}
return sessionInformation;
}
private static void throwError(String errorCode, String parameterName) {
OAuth2Error error = new OAuth2Error(errorCode, "OpenID Connect 1.0 Logout Request Parameter: " + parameterName,
"https://openid.net/specs/openid-connect-rpinitiated-1_0.html#ValidationAndErrorHandling");
throw new OAuth2AuthenticationException(error);
}
private static String createHash(String value) throws NoSuchAlgorithmException {
MessageDigest md = MessageDigest.getInstance("SHA-256");
byte[] digest = md.digest(value.getBytes(StandardCharsets.US_ASCII));
return Base64.getUrlEncoder().withoutPadding().encodeToString(digest);
}
}
| OidcLogoutAuthenticationProvider |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/test/ExtraAssertions.java | {
"start": 1600,
"end": 4147
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(
ExtraAssertions.class);
private ExtraAssertions() {
}
/**
* Assert that the number of files in a destination matches that expected.
* @param message text to use in the message
* @param fs filesystem
* @param path path to list (recursively)
* @param expected expected count
* @throws IOException IO problem
*/
public static void assertFileCount(final String message,
final FileSystem fs,
final Path path,
final long expected)
throws IOException {
List<String> files = new ArrayList<>();
try (DurationInfo ignored = new DurationInfo(LOG, false,
"Counting files in %s", path)) {
applyLocatedFiles(fs.listFiles(path, true),
(status) -> files.add(status.getPath().toString()));
}
long actual = files.size();
if (actual != expected) {
String ls = files.stream().collect(Collectors.joining("\n"));
Assertions.fail(message + ": expected " + expected + " files in " + path
+ " but got " + actual + "\n" + ls);
}
}
/**
* Assert that a string contains a piece of text.
* @param text text to can.
* @param contained text to look for.
*/
public static void assertTextContains(String text, String contained) {
assertTrue(text != null && text.contains(contained),
"string \"" + contained + "\" not found in \"" + text + "\"");
}
/**
* If the condition is met, throw an AssertionError with the message
* and any nested exception.
* @param condition condition
* @param message text to use in the exception
* @param cause a (possibly null) throwable to init the cause with
* @throws AssertionError with the text and throwable if condition == true.
*/
public static void failIf(boolean condition,
String message,
Throwable cause) {
if (condition) {
ContractTestUtils.fail(message, cause);
}
}
/**
* If the condition is met, throw an AssertionError with the message
* and any nested exception.
* @param condition condition
* @param message text to use in the exception
* @param cause a (possibly null) throwable to init the cause with
* @throws AssertionError with the text and throwable if condition == true.
*/
public static void failUnless(boolean condition,
String message,
Throwable cause) {
failIf(!condition, message, cause);
}
/**
* Extract the inner cause of an exception.
* @param expected expected | ExtraAssertions |
java | google__guava | android/guava/src/com/google/common/collect/Maps.java | {
"start": 122968,
"end": 125277
} | class ____ extends KeySet implements SortedSet<K> {
@Override
public @Nullable Comparator<? super K> comparator() {
return sortedMap().comparator();
}
@Override
public SortedSet<K> subSet(
@ParametricNullness K fromElement, @ParametricNullness K toElement) {
return (SortedSet<K>) subMap(fromElement, toElement).keySet();
}
@Override
public SortedSet<K> headSet(@ParametricNullness K toElement) {
return (SortedSet<K>) headMap(toElement).keySet();
}
@Override
public SortedSet<K> tailSet(@ParametricNullness K fromElement) {
return (SortedSet<K>) tailMap(fromElement).keySet();
}
@Override
@ParametricNullness
public K first() {
return firstKey();
}
@Override
@ParametricNullness
public K last() {
return lastKey();
}
}
@Override
public @Nullable Comparator<? super K> comparator() {
return sortedMap().comparator();
}
@Override
@ParametricNullness
public K firstKey() {
// correctly throws NoSuchElementException when filtered map is empty.
return keySet().iterator().next();
}
@Override
@ParametricNullness
public K lastKey() {
SortedMap<K, V> headMap = sortedMap();
while (true) {
// correctly throws NoSuchElementException when filtered map is empty.
K key = headMap.lastKey();
// The cast is safe because the key is taken from the map.
if (apply(key, uncheckedCastNullableTToT(unfiltered.get(key)))) {
return key;
}
headMap = sortedMap().headMap(key);
}
}
@Override
public SortedMap<K, V> headMap(@ParametricNullness K toKey) {
return new FilteredEntrySortedMap<>(sortedMap().headMap(toKey), predicate);
}
@Override
public SortedMap<K, V> subMap(@ParametricNullness K fromKey, @ParametricNullness K toKey) {
return new FilteredEntrySortedMap<>(sortedMap().subMap(fromKey, toKey), predicate);
}
@Override
public SortedMap<K, V> tailMap(@ParametricNullness K fromKey) {
return new FilteredEntrySortedMap<>(sortedMap().tailMap(fromKey), predicate);
}
}
@GwtIncompatible // NavigableMap
private static final | SortedKeySet |
java | quarkusio__quarkus | extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/MessageBundleProcessor.java | {
"start": 39732,
"end": 40089
} | interface
____ bundleImpl = generateImplementation(bundle, null, null, bundleInterfaceWrapper,
defaultClassOutput, messageTemplateMethods, defaultKeyToMap, null, index);
generatedTypes.put(bundleInterface.name().toString(), ClassDesc.of(bundleImpl));
// Generate imeplementation for each localized | String |
java | junit-team__junit5 | junit-platform-launcher/src/main/java/org/junit/platform/launcher/tagexpression/Parser.java | {
"start": 456,
"end": 911
} | class ____ {
private final Tokenizer tokenizer = new Tokenizer();
ParseResult parse(@Nullable String infixTagExpression) {
return constructExpressionFrom(tokensDerivedFrom(infixTagExpression));
}
private List<Token> tokensDerivedFrom(@Nullable String infixTagExpression) {
return tokenizer.tokenize(infixTagExpression);
}
private ParseResult constructExpressionFrom(List<Token> tokens) {
return new ShuntingYard(tokens).execute();
}
}
| Parser |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/JsonTypeInfoSimpleClassName4061Test.java | {
"start": 4318,
"end": 9346
} | class ____ has contains dollar sign
@Test
public void testMinimalInnerClass() throws Exception
{
String jsonStr = a2q("{'@c':'.JsonTypeInfoSimpleClassName4061Test$MinimalInnerSub4061A'}");
// ser
assertEquals(jsonStr, MAPPER.writeValueAsString(new MinimalInnerSub4061A()));
// deser <- breaks!
MinimalInnerSuper4061 bean = MAPPER.readValue(jsonStr, MinimalInnerSuper4061.class);
assertInstanceOf(MinimalInnerSuper4061.class, bean);
assertNotNull(bean);
}
// Basic : non-inner class, without dollar sign
@Test
public void testBasicClass() throws Exception
{
String jsonStr = a2q("{'@type':'BasicSub4061A'}");
// ser
assertEquals(jsonStr, MAPPER.writeValueAsString(new BasicSub4061A()));
// deser
BasicSuper4061 bean = MAPPER.readValue(jsonStr, BasicSuper4061.class);
assertInstanceOf(BasicSuper4061.class, bean);
assertInstanceOf(BasicSub4061A.class, bean);
}
// Mixed SimpleClassName : parent as inner, subtype as basic
@Test
public void testMixedClass() throws Exception
{
String jsonStr = a2q("{'@type':'MixedSub4061A'}");
// ser
assertEquals(jsonStr, MAPPER.writeValueAsString(new MixedSub4061A()));
// deser
MixedSuper4061 bean = MAPPER.readValue(jsonStr, MixedSuper4061.class);
assertInstanceOf(MixedSuper4061.class, bean);
assertInstanceOf(MixedSub4061A.class, bean);
}
// Mixed MinimalClass : parent as inner, subtype as basic
@Test
public void testMixedMinimalClass() throws Exception
{
String jsonStr = a2q("{'@c':'.MixedMinimalSub4061A'}");
// ser
assertEquals(jsonStr, MAPPER.writeValueAsString(new MixedMinimalSub4061A()));
// deser
MixedMinimalSuper4061 bean = MAPPER.readValue(jsonStr, MixedMinimalSuper4061.class);
assertInstanceOf(MixedMinimalSuper4061.class, bean);
assertInstanceOf(MixedMinimalSub4061A.class, bean);
}
@Test
public void testPolymorphicNewObject() throws Exception
{
String jsonStr = "{\"child\": { \"@type\": \"MergeChildA\", \"name\": \"I'm child A\" }}";
Root root = MAPPER.readValue(jsonStr, Root.class);
assertTrue(root.child instanceof MergeChildA);
assertEquals("I'm child A", ((MergeChildA) root.child).name);
}
// case insenstive type name
@Test
public void testPolymorphicNewObjectCaseInsensitive() throws Exception
{
String jsonStr = "{\"child\": { \"@type\": \"mergechilda\", \"name\": \"I'm child A\" }}";
ObjectMapper mapper = jsonMapperBuilder()
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES)
.build();
Root root = mapper.readValue(jsonStr, Root.class);
assertTrue(root.child instanceof MergeChildA);
assertEquals("I'm child A", ((MergeChildA) root.child).name);
}
@Test
public void testPolymorphicNewObjectUnknownTypeId() throws Exception
{
try {
MAPPER.readValue("{\"child\": { \"@type\": \"UnknownChildA\", \"name\": \"I'm child A\" }}", Root.class);
} catch (InvalidTypeIdException e) {
verifyException(e, "Could not resolve type id 'UnknownChildA' as a subtype of");
}
}
@Test
public void testAliasWithPolymorphic() throws Exception
{
String jsonStr = a2q("{'value': ['ab', {'nm' : 'Bob', 'A' : 17} ] }");
PolyWrapperForAlias value = MAPPER.readValue(jsonStr, PolyWrapperForAlias.class);
assertNotNull(value.value);
AliasBean bean = (AliasBean) value.value;
assertEquals("Bob", bean.name);
assertEquals(17, bean._a);
}
@Test
public void testGetMechanism()
{
final DeserializationConfig config = MAPPER.deserializationConfig();
JavaType javaType = config.constructType(InnerSub4061B.class);
List<NamedType> namedTypes = new ArrayList<>();
namedTypes.add(new NamedType(InnerSub4061A.class));
namedTypes.add(new NamedType(InnerSub4061B.class));
SimpleNameIdResolver idResolver = SimpleNameIdResolver.construct(config, javaType, namedTypes, false, true);
assertEquals(JsonTypeInfo.Id.SIMPLE_NAME, idResolver.getMechanism());
}
@Test
public void testDuplicateNameLastOneWins() throws Exception
{
String jsonStr = a2q("{'@type':'DuplicateSubClass'}");
// deser
DuplicateSuperClass bean = MAPPER.readValue(jsonStr, DuplicateSuperClass.class);
assertInstanceOf(tools.jackson.databind.jsontype.DuplicateSubClass.class, bean);
}
}
@JsonTypeInfo(
use = JsonTypeInfo.Id.SIMPLE_NAME)
@JsonSubTypes({
@JsonSubTypes.Type(value = BasicSub4061A.class),
@JsonSubTypes.Type(value = BasicSub4061B.class)
})
| that |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoSerializerDebugInitHelper.java | {
"start": 1145,
"end": 1794
} | class ____ {
/**
* This captures the initial setting after initialization. It is used to validate in tests that
* we never change the default to true.
*/
static final boolean INITIAL_SETTING;
/** The flag that is used to initialize the KryoSerializer's concurrency check flag. */
static boolean setToDebug = false;
static {
// capture the default setting, for tests
INITIAL_SETTING = setToDebug;
// if assertions are active, the check should be activated
//noinspection AssertWithSideEffects,ConstantConditions
assert setToDebug = true;
}
}
| KryoSerializerDebugInitHelper |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/JsonIteratorByteArrayTest.java | {
"start": 1486,
"end": 1605
} | class ____ {
public int id;
public String name;
// public Type type;
}
public static | Model |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointDetector.java | {
"start": 714,
"end": 2259
} | class ____ {
private static final Logger logger = LogManager.getLogger(ChangePointDetector.class);
static final double P_VALUE_THRESHOLD = 0.01;
static final int MINIMUM_BUCKETS = 10;
/**
* Returns the ChangeType of a series of values.
*/
public static ChangeType getChangeType(MlAggsHelper.DoubleBucketValues bucketValues) {
if (bucketValues.getValues().length < (2 * MINIMUM_BUCKETS) + 2) {
return new ChangeType.Indeterminable(
"not enough buckets to calculate change_point. Requires at least ["
+ ((2 * MINIMUM_BUCKETS) + 2)
+ "]; found ["
+ bucketValues.getValues().length
+ "]"
);
}
ChangeType spikeOrDip;
try {
SpikeAndDipDetector detect = new SpikeAndDipDetector(bucketValues);
spikeOrDip = detect.detect(P_VALUE_THRESHOLD);
logger.trace("spike or dip p-value: [{}]", spikeOrDip.pValue());
} catch (NotStrictlyPositiveException nspe) {
logger.debug("failure testing for dips and spikes", nspe);
spikeOrDip = new ChangeType.Indeterminable("failure testing for dips and spikes");
}
ChangeType change = new ChangeDetector(bucketValues).detect(P_VALUE_THRESHOLD);
logger.trace("change p-value: [{}]", change.pValue());
if (spikeOrDip.pValue() < change.pValue()) {
change = spikeOrDip;
}
return change;
}
}
| ChangePointDetector |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/highavailability/nonha/embedded/HaLeadershipControl.java | {
"start": 1038,
"end": 1452
} | interface ____ {
CompletableFuture<Void> revokeDispatcherLeadership();
CompletableFuture<Void> grantDispatcherLeadership();
CompletableFuture<Void> revokeJobMasterLeadership(JobID jobId);
CompletableFuture<Void> grantJobMasterLeadership(JobID jobId);
CompletableFuture<Void> revokeResourceManagerLeadership();
CompletableFuture<Void> grantResourceManagerLeadership();
}
| HaLeadershipControl |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CompileTimeConstantCheckerTest.java | {
"start": 11330,
"end": 12009
} | class ____ {
public static void m(String s, @CompileTimeConstant String... p) {}
public static void r(String s) {
m(s);
m(s, "foo");
m(s, "foo", "bar");
m(s, "foo", "bar", "baz");
}
}
""")
.doTest();
}
@Test
public void matches_effectivelyFinalCompileTimeConstantParam() {
compilationHelper
.addSourceLines(
"test/CompileTimeConstantTestCase.java",
"""
package test;
import com.google.errorprone.annotations.CompileTimeConstant;
public | CompileTimeConstantTestCase |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/deploy/DefaultModuleDeployer.java | {
"start": 3233,
"end": 24832
} | class ____ extends AbstractDeployer<ModuleModel> implements ModuleDeployer {
private static final ErrorTypeAwareLogger logger =
LoggerFactory.getErrorTypeAwareLogger(DefaultModuleDeployer.class);
private final List<CompletableFuture<?>> asyncExportingFutures = new ArrayList<>();
private final List<CompletableFuture<?>> asyncReferringFutures = new ArrayList<>();
private final List<ServiceConfigBase<?>> exportedServices = new ArrayList<>();
private final ModuleModel moduleModel;
private final FrameworkExecutorRepository frameworkExecutorRepository;
private final ExecutorRepository executorRepository;
private final ModuleConfigManager configManager;
private final SimpleReferenceCache referenceCache;
private final ApplicationDeployer applicationDeployer;
private CompletableFuture startFuture;
private Boolean background;
private Boolean exportAsync;
private Boolean referAsync;
private boolean registryInteracted;
private CompletableFuture<?> exportFuture;
private CompletableFuture<?> referFuture;
public DefaultModuleDeployer(ModuleModel moduleModel) {
super(moduleModel);
this.moduleModel = moduleModel;
configManager = moduleModel.getConfigManager();
frameworkExecutorRepository = moduleModel
.getApplicationModel()
.getFrameworkModel()
.getBeanFactory()
.getBean(FrameworkExecutorRepository.class);
executorRepository = ExecutorRepository.getInstance(moduleModel.getApplicationModel());
referenceCache = SimpleReferenceCache.newCache();
applicationDeployer = DefaultApplicationDeployer.get(moduleModel);
// load spi listener
Set<ModuleDeployListener> listeners =
moduleModel.getExtensionLoader(ModuleDeployListener.class).getSupportedExtensionInstances();
for (ModuleDeployListener listener : listeners) {
this.addDeployListener(listener);
}
}
@Override
public void initialize() throws IllegalStateException {
if (initialized) {
return;
}
// Ensure that the initialization is completed when concurrent calls
synchronized (this) {
if (initialized) {
return;
}
onInitialize();
loadConfigs();
// read ModuleConfig
ModuleConfig moduleConfig = moduleModel
.getConfigManager()
.getModule()
.orElseThrow(() -> new IllegalStateException("Default module config is not initialized"));
exportAsync = Boolean.TRUE.equals(moduleConfig.getExportAsync());
referAsync = Boolean.TRUE.equals(moduleConfig.getReferAsync());
// start in background
background = moduleConfig.getBackground();
if (background == null) {
// compatible with old usages
background = isExportBackground() || isReferBackground();
}
initialized = true;
if (logger.isInfoEnabled()) {
logger.info(getIdentifier() + " has been initialized!");
}
}
}
@Override
public Future start() throws IllegalStateException {
// initialize,maybe deadlock applicationDeployer lock & moduleDeployer lock
applicationDeployer.initialize();
return startSync();
}
private synchronized Future startSync() throws IllegalStateException {
if (isStopping() || isStopped() || isFailed()) {
throw new IllegalStateException(getIdentifier() + " is stopping or stopped, can not start again");
}
try {
if (isStarting() || isStarted() || isCompletion()) {
return startFuture;
}
onModuleStarting();
initialize();
// export services
exportServices();
// prepare application instance
// exclude internal module to avoid wait itself
if (moduleModel != moduleModel.getApplicationModel().getInternalModule()) {
applicationDeployer.prepareInternalModule();
}
// refer services
referServices();
// if no async export/refer services, just set started
if (asyncExportingFutures.isEmpty() && asyncReferringFutures.isEmpty()) {
// publish module started event
onModuleStarted();
// register services to registry
registerServices();
// check reference config
checkReferences();
// publish module completion event
onModuleCompletion();
// complete module start future after application state changed
completeStartFuture(true);
} else {
frameworkExecutorRepository.getSharedExecutor().submit(() -> {
try {
// wait for export finish
waitExportFinish();
// wait for refer finish
waitReferFinish();
// publish module started event
onModuleStarted();
// register services to registry
registerServices();
// check reference config
checkReferences();
// publish module completion event
onModuleCompletion();
} catch (Throwable e) {
logger.warn(
CONFIG_FAILED_WAIT_EXPORT_REFER,
"",
"",
"wait for export/refer services occurred an exception",
e);
onModuleFailed(getIdentifier() + " start failed: " + e, e);
} finally {
// complete module start future after application state changed
completeStartFuture(true);
}
});
}
} catch (Throwable e) {
onModuleFailed(getIdentifier() + " start failed: " + e, e);
throw e;
}
return startFuture;
}
@Override
public Future getStartFuture() {
return startFuture;
}
private boolean hasExportedServices() {
return !configManager.getServices().isEmpty();
}
@Override
public void stop() throws IllegalStateException {
moduleModel.destroy();
}
@Override
public void preDestroy() throws IllegalStateException {
if (isStopping() || isStopped()) {
return;
}
onModuleStopping();
offline();
}
private void offline() {
try {
ModuleServiceRepository serviceRepository = moduleModel.getServiceRepository();
List<ProviderModel> exportedServices = serviceRepository.getExportedServices();
for (ProviderModel exportedService : exportedServices) {
List<ProviderModel.RegisterStatedURL> statedUrls = exportedService.getStatedUrl();
for (ProviderModel.RegisterStatedURL statedURL : statedUrls) {
if (statedURL.isRegistered()) {
doOffline(statedURL);
}
}
}
} catch (Throwable t) {
logger.error(
LoggerCodeConstants.INTERNAL_ERROR, "", "", "Exceptions occurred when unregister services.", t);
}
}
private void doOffline(ProviderModel.RegisterStatedURL statedURL) {
RegistryFactory registryFactory = statedURL
.getRegistryUrl()
.getOrDefaultApplicationModel()
.getExtensionLoader(RegistryFactory.class)
.getAdaptiveExtension();
Registry registry = registryFactory.getRegistry(statedURL.getRegistryUrl());
registry.unregister(statedURL.getProviderUrl());
statedURL.setRegistered(false);
}
@Override
public synchronized void postDestroy() throws IllegalStateException {
if (isStopped()) {
return;
}
unexportServices();
unreferServices();
ModuleServiceRepository serviceRepository = moduleModel.getServiceRepository();
if (serviceRepository != null) {
List<ConsumerModel> consumerModels = serviceRepository.getReferredServices();
for (ConsumerModel consumerModel : consumerModels) {
try {
if (consumerModel.getDestroyRunner() != null) {
consumerModel.getDestroyRunner().run();
}
} catch (Throwable t) {
logger.error(
CONFIG_UNABLE_DESTROY_MODEL,
"there are problems with the custom implementation.",
"",
"Unable to destroy model: consumerModel.",
t);
}
}
List<ProviderModel> exportedServices = serviceRepository.getExportedServices();
for (ProviderModel providerModel : exportedServices) {
try {
if (providerModel.getDestroyRunner() != null) {
providerModel.getDestroyRunner().run();
}
} catch (Throwable t) {
logger.error(
CONFIG_UNABLE_DESTROY_MODEL,
"there are problems with the custom implementation.",
"",
"Unable to destroy model: providerModel.",
t);
}
}
serviceRepository.destroy();
}
onModuleStopped();
}
private void onInitialize() {
for (DeployListener<ModuleModel> listener : listeners) {
try {
listener.onInitialize(moduleModel);
} catch (Throwable e) {
logger.error(
CONFIG_FAILED_START_MODEL,
"",
"",
getIdentifier() + " an exception occurred when handle initialize event",
e);
}
}
}
private void onModuleStarting() {
setStarting();
startFuture = new CompletableFuture();
logger.info(getIdentifier() + " is starting.");
applicationDeployer.notifyModuleChanged(moduleModel, DeployState.STARTING);
}
private void onModuleStarted() {
if (isStarting()) {
setStarted();
logger.info(getIdentifier() + " has started.");
applicationDeployer.notifyModuleChanged(moduleModel, DeployState.STARTED);
}
}
private void onModuleCompletion() {
if (isStarted()) {
setCompletion();
logger.info(getIdentifier() + " has completed.");
applicationDeployer.notifyModuleChanged(moduleModel, DeployState.COMPLETION);
}
}
private void onModuleFailed(String msg, Throwable ex) {
try {
try {
// un-export all services if start failure
unexportServices();
} catch (Throwable t) {
logger.info("Failed to un-export services after module failed.", t);
}
setFailed(ex);
logger.error(CONFIG_FAILED_START_MODEL, "", "", "Model start failed: " + msg, ex);
applicationDeployer.notifyModuleChanged(moduleModel, DeployState.FAILED);
} finally {
completeStartFuture(false);
}
}
private void completeStartFuture(boolean value) {
if (startFuture != null && !startFuture.isDone()) {
startFuture.complete(value);
}
if (exportFuture != null && !exportFuture.isDone()) {
exportFuture.cancel(true);
}
if (referFuture != null && !referFuture.isDone()) {
referFuture.cancel(true);
}
}
private void onModuleStopping() {
try {
setStopping();
logger.info(getIdentifier() + " is stopping.");
applicationDeployer.notifyModuleChanged(moduleModel, DeployState.STOPPING);
} finally {
completeStartFuture(false);
}
}
private void onModuleStopped() {
try {
setStopped();
logger.info(getIdentifier() + " has stopped.");
applicationDeployer.notifyModuleChanged(moduleModel, DeployState.STOPPED);
} finally {
completeStartFuture(false);
}
}
private void loadConfigs() {
// load module configs
moduleModel.getConfigManager().loadConfigs();
moduleModel.getConfigManager().refreshAll();
}
private void exportServices() {
for (ServiceConfigBase sc : configManager.getServices()) {
exportServiceInternal(sc);
}
}
private void registerServices() {
for (ServiceConfigBase sc : configManager.getServices()) {
if (!Boolean.FALSE.equals(sc.isRegister())) {
registerServiceInternal(sc);
}
}
applicationDeployer.refreshServiceInstance();
}
private void checkReferences() {
Optional<ModuleConfig> module = configManager.getModule();
long timeout = module.map(ModuleConfig::getCheckReferenceTimeout).orElse(30000L);
for (ReferenceConfigBase<?> rc : configManager.getReferences()) {
referenceCache.check(rc, timeout);
}
}
private void exportServiceInternal(ServiceConfigBase sc) {
ServiceConfig<?> serviceConfig = (ServiceConfig<?>) sc;
if (!serviceConfig.isRefreshed()) {
serviceConfig.refresh();
}
if (sc.isExported()) {
return;
}
if (exportAsync || sc.shouldExportAsync()) {
ExecutorService executor = executorRepository.getServiceExportExecutor();
CompletableFuture<Void> future = CompletableFuture.runAsync(
() -> {
try {
if (!sc.isExported()) {
sc.export();
exportedServices.add(sc);
}
} catch (Throwable t) {
logger.error(
CONFIG_FAILED_EXPORT_SERVICE,
"",
"",
"Failed to async export service config: " + getIdentifier() + " , catch error : "
+ t.getMessage(),
t);
}
},
executor);
asyncExportingFutures.add(future);
} else {
if (!sc.isExported()) {
sc.export(RegisterTypeEnum.AUTO_REGISTER_BY_DEPLOYER);
exportedServices.add(sc);
}
}
if (serviceConfig.hasRegistrySpecified()) {
registryInteracted = true;
}
}
private void registerServiceInternal(ServiceConfigBase sc) {
ServiceConfig<?> serviceConfig = (ServiceConfig<?>) sc;
if (!serviceConfig.isRefreshed()) {
serviceConfig.refresh();
}
if (!sc.isExported()) {
return;
}
if (sc.shouldDelay()) {
return;
}
sc.register(true);
}
private void unexportServices() {
exportedServices.forEach(sc -> {
try {
configManager.removeConfig(sc);
sc.unexport();
} catch (Throwable t) {
logger.info("Failed to un-export service. Service Key: " + sc.getUniqueServiceName(), t);
}
});
exportedServices.clear();
asyncExportingFutures.forEach(future -> {
if (!future.isDone()) {
future.cancel(true);
}
});
asyncExportingFutures.clear();
}
private void referServices() {
configManager.getReferences().forEach(rc -> {
try {
ReferenceConfig<?> referenceConfig = (ReferenceConfig<?>) rc;
if (!referenceConfig.isRefreshed()) {
referenceConfig.refresh();
}
if (rc.shouldInit()) {
if (referAsync || rc.shouldReferAsync()) {
ExecutorService executor = executorRepository.getServiceReferExecutor();
CompletableFuture<Void> future = CompletableFuture.runAsync(
() -> {
try {
referenceCache.get(rc, false);
} catch (Throwable t) {
logger.error(
CONFIG_FAILED_EXPORT_SERVICE,
"",
"",
"Failed to async export service config: " + getIdentifier()
+ " , catch error : " + t.getMessage(),
t);
}
},
executor);
asyncReferringFutures.add(future);
} else {
referenceCache.get(rc, false);
}
}
} catch (Throwable t) {
logger.error(
CONFIG_FAILED_REFERENCE_MODEL,
"",
"",
"Model reference failed: " + getIdentifier() + " , catch error : " + t.getMessage(),
t);
referenceCache.destroy(rc);
throw t;
}
});
}
private void unreferServices() {
try {
asyncReferringFutures.forEach(future -> {
if (!future.isDone()) {
future.cancel(true);
}
});
asyncReferringFutures.clear();
referenceCache.destroyAll();
for (ReferenceConfigBase<?> rc : configManager.getReferences()) {
rc.destroy();
}
} catch (Exception ignored) {
}
}
private void waitExportFinish() {
try {
logger.info(getIdentifier() + " waiting services exporting ...");
exportFuture = CompletableFuture.allOf(asyncExportingFutures.toArray(new CompletableFuture[0]));
exportFuture.get();
} catch (Throwable e) {
logger.warn(
CONFIG_FAILED_EXPORT_SERVICE,
"",
"",
getIdentifier() + " export services occurred an exception: " + e.toString());
} finally {
logger.info(getIdentifier() + " export services finished.");
asyncExportingFutures.clear();
}
}
private void waitReferFinish() {
try {
logger.info(getIdentifier() + " waiting services referring ...");
referFuture = CompletableFuture.allOf(asyncReferringFutures.toArray(new CompletableFuture[0]));
referFuture.get();
} catch (Throwable e) {
logger.warn(
CONFIG_FAILED_REFER_SERVICE,
"",
"",
getIdentifier() + " refer services occurred an exception: " + e.toString());
} finally {
logger.info(getIdentifier() + " refer services finished.");
asyncReferringFutures.clear();
}
}
@Override
public boolean isBackground() {
return background;
}
private boolean isExportBackground() {
return moduleModel.getConfigManager().getProviders().stream()
.map(ProviderConfig::getExportBackground)
.anyMatch(k -> k != null && k);
}
private boolean isReferBackground() {
return moduleModel.getConfigManager().getConsumers().stream()
.map(ConsumerConfig::getReferBackground)
.anyMatch(k -> k != null && k);
}
@Override
public ReferenceCache getReferenceCache() {
return referenceCache;
}
@Override
public void registerServiceInstance() {
applicationDeployer.registerServiceInstance();
}
/**
* Prepare for export/refer service, trigger initializing application and module
*/
@Override
public void prepare() {
applicationDeployer.initialize();
this.initialize();
}
@Override
public boolean hasRegistryInteraction() {
return registryInteracted;
}
@Override
public ApplicationDeployer getApplicationDeployer() {
return applicationDeployer;
}
}
| DefaultModuleDeployer |
java | netty__netty | codec-base/src/test/java/io/netty/handler/codec/EmptyHeadersTest.java | {
"start": 14774,
"end": 14858
} | class ____ extends EmptyHeaders<String, String, TestEmptyHeaders> { }
}
| TestEmptyHeaders |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencing.java | {
"start": 3129,
"end": 23562
} | class ____ {
protected static final Logger LOG = LoggerFactory.getLogger(TestDNFencing.class);
private static final String TEST_FILE = "/testStandbyIsHot";
private static final Path TEST_FILE_PATH = new Path(TEST_FILE);
private static final int SMALL_BLOCK = 1024;
private Configuration conf;
private MiniDFSCluster cluster;
private NameNode nn1, nn2;
private FileSystem fs;
static {
DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
}
@BeforeEach
public void setupCluster() throws Exception {
conf = new Configuration();
conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, SMALL_BLOCK);
// Bump up redundancy interval so that we only run low redundancy
// checks explicitly.
conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY,
600);
// Increase max streams so that we re-replicate quickly.
conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY, 1000);
// See RandomDeleterPolicy javadoc.
conf.setClass(DFSConfigKeys.DFS_BLOCK_REPLICATOR_CLASSNAME_KEY,
RandomDeleterPolicy.class, BlockPlacementPolicy.class);
conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
cluster = new MiniDFSCluster.Builder(conf)
.nnTopology(MiniDFSNNTopology.simpleHATopology())
.numDataNodes(3)
.build();
nn1 = cluster.getNameNode(0);
nn2 = cluster.getNameNode(1);
cluster.waitActive();
cluster.transitionToActive(0);
// Trigger block reports so that the first NN trusts all
// of the DNs, and will issue deletions
cluster.triggerBlockReports();
fs = HATestUtil.configureFailoverFs(cluster, conf);
}
@AfterEach
public void shutdownCluster() throws Exception {
if (cluster != null) {
banner("Shutting down cluster. NN1 metadata:");
doMetasave(nn1);
banner("Shutting down cluster. NN2 metadata:");
doMetasave(nn2);
cluster.shutdown();
cluster = null;
}
}
@Test
public void testDnFencing() throws Exception {
// Create a file with replication level 3.
DFSTestUtil.createFile(fs, TEST_FILE_PATH, 30*SMALL_BLOCK, (short)3, 1L);
ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, TEST_FILE_PATH);
// Drop its replication count to 1, so it becomes over-replicated.
// Then compute the invalidation of the extra blocks and trigger
// heartbeats so the invalidations are flushed to the DNs.
nn1.getRpcServer().setReplication(TEST_FILE, (short) 1);
BlockManagerTestUtil.computeInvalidationWork(
nn1.getNamesystem().getBlockManager());
cluster.triggerHeartbeats();
// Transition nn2 to active even though nn1 still thinks it's active.
banner("Failing to NN2 but let NN1 continue to think it's active");
NameNodeAdapter.abortEditLogs(nn1);
NameNodeAdapter.enterSafeMode(nn1, false);
cluster.transitionToActive(1);
// Check that the standby picked up the replication change.
assertEquals(1, nn2.getRpcServer().getFileInfo(TEST_FILE).getReplication());
// Dump some info for debugging purposes.
banner("NN2 Metadata immediately after failover");
doMetasave(nn2);
banner("Triggering heartbeats and block reports so that fencing is completed");
cluster.triggerHeartbeats();
cluster.triggerBlockReports();
banner("Metadata after nodes have all block-reported");
doMetasave(nn2);
// Force a rescan of postponedMisreplicatedBlocks.
BlockManager nn2BM = nn2.getNamesystem().getBlockManager();
BlockManagerTestUtil.checkHeartbeat(nn2BM);
BlockManagerTestUtil.rescanPostponedMisreplicatedBlocks(nn2BM);
// The blocks should no longer be postponed.
assertEquals(0, nn2.getNamesystem().getPostponedMisreplicatedBlocks());
// Wait for NN2 to enact its deletions (redundancy monitor has to run, etc)
BlockManagerTestUtil.computeInvalidationWork(
nn2.getNamesystem().getBlockManager());
cluster.triggerHeartbeats();
HATestUtil.waitForDNDeletions(cluster);
cluster.triggerDeletionReports();
assertEquals(0, nn2.getNamesystem().getUnderReplicatedBlocks());
assertEquals(0, nn2.getNamesystem().getPendingReplicationBlocks());
banner("Making sure the file is still readable");
FileSystem fs2 = cluster.getFileSystem(1);
DFSTestUtil.readFile(fs2, TEST_FILE_PATH);
banner("Waiting for the actual block files to get deleted from DNs.");
waitForTrueReplication(cluster, block, 1);
}
/**
* Test case which restarts the standby node in such a way that,
* when it exits safemode, it will want to invalidate a bunch
* of over-replicated block replicas. Ensures that if we failover
* at this point it won't lose data.
*/
@Test
public void testNNClearsCommandsOnFailoverAfterStartup()
throws Exception {
// Make lots of blocks to increase chances of triggering a bug.
DFSTestUtil.createFile(fs, TEST_FILE_PATH, 30*SMALL_BLOCK, (short)3, 1L);
banner("Shutting down NN2");
cluster.shutdownNameNode(1);
banner("Setting replication to 1, rolling edit log.");
nn1.getRpcServer().setReplication(TEST_FILE, (short) 1);
nn1.getRpcServer().rollEditLog();
// Start NN2 again. When it starts up, it will see all of the
// blocks as over-replicated, since it has the metadata for
// replication=1, but the DNs haven't yet processed the deletions.
banner("Starting NN2 again.");
cluster.restartNameNode(1);
nn2 = cluster.getNameNode(1);
banner("triggering BRs");
cluster.triggerBlockReports();
// We expect that both NN1 and NN2 will have some number of
// deletions queued up for the DNs.
banner("computing invalidation on nn1");
BlockManagerTestUtil.computeInvalidationWork(
nn1.getNamesystem().getBlockManager());
banner("computing invalidation on nn2");
BlockManagerTestUtil.computeInvalidationWork(
nn2.getNamesystem().getBlockManager());
// Dump some info for debugging purposes.
banner("Metadata immediately before failover");
doMetasave(nn2);
// Transition nn2 to active even though nn1 still thinks it's active
banner("Failing to NN2 but let NN1 continue to think it's active");
NameNodeAdapter.abortEditLogs(nn1);
NameNodeAdapter.enterSafeMode(nn1, false);
cluster.transitionToActive(1);
// Check that the standby picked up the replication change.
assertEquals(1, nn2.getRpcServer().getFileInfo(TEST_FILE).getReplication());
// Dump some info for debugging purposes.
banner("Metadata immediately after failover");
doMetasave(nn2);
banner("Triggering heartbeats and block reports so that fencing is completed");
cluster.triggerHeartbeats();
cluster.triggerBlockReports();
banner("Metadata after nodes have all block-reported");
doMetasave(nn2);
// Force a rescan of postponedMisreplicatedBlocks.
BlockManager nn2BM = nn2.getNamesystem().getBlockManager();
BlockManagerTestUtil.checkHeartbeat(nn2BM);
BlockManagerTestUtil.rescanPostponedMisreplicatedBlocks(nn2BM);
// The block should no longer be postponed.
assertEquals(0, nn2.getNamesystem().getPostponedMisreplicatedBlocks());
// Wait for NN2 to enact its deletions (redundancy monitor has to run, etc)
BlockManagerTestUtil.computeInvalidationWork(
nn2.getNamesystem().getBlockManager());
HATestUtil.waitForNNToIssueDeletions(nn2);
cluster.triggerHeartbeats();
HATestUtil.waitForDNDeletions(cluster);
cluster.triggerDeletionReports();
assertEquals(0, nn2.getNamesystem().getUnderReplicatedBlocks());
assertEquals(0, nn2.getNamesystem().getPendingReplicationBlocks());
banner("Making sure the file is still readable");
FileSystem fs2 = cluster.getFileSystem(1);
DFSTestUtil.readFile(fs2, TEST_FILE_PATH);
}
/**
* Test case that reduces replication of a file with a lot of blocks
* and then fails over right after those blocks enter the DN invalidation
* queues on the active. Ensures that fencing is correct and no replicas
* are lost.
*/
@Test
public void testNNClearsCommandsOnFailoverWithReplChanges()
throws Exception {
// Make lots of blocks to increase chances of triggering a bug.
DFSTestUtil.createFile(fs, TEST_FILE_PATH, 30*SMALL_BLOCK, (short)1, 1L);
banner("rolling NN1's edit log, forcing catch-up");
HATestUtil.waitForStandbyToCatchUp(nn1, nn2);
// Get some new replicas reported so that NN2 now considers
// them over-replicated and schedules some more deletions
nn1.getRpcServer().setReplication(TEST_FILE, (short) 2);
while (BlockManagerTestUtil.getComputedDatanodeWork(
nn1.getNamesystem().getBlockManager()) > 0) {
LOG.info("Getting more replication work computed");
}
BlockManager bm1 = nn1.getNamesystem().getBlockManager();
while (bm1.getPendingReconstructionBlocksCount() > 0) {
BlockManagerTestUtil.updateState(bm1);
cluster.triggerHeartbeats();
Thread.sleep(1000);
}
banner("triggering BRs");
cluster.triggerBlockReports();
nn1.getRpcServer().setReplication(TEST_FILE, (short) 1);
banner("computing invalidation on nn1");
BlockManagerTestUtil.computeInvalidationWork(
nn1.getNamesystem().getBlockManager());
doMetasave(nn1);
banner("computing invalidation on nn2");
BlockManagerTestUtil.computeInvalidationWork(
nn2.getNamesystem().getBlockManager());
doMetasave(nn2);
// Dump some info for debugging purposes.
banner("Metadata immediately before failover");
doMetasave(nn2);
// Transition nn2 to active even though nn1 still thinks it's active
banner("Failing to NN2 but let NN1 continue to think it's active");
NameNodeAdapter.abortEditLogs(nn1);
NameNodeAdapter.enterSafeMode(nn1, false);
BlockManagerTestUtil.computeInvalidationWork(
nn2.getNamesystem().getBlockManager());
cluster.transitionToActive(1);
// Check that the standby picked up the replication change.
assertEquals(1, nn2.getRpcServer().getFileInfo(TEST_FILE).getReplication());
// Dump some info for debugging purposes.
banner("Metadata immediately after failover");
doMetasave(nn2);
banner("Triggering heartbeats and block reports so that fencing is completed");
cluster.triggerHeartbeats();
cluster.triggerBlockReports();
banner("Metadata after nodes have all block-reported");
doMetasave(nn2);
// Force a rescan of postponedMisreplicatedBlocks.
BlockManager nn2BM = nn2.getNamesystem().getBlockManager();
BlockManagerTestUtil.checkHeartbeat(nn2BM);
BlockManagerTestUtil.rescanPostponedMisreplicatedBlocks(nn2BM);
// The block should no longer be postponed.
assertEquals(0, nn2.getNamesystem().getPostponedMisreplicatedBlocks());
// Wait for NN2 to enact its deletions (redundancy monitor has to run, etc)
BlockManagerTestUtil.computeInvalidationWork(
nn2.getNamesystem().getBlockManager());
HATestUtil.waitForNNToIssueDeletions(nn2);
cluster.triggerHeartbeats();
HATestUtil.waitForDNDeletions(cluster);
cluster.triggerDeletionReports();
assertEquals(0, nn2.getNamesystem().getUnderReplicatedBlocks());
assertEquals(0, nn2.getNamesystem().getPendingReplicationBlocks());
banner("Making sure the file is still readable");
FileSystem fs2 = cluster.getFileSystem(1);
DFSTestUtil.readFile(fs2, TEST_FILE_PATH);
}
/**
* Regression test for HDFS-2742. The issue in this bug was:
* - DN does a block report while file is open. This BR contains
* the block in RBW state.
* - Standby queues the RBW state in PendingDatanodeMessages
* - Standby processes edit logs during failover. Before fixing
* this bug, it was mistakenly applying the RBW reported state
* after the block had been completed, causing the block to get
* marked corrupt. Instead, we should now be applying the RBW
* message on OP_ADD, and then the FINALIZED message on OP_CLOSE.
*/
  @Test
  public void testBlockReportsWhileFileBeingWritten() throws Exception {
    // Open the file and hflush so the DNs hold an RBW (replica-being-written)
    // replica while the file is still under construction.
    FSDataOutputStream out = fs.create(TEST_FILE_PATH);
    try {
      AppendTestUtil.write(out, 0, 10);
      out.hflush();
      // Block report will include the RBW replica, but will be
      // queued on the StandbyNode.
      cluster.triggerBlockReports();
    } finally {
      IOUtils.closeStream(out);
    }
    // Fail over while the queued RBW report is still pending on the standby;
    // the standby must reconcile it against the OP_ADD/OP_CLOSE edits.
    cluster.transitionToStandby(0);
    cluster.transitionToActive(1);
    // Verify that no replicas are marked corrupt, and that the
    // file is readable from the failed-over standby.
    BlockManagerTestUtil.updateState(nn1.getNamesystem().getBlockManager());
    BlockManagerTestUtil.updateState(nn2.getNamesystem().getBlockManager());
    assertEquals(0, nn1.getNamesystem().getCorruptReplicaBlocks());
    assertEquals(0, nn2.getNamesystem().getCorruptReplicaBlocks());
    DFSTestUtil.readFile(fs, TEST_FILE_PATH);
  }
/**
* Test that, when a block is re-opened for append, the related
* datanode messages are correctly queued by the SBN because
* they have future states and genstamps.
*/
  @Test
  public void testQueueingWithAppend() throws Exception {
    // case 1: create file and call hflush after write
    FSDataOutputStream out = fs.create(TEST_FILE_PATH);
    try {
      AppendTestUtil.write(out, 0, 10);
      out.hflush();
      // Opening the file will report RBW replicas, but will be
      // queued on the StandbyNode.
      // However, the delivery of RBW messages is delayed by HDFS-7217 fix.
      // Apply cluster.triggerBlockReports() to trigger the reporting sooner.
      //
      cluster.triggerBlockReports();
      // The cluster.triggerBlockReports() call above does a full
      // block report that incurs 3 extra RBW messages
    } finally {
      IOUtils.closeStream(out);
    }
    cluster.triggerBlockReports();
    // Only the most recent report per DN may remain queued (3 DNs => 3).
    assertEquals(3, nn2.getNamesystem().getPendingDataNodeMessageCount(),
        "The queue should only have the latest report for each DN");
    // case 2: append to file and call hflush after write
    try {
      out = fs.append(TEST_FILE_PATH);
      AppendTestUtil.write(out, 10, 10);
      out.hflush();
      cluster.triggerBlockReports();
    } finally {
      IOUtils.closeStream(out);
      cluster.triggerHeartbeats();
    }
    // Appending bumps the genstamp, but per-DN messages are still collapsed
    // to the latest one, so the count stays at 3.
    assertEquals(3, nn2.getNamesystem().getPendingDataNodeMessageCount(),
        "The queue should only have the latest report for each DN");
    // case 3: similar to case 2, except no hflush is called.
    try {
      out = fs.append(TEST_FILE_PATH);
      AppendTestUtil.write(out, 20, 10);
    } finally {
      // The write operation in the try block is buffered, thus no RBW message
      // is reported yet until the closeStream call here. When closeStream is
      // called, before HDFS-7217 fix, there would be three RBW messages
      // (blockReceiving), plus three FINALIZED messages (blockReceived)
      // delivered to NN. However, because of HDFS-7217 fix, the reporting of
      // RBW messages is postponed. In this case, they are even overwritten
      // by the blockReceived messages of the same block when they are waiting
      // to be delivered. All this happens within the closeStream() call.
      // What's delivered to NN is the three blockReceived messages. See
      // BPServiceActor#addPendingReplicationBlockInfo
      //
      IOUtils.closeStream(out);
    }
    cluster.triggerBlockReports();
    assertEquals(3, nn2.getNamesystem().getPendingDataNodeMessageCount(),
        "The queue should only have the latest report for each DN");
    // Fail over and confirm the queued future-genstamp messages were applied
    // cleanly rather than marking replicas corrupt.
    cluster.transitionToStandby(0);
    cluster.transitionToActive(1);
    // Verify that no replicas are marked corrupt, and that the
    // file is readable from the failed-over standby.
    BlockManagerTestUtil.updateState(nn1.getNamesystem().getBlockManager());
    BlockManagerTestUtil.updateState(nn2.getNamesystem().getBlockManager());
    assertEquals(0, nn1.getNamesystem().getCorruptReplicaBlocks());
    assertEquals(0, nn2.getNamesystem().getCorruptReplicaBlocks());
    // All three appended ranges (0-30) must be readable after failover.
    AppendTestUtil.check(fs, TEST_FILE_PATH, 30);
  }
/**
* Another regression test for HDFS-2742. This tests the following sequence:
* - DN does a block report while file is open. This BR contains
* the block in RBW state.
* - The block report is delayed in reaching the standby.
* - The file is closed.
* - The standby processes the OP_ADD and OP_CLOSE operations before
* the RBW block report arrives.
* - The standby should not mark the block as corrupt.
*/
  @Test
  public void testRBWReportArrivesAfterEdits() throws Exception {
    // Latch released once the delayed block report has fully gone through.
    final CountDownLatch brFinished = new CountDownLatch(1);
    DelayAnswer delayer = new GenericTestUtils.DelayAnswer(LOG) {
      @Override
      protected Object passThrough(InvocationOnMock invocation)
          throws Throwable {
        try {
          return super.passThrough(invocation);
        } finally {
          // inform the test that our block report went through.
          brFinished.countDown();
        }
      }
    };
    FSDataOutputStream out = fs.create(TEST_FILE_PATH);
    try {
      AppendTestUtil.write(out, 0, 10);
      out.hflush();
      // Intercept DN0's block report to nn2 and hold it back so it arrives
      // only after the standby has processed the OP_ADD/OP_CLOSE edits.
      DataNode dn = cluster.getDataNodes().get(0);
      DatanodeProtocolClientSideTranslatorPB spy =
          InternalDataNodeTestUtils.spyOnBposToNN(dn, nn2);
      Mockito.doAnswer(delayer)
          .when(spy).blockReport(
              any(),
              anyString(),
              any(),
              any());
      dn.scheduleAllBlockReport(0);
      delayer.waitForCall();
    } finally {
      IOUtils.closeStream(out);
    }
    // Fail over first, then release the stale RBW report and wait for it.
    cluster.transitionToStandby(0);
    cluster.transitionToActive(1);
    delayer.proceed();
    brFinished.await();
    // Verify that no replicas are marked corrupt, and that the
    // file is readable from the failed-over standby.
    BlockManagerTestUtil.updateState(nn1.getNamesystem().getBlockManager());
    BlockManagerTestUtil.updateState(nn2.getNamesystem().getBlockManager());
    assertEquals(0, nn1.getNamesystem().getCorruptReplicaBlocks());
    assertEquals(0, nn2.getNamesystem().getCorruptReplicaBlocks());
    DFSTestUtil.readFile(fs, TEST_FILE_PATH);
  }
/**
* Print a big banner in the test log to make debug easier.
*/
private void banner(String string) {
LOG.info("\n\n\n\n================================================\n" +
string + "\n" +
"==================================================\n\n");
}
private void doMetasave(NameNode nn2) {
nn2.getNamesystem().writeLock(RwLockMode.BM);
try {
PrintWriter pw = new PrintWriter(System.err);
nn2.getNamesystem().getBlockManager().metaSave(pw);
pw.flush();
} finally {
nn2.getNamesystem().writeUnlock(RwLockMode.BM, "metaSave");
}
}
private void waitForTrueReplication(final MiniDFSCluster cluster,
final ExtendedBlock block, final int waitFor) throws Exception {
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
try {
return getTrueReplication(cluster, block) == waitFor;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}, 500, 10000);
}
private int getTrueReplication(MiniDFSCluster cluster, ExtendedBlock block)
throws IOException {
int count = 0;
for (DataNode dn : cluster.getDataNodes()) {
try {
if (DataNodeTestUtils.getFSDataset(dn).getStoredBlock(
block.getBlockPoolId(), block.getBlockId()) != null) {
count++;
}
} catch (ReplicaNotFoundException e) {
continue;
}
}
return count;
}
/**
* A BlockPlacementPolicy which, rather than using space available, makes
* random decisions about which excess replica to delete. This is because,
* in the test cases, the two NNs will usually (but not quite always)
* make the same decision of which replica to delete. The fencing issues
* are exacerbated when the two NNs make different decisions, which can
* happen in "real life" when they have slightly out-of-sync heartbeat
* information regarding disk usage.
*/
public static | TestDNFencing |
java | spring-projects__spring-boot | module/spring-boot-hazelcast/src/test/java/org/springframework/boot/hazelcast/autoconfigure/HazelcastAutoConfigurationClientTests.java | {
"start": 8894,
"end": 9355
} | class ____ {
@Bean
HazelcastConnectionDetails hazelcastConnectionDetails() {
ClientConfig config = new ClientConfig();
config.setLabels(Set.of("connection-details"));
config.getConnectionStrategyConfig().getConnectionRetryConfig().setClusterConnectTimeoutMillis(60000);
config.getNetworkConfig().getAddresses().add(endpointAddress);
return () -> config;
}
}
@Configuration(proxyBeanMethods = false)
static | HazelcastConnectionDetailsConfig |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestTests.java | {
"start": 640,
"end": 4311
} | class ____ extends ESTestCase {
public void testSerialization() throws Exception {
ClusterAllocationExplainRequest request = new ClusterAllocationExplainRequest(
randomTimeValue(),
randomAlphaOfLength(4),
randomIntBetween(0, Integer.MAX_VALUE),
randomBoolean(),
randomBoolean() ? randomAlphaOfLength(5) : null
);
request.includeYesDecisions(randomBoolean());
request.includeDiskInfo(randomBoolean());
BytesStreamOutput output = new BytesStreamOutput();
request.writeTo(output);
ClusterAllocationExplainRequest actual = new ClusterAllocationExplainRequest(output.bytes().streamInput());
assertEquals(request.masterNodeTimeout(), actual.masterNodeTimeout());
assertEquals(request.getIndex(), actual.getIndex());
assertEquals(request.getShard(), actual.getShard());
assertEquals(request.isPrimary(), actual.isPrimary());
assertEquals(request.includeYesDecisions(), actual.includeYesDecisions());
assertEquals(request.includeDiskInfo(), actual.includeDiskInfo());
assertEquals(request.getCurrentNode(), actual.getCurrentNode());
}
public void testToStringWithEmptyBody() {
ClusterAllocationExplainRequest clusterAllocationExplainRequest = new ClusterAllocationExplainRequest(randomTimeValue());
clusterAllocationExplainRequest.includeYesDecisions(true);
clusterAllocationExplainRequest.includeDiskInfo(false);
String expected = "ClusterAllocationExplainRequest[useAnyUnassignedShard=true,"
+ "include_yes_decisions?=true,include_disk_info?=false";
assertEquals(expected, clusterAllocationExplainRequest.toString());
}
public void testToStringWithValidBodyButCurrentNodeIsNull() {
String index = "test-index";
int shard = randomInt();
boolean primary = randomBoolean();
ClusterAllocationExplainRequest clusterAllocationExplainRequest = new ClusterAllocationExplainRequest(
randomTimeValue(),
index,
shard,
primary,
null
);
clusterAllocationExplainRequest.includeYesDecisions(false);
clusterAllocationExplainRequest.includeDiskInfo(true);
String expected = "ClusterAllocationExplainRequest[index="
+ index
+ ",shard="
+ shard
+ ",primary?="
+ primary
+ ",include_yes_decisions?=false"
+ ",include_disk_info?=true";
assertEquals(expected, clusterAllocationExplainRequest.toString());
}
public void testToStringWithAllBodyParameters() {
String index = "test-index";
int shard = randomInt();
boolean primary = randomBoolean();
String currentNode = "current_node";
ClusterAllocationExplainRequest clusterAllocationExplainRequest = new ClusterAllocationExplainRequest(
randomTimeValue(),
index,
shard,
primary,
currentNode
);
clusterAllocationExplainRequest.includeYesDecisions(false);
clusterAllocationExplainRequest.includeDiskInfo(true);
String expected = "ClusterAllocationExplainRequest[index="
+ index
+ ",shard="
+ shard
+ ",primary?="
+ primary
+ ",current_node="
+ currentNode
+ ",include_yes_decisions?=false"
+ ",include_disk_info?=true";
assertEquals(expected, clusterAllocationExplainRequest.toString());
}
}
| ClusterAllocationExplainRequestTests |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AbstractAWSCredentialProvider.java | {
"start": 1144,
"end": 1795
} | class ____
implements AwsCredentialsProvider {
private final URI binding;
private final Configuration conf;
/**
* Construct from URI + configuration.
* @param uri URI: may be null.
* @param conf configuration.
*/
protected AbstractAWSCredentialProvider(
@Nullable final URI uri,
final Configuration conf) {
this.conf = conf;
this.binding = uri;
}
public Configuration getConf() {
return conf;
}
/**
* Get the binding URI: may be null.
* @return the URI this instance was constructed with,
* if any.
*/
public URI getUri() {
return binding;
}
}
| AbstractAWSCredentialProvider |
java | micronaut-projects__micronaut-core | retry/src/main/java/io/micronaut/retry/annotation/CircuitBreaker.java | {
"start": 1345,
"end": 3084
} | interface ____ {
int MAX_RETRY_ATTEMPTS = 4;
/**
* @return The exception types to include (defaults to all)
*/
@AliasFor(annotation = Retryable.class, member = "includes")
Class<? extends Throwable>[] includes() default {};
/**
* @return The exception types to exclude (defaults to none)
*/
@AliasFor(annotation = Retryable.class, member = "excludes")
Class<? extends Throwable>[] excludes() default {};
/**
* @return The maximum number of retry attempts
*/
@Digits(integer = MAX_RETRY_ATTEMPTS, fraction = 0)
@AliasFor(annotation = Retryable.class, member = "attempts")
String attempts() default "3";
/**
* @return The delay between retry attempts
*/
@AliasFor(annotation = Retryable.class, member = "delay")
String delay() default "500ms";
/**
* @return The multiplier to use to calculate the delay between retries.
*/
@Digits(integer = 2, fraction = 2)
@AliasFor(annotation = Retryable.class, member = "multiplier")
String multiplier() default "0";
/**
* The maximum overall delay for an operation to complete until the Circuit state is set to
* {@link io.micronaut.retry.CircuitState#OPEN}.
*
* @return The maximum overall delay
*/
@AliasFor(annotation = Retryable.class, member = "maxDelay")
String maxDelay() default "5s";
/**
* Sets the {@link java.time.Duration} of time before resetting the circuit to
* {@link io.micronaut.retry.CircuitState#HALF_OPEN} allowing a single retry.
*
* @return The {@link java.time.Duration} of time before reset
*/
String reset() default "20s";
/**
* @return The retry predicate | CircuitBreaker |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxConcatMapNoPrefetch.java | {
"start": 2439,
"end": 2528
} | class ____<T, R> implements FluxConcatMapSupport<T, R> {
| FluxConcatMapNoPrefetchSubscriber |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/initializers/annotation/GlobalConfig.java | {
"start": 893,
"end": 1027
} | class ____ {
@Bean
public String foo() {
return "foo";
}
@Bean
public String baz() {
return "global config";
}
}
| GlobalConfig |
java | apache__camel | components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/model/Context.java | {
"start": 956,
"end": 1004
} | enum ____ {
view,
embed,
edit;
}
| Context |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/id/enhanced/OptimizerUnitTest.java | {
"start": 12623,
"end": 13932
} | class ____ implements AccessCallback {
private IdentifierGeneratorHelper.BasicHolder value = new IdentifierGeneratorHelper.BasicHolder( Long.class );
private long initialValue;
private int increment;
private int timesCalled = 0;
public SourceMock(long initialValue) {
this( initialValue, 1 );
}
public SourceMock(long initialValue, int increment) {
this( initialValue, increment, 0 );
}
public SourceMock(long initialValue, int increment, int timesCalled) {
this.increment = increment;
this.timesCalled = timesCalled;
if ( timesCalled != 0 ) {
this.value.initialize( initialValue );
this.initialValue = 1;
}
else {
this.value.initialize( -1 );
this.initialValue = initialValue;
}
}
public IntegralDataTypeHolder getNextValue() {
try {
if ( timesCalled == 0 ) {
initValue();
return value.copy();
}
else {
return value.add( increment ).copy();
}
}
finally {
timesCalled++;
}
}
@Override
public String getTenantIdentifier() {
return null;
}
private void initValue() {
this.value.initialize( initialValue );
}
public int getTimesCalled() {
return timesCalled;
}
public long getCurrentValue() {
return value == null ? -1 : value.getActualLongValue();
}
}
}
| SourceMock |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/bigdecimals/BigDecimalAssert_scale_Test.java | {
"start": 923,
"end": 1768
} | class ____ {
@Test
void should_be_able_to_use_scale_assertions_on_big_decimal_scale() throws Exception {
// GIVEN
BigDecimal threeDecimal = new BigDecimal("1.111");
// THEN
then(threeDecimal).scale()
.isLessThan(4)
.isGreaterThan(2)
.isPositive()
.returnToBigDecimal()
.hasScaleOf(3);
}
@Test
void should_have_a_helpful_error_message_when_scale_assertion_is_used_on_a_null_big_decimal() {
// GIVEN
BigDecimal nullBigDecimal = null;
// WHEN/THEN
assertThatNullPointerException().isThrownBy(() -> assertThat(nullBigDecimal).scale().isBetween(2, 3))
.withMessage("Can not perform assertions on the scale of a null BigDecimal");
}
}
| BigDecimalAssert_scale_Test |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java | {
"start": 2169,
"end": 3252
} | class ____
implements Serializer<Serializable> {
private ObjectOutputStream oos;
@Override
public void open(OutputStream out) throws IOException {
oos = new ObjectOutputStream(out) {
@Override protected void writeStreamHeader() {
// no header
}
};
}
@Override
public void serialize(Serializable object) throws IOException {
oos.reset(); // clear (class) back-references
oos.writeObject(object);
}
@Override
public void close() throws IOException {
oos.close();
}
}
@Override
@InterfaceAudience.Private
public boolean accept(Class<?> c) {
return Serializable.class.isAssignableFrom(c);
}
@Override
@InterfaceAudience.Private
public Deserializer<Serializable> getDeserializer(Class<Serializable> c) {
return new JavaSerializationDeserializer<Serializable>();
}
@Override
@InterfaceAudience.Private
public Serializer<Serializable> getSerializer(Class<Serializable> c) {
return new JavaSerializationSerializer();
}
}
| JavaSerializationSerializer |
java | apache__camel | components/camel-mail/src/test/java/org/apache/camel/component/mail/MailUsernameWithAtSignTest.java | {
"start": 1215,
"end": 2419
} | class ____ extends CamelTestSupport {
private static final MailboxUser jamesAtSign
= Mailbox.getOrCreateUser("jamesAtSign@localhost", "jamesAtSign@localhost", "secret");
@Test
public void testMailUsingAtSignInUsername() throws Exception {
Mailbox.clearAll();
String body = "Hello Claus.\r\nYes it does.\r\n\r\nRegards James.\r\n";
template.sendBody("direct:a", body);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedBodiesReceived(body);
mock.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:a").to("smtp://localhost:" + Mailbox.getPort(Protocol.smtp)
+ "?username=jamesAtSign@localhost&password=secret");
from("pop3://localhost:" + Mailbox.getPort(Protocol.pop3) + "?username=" + jamesAtSign.getEmail() + "&password="
+ jamesAtSign.getPassword() + "&initialDelay=100&delay=100").to("mock:result");
}
};
}
}
| MailUsernameWithAtSignTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/AdviceWithCBRTest.java | {
"start": 1117,
"end": 3692
} | class ____ extends ContextTestSupport {
@Test
public void testAdviceCBR() throws Exception {
RouteDefinition route = context.getRouteDefinitions().get(0);
AdviceWith.adviceWith(route, context, new AdviceWithRouteBuilder() {
@Override
public void configure() {
weaveById("foo").after().to("mock:foo2");
weaveById("bar").after().to("mock:bar2");
}
});
getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:foo2").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:bar").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:bar2").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:baz").expectedBodiesReceived("Hi World");
template.sendBodyAndHeader("direct:start", "Hello World", "foo", "123");
template.sendBodyAndHeader("direct:start", "Bye World", "bar", "123");
template.sendBody("direct:start", "Hi World");
assertMockEndpointsSatisfied();
}
@Test
public void testAdviceToStringCBR() throws Exception {
RouteDefinition route = context.getRouteDefinitions().get(0);
AdviceWith.adviceWith(route, context, new AdviceWithRouteBuilder() {
@Override
public void configure() {
weaveByToString("To[mock:foo]").after().to("mock:foo2");
weaveByToString("To[mock:bar]").after().to("mock:bar2");
}
});
getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:foo2").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:bar").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:bar2").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:baz").expectedBodiesReceived("Hi World");
template.sendBodyAndHeader("direct:start", "Hello World", "foo", "123");
template.sendBodyAndHeader("direct:start", "Bye World", "bar", "123");
template.sendBody("direct:start", "Hi World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").choice().when(header("foo")).to("mock:foo").id("foo").when(header("bar")).to("mock:bar")
.id("bar").otherwise().to("mock:baz").id("baz");
}
};
}
}
| AdviceWithCBRTest |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/csv/BindySimpleCsvMarshallDslTest.java | {
"start": 1616,
"end": 3282
} | class ____ {
private List<Map<String, Object>> models = new ArrayList<>();
private String result = "1,B2,Keira,Knightley,ISIN,XX23456789,BUY,Share,450.45,EUR,14-01-2009,17-05-2010 23:21:59\r\n";
@Produce("direct:start")
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint resultEndpoint;
@Test
public void testMarshallMessage() throws Exception {
resultEndpoint.expectedBodiesReceived(result);
template.sendBody(generateModel());
resultEndpoint.assertIsSatisfied();
}
public List<Map<String, Object>> generateModel() {
Map<String, Object> modelObjects = new HashMap<>();
Order order = new Order();
order.setOrderNr(1);
order.setOrderType("BUY");
order.setClientNr("B2");
order.setFirstName("Keira");
order.setLastName("Knightley");
order.setAmount(new BigDecimal("450.45").setScale(2));
order.setInstrumentCode("ISIN");
order.setInstrumentNumber("XX23456789");
order.setInstrumentType("Share");
order.setCurrency("EUR");
Calendar calendar = new GregorianCalendar();
calendar.set(2009, 0, 14);
order.setOrderDate(calendar.getTime());
calendar = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
// 4 hour shift
// 17-05-2010 23:21:59 by GMT+4
calendar.set(2010, 4, 17, 19, 21, 59);
order.setOrderDateTime(calendar.getTime());
modelObjects.put(order.getClass().getName(), order);
models.add(modelObjects);
return models;
}
public static | BindySimpleCsvMarshallDslTest |
java | google__auto | value/src/main/java/com/google/auto/value/processor/KotlinMetadata.java | {
"start": 9083,
"end": 9491
} | class ____ {
final Object /* KmConstructor */ wrapped;
KmConstructor(Object wrapped) {
this.wrapped = wrapped;
}
List<KmValueParameter> getValueParameters() throws ReflectiveOperationException {
return ((List<?>) KM_CONSTRUCTOR_GET_VALUE_PARAMETERS.invoke(wrapped))
.stream().map(KmValueParameter::new).collect(toImmutableList());
}
}
private static | KmConstructor |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/configuration/interfaces/ActiveProfilesInterfaceTests.java | {
"start": 1284,
"end": 1553
} | class ____ implements ActiveProfilesTestInterface {
@Autowired
Employee employee;
@Test
void profileFromTestInterface() {
assertThat(employee).isNotNull();
assertThat(employee.getName()).isEqualTo("dev");
}
@Configuration
static | ActiveProfilesInterfaceTests |
java | google__guice | core/src/com/google/inject/internal/InternalFlags.java | {
"start": 2096,
"end": 2296
} | enum ____ {
/** No stack trace collection */
OFF,
/** Minimum stack trace collection (Default) */
ONLY_FOR_DECLARING_SOURCE,
}
/** The options for Guice custom | IncludeStackTraceOption |
java | apache__camel | components/camel-whatsapp/src/main/java/org/apache/camel/component/whatsapp/model/TemplateMessage.java | {
"start": 886,
"end": 1520
} | class ____ {
private String name;
private Language language;
private List<Component> components;
public TemplateMessage() {
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Language getLanguage() {
return language;
}
public void setLanguage(Language language) {
this.language = language;
}
public List<Component> getComponents() {
return components;
}
public void setComponents(List<Component> components) {
this.components = components;
}
}
| TemplateMessage |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/ConvertHeaderDefinition.java | {
"start": 1309,
"end": 5458
} | class ____ extends NoOutputDefinition<ConvertHeaderDefinition> {
@XmlTransient
private Class<?> typeClass;
@XmlAttribute(required = true)
private String name;
@XmlAttribute(required = true)
private String type;
@XmlAttribute
private String toName;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Boolean", defaultValue = "true")
private String mandatory;
@XmlAttribute
@Metadata(label = "advanced")
private String charset;
public ConvertHeaderDefinition() {
}
protected ConvertHeaderDefinition(ConvertHeaderDefinition source) {
super(source);
this.typeClass = source.typeClass;
this.name = source.name;
this.type = source.type;
this.toName = source.toName;
this.mandatory = source.mandatory;
this.charset = source.charset;
}
@Override
public ConvertHeaderDefinition copyDefinition() {
return new ConvertHeaderDefinition(this);
}
public ConvertHeaderDefinition(String name, String type) {
setName(name);
setType(type);
}
public ConvertHeaderDefinition(String name, Class<?> typeClass) {
setName(name);
setTypeClass(typeClass);
setType(typeClass.getCanonicalName());
}
public ConvertHeaderDefinition(String name, String toName, Class<?> typeClass) {
setName(name);
setToName(toName);
setTypeClass(typeClass);
setType(typeClass.getCanonicalName());
}
public ConvertHeaderDefinition(String name, Class<?> typeClass, boolean mandatory) {
setName(name);
setTypeClass(typeClass);
setType(typeClass.getCanonicalName());
setMandatory(mandatory ? "true" : "false");
}
public ConvertHeaderDefinition(String name, Class<?> typeClass, String charset) {
setName(name);
setTypeClass(typeClass);
setType(typeClass.getCanonicalName());
setCharset(charset);
}
@Override
public String toString() {
return "ConvertHeaderTo[" + getName() + ": " + getType() + "]";
}
@Override
public String getShortName() {
return "convertHeaderTo";
}
@Override
public String getLabel() {
return "convertHeaderTo[" + getType() + "]";
}
/**
* Name of message header to convert its value
* <p/>
* The <tt>simple</tt> language can be used to define a dynamic evaluated header name to be used. Otherwise a
* constant name will be used.
*/
public void setName(String name) {
this.name = name;
}
public String getName() {
return name;
}
public String getToName() {
return toName;
}
/**
* To use another header to store the result. By default, the result is stored in the same header. This option
* allows to use another header.
* <p/>
* The <tt>simple</tt> language can be used to define a dynamic evaluated header name to be used. Otherwise a
* constant name will be used.
*/
public void setToName(String toName) {
this.toName = toName;
}
public String getType() {
return type;
}
/**
* The java type to convert to
*/
public void setType(String type) {
this.type = type;
}
public Class<?> getTypeClass() {
return typeClass;
}
public void setTypeClass(Class<?> typeClass) {
this.typeClass = typeClass;
}
public String getCharset() {
return charset;
}
/**
* To use a specific charset when converting
*/
public void setCharset(String charset) {
this.charset = charset;
}
public String getMandatory() {
return mandatory;
}
/**
* When mandatory then the conversion must return a value (cannot be null), if this is not possible then
* NoTypeConversionAvailableException is thrown. Setting this to false could mean conversion is not possible and the
* value is null.
*/
public void setMandatory(String mandatory) {
this.mandatory = mandatory;
}
}
| ConvertHeaderDefinition |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/ResumeCheckpointManuallyITCase.java | {
"start": 21149,
"end": 21977
} | class ____
extends ManualWindowSpeedITCase.InfiniteTupleSource {
private static final long serialVersionUID = 8120981235081181746L;
private static CountDownLatch countDownLatch;
public NotifyingInfiniteTupleSource(int numKeys) {
super(numKeys);
}
@Override
public void run(SourceContext<Tuple2<String, Integer>> out) throws Exception {
if (countDownLatch != null) {
countDownLatch.countDown();
}
super.run(out);
}
}
/**
* This {@link WatermarkStrategy} assigns the current system time as the event-time timestamp.
* In a real use case you should use proper timestamps and an appropriate {@link
* WatermarkStrategy}.
*/
private static | NotifyingInfiniteTupleSource |
java | elastic__elasticsearch | libs/plugin-scanner/src/test/java/org/elasticsearch/plugin/scanner/ClassReadersTests.java | {
"start": 3692,
"end": 3814
} | class ____ {}
""")));
InMemoryJavaCompiler.compile("p.E", """
package p;
public | D |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/support/destination/BeanFactoryDestinationResolver.java | {
"start": 2114,
"end": 3020
} | class ____ the context of a Spring IoC container.
* @param beanFactory the bean factory to be used to lookup {@link jakarta.jms.Destination Destination}
*/
public BeanFactoryDestinationResolver(BeanFactory beanFactory) {
Assert.notNull(beanFactory, "BeanFactory is required");
this.beanFactory = beanFactory;
}
@Override
public void setBeanFactory(BeanFactory beanFactory) {
this.beanFactory = beanFactory;
}
@Override
public Destination resolveDestinationName(@Nullable Session session, String destinationName, boolean pubSubDomain)
throws JMSException {
Assert.state(this.beanFactory != null, "BeanFactory is required");
try {
return this.beanFactory.getBean(destinationName, Destination.class);
}
catch (BeansException ex) {
throw new DestinationResolutionException(
"Failed to look up Destination bean with name '" + destinationName + "'", ex);
}
}
}
| outside |
java | quarkusio__quarkus | extensions/kubernetes-client/deployment-internal/src/main/java/io/quarkus/kubernetes/client/deployment/internal/KubernetesClientBuildStep.java | {
"start": 516,
"end": 957
} | class ____ {
private KubernetesClientBuildConfig buildConfig;
@BuildStep
public KubernetesClientBuildItem process(QuarkusBuildCloseablesBuildItem closeablesBuildItem) {
QuarkusHttpClientFactory httpClientFactory = new QuarkusHttpClientFactory();
closeablesBuildItem.add(httpClientFactory);
return new KubernetesClientBuildItem(createConfig(buildConfig), httpClientFactory);
}
}
| KubernetesClientBuildStep |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit4SetUpNotRunTest.java | {
"start": 5771,
"end": 6279
} | interface ____ {}\
""")
.doTest();
}
@Test
public void negativeCases() {
compilationHelper
.addSourceLines(
"JUnit4SetUpNotRunNegativeCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Not a JUnit 4 test (no @RunWith annotation on the class). */
public | Before |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/convention/TestBeanTests.java | {
"start": 6749,
"end": 6916
} | class ____ {
@TestBean(methodName = "createExample")
private String example;
// NO createExample() method
}
abstract static | FailureMissingExplicitOverrideMethod |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationClassLoaderTests.java | {
"start": 1045,
"end": 1115
} | class ____ is
* used.
*
* @author Phillip Webb
* @since 5.2
*/
| loader |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/stats/StatsPersister.java | {
"start": 940,
"end": 2690
} | class ____ {
private static final Logger LOGGER = LogManager.getLogger(StatsPersister.class);
private final String jobId;
private final ResultsPersisterService resultsPersisterService;
private final DataFrameAnalyticsAuditor auditor;
public StatsPersister(String jobId, ResultsPersisterService resultsPersisterService, DataFrameAnalyticsAuditor auditor) {
this.jobId = Objects.requireNonNull(jobId);
this.resultsPersisterService = Objects.requireNonNull(resultsPersisterService);
this.auditor = Objects.requireNonNull(auditor);
}
public void persistWithRetry(ToXContentObject result, Function<String, String> docIdSupplier) {
try {
resultsPersisterService.indexWithRetry(
jobId,
MlStatsIndex.writeAlias(),
result,
new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true")),
WriteRequest.RefreshPolicy.NONE,
docIdSupplier.apply(jobId),
true,
() -> true,
retryMessage -> LOGGER.debug(
"[{}] failed to persist result with id [{}]; {}",
jobId,
docIdSupplier.apply(jobId),
retryMessage
)
);
} catch (IOException ioe) {
LOGGER.error(() -> "[" + jobId + "] Failed serializing stats result", ioe);
} catch (Exception e) {
LOGGER.error(() -> "[" + jobId + "] Failed indexing stats result", e);
auditor.error(jobId, "Failed indexing stats result with id [" + docIdSupplier.apply(jobId) + "]; " + e.getMessage());
}
}
}
| StatsPersister |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/support/AbstractApplicationContext.java | {
"start": 28112,
"end": 45572
} | interface ____ registered as resolvable type in a plain factory.
// MessageSource registered (and found for autowiring) as a bean.
beanFactory.registerResolvableDependency(BeanFactory.class, beanFactory);
beanFactory.registerResolvableDependency(ResourceLoader.class, this);
beanFactory.registerResolvableDependency(ApplicationEventPublisher.class, this);
beanFactory.registerResolvableDependency(ApplicationContext.class, this);
// Register early post-processor for detecting inner beans as ApplicationListeners.
beanFactory.addBeanPostProcessor(new ApplicationListenerDetector(this));
// Detect a LoadTimeWeaver and prepare for weaving, if found.
if (!NativeDetector.inNativeImage() && beanFactory.containsBean(LOAD_TIME_WEAVER_BEAN_NAME)) {
beanFactory.addBeanPostProcessor(new LoadTimeWeaverAwareProcessor(beanFactory));
// Set a temporary ClassLoader for type matching.
beanFactory.setTempClassLoader(new ContextTypeMatchClassLoader(beanFactory.getBeanClassLoader()));
}
// Register default environment beans.
if (!beanFactory.containsLocalBean(ENVIRONMENT_BEAN_NAME)) {
beanFactory.registerSingleton(ENVIRONMENT_BEAN_NAME, getEnvironment());
}
if (!beanFactory.containsLocalBean(SYSTEM_PROPERTIES_BEAN_NAME)) {
beanFactory.registerSingleton(SYSTEM_PROPERTIES_BEAN_NAME, getEnvironment().getSystemProperties());
}
if (!beanFactory.containsLocalBean(SYSTEM_ENVIRONMENT_BEAN_NAME)) {
beanFactory.registerSingleton(SYSTEM_ENVIRONMENT_BEAN_NAME, getEnvironment().getSystemEnvironment());
}
if (!beanFactory.containsLocalBean(APPLICATION_STARTUP_BEAN_NAME)) {
beanFactory.registerSingleton(APPLICATION_STARTUP_BEAN_NAME, getApplicationStartup());
}
}
/**
* Modify the application context's internal bean factory after its standard
* initialization. The initial definition resources will have been loaded but no
* post-processors will have run and no derived bean definitions will have been
* registered, and most importantly, no beans will have been instantiated yet.
* <p>This template method allows for registering special BeanPostProcessors
* etc in certain AbstractApplicationContext subclasses.
* @param beanFactory the bean factory used by the application context
*/
protected void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) {
}
/**
* Instantiate and invoke all registered BeanFactoryPostProcessor beans,
* respecting explicit order if given.
* <p>Must be called before singleton instantiation.
*/
protected void invokeBeanFactoryPostProcessors(ConfigurableListableBeanFactory beanFactory) {
PostProcessorRegistrationDelegate.invokeBeanFactoryPostProcessors(beanFactory, getBeanFactoryPostProcessors());
// Detect a LoadTimeWeaver and prepare for weaving, if found in the meantime
// (for example, through an @Bean method registered by ConfigurationClassPostProcessor)
if (!NativeDetector.inNativeImage() && beanFactory.getTempClassLoader() == null &&
beanFactory.containsBean(LOAD_TIME_WEAVER_BEAN_NAME)) {
beanFactory.addBeanPostProcessor(new LoadTimeWeaverAwareProcessor(beanFactory));
beanFactory.setTempClassLoader(new ContextTypeMatchClassLoader(beanFactory.getBeanClassLoader()));
}
}
/**
* Instantiate and register all BeanPostProcessor beans,
* respecting explicit order if given.
* <p>Must be called before any instantiation of application beans.
*/
protected void registerBeanPostProcessors(ConfigurableListableBeanFactory beanFactory) {
PostProcessorRegistrationDelegate.registerBeanPostProcessors(beanFactory, this);
}
/**
* Initialize the {@link MessageSource}.
* <p>Uses parent's {@code MessageSource} if none defined in this context.
* @see #MESSAGE_SOURCE_BEAN_NAME
*/
protected void initMessageSource() {
ConfigurableListableBeanFactory beanFactory = getBeanFactory();
if (beanFactory.containsLocalBean(MESSAGE_SOURCE_BEAN_NAME)) {
this.messageSource = beanFactory.getBean(MESSAGE_SOURCE_BEAN_NAME, MessageSource.class);
// Make MessageSource aware of parent MessageSource.
if (this.parent != null && this.messageSource instanceof HierarchicalMessageSource hms &&
hms.getParentMessageSource() == null) {
// Only set parent context as parent MessageSource if no parent MessageSource
// registered already.
hms.setParentMessageSource(getInternalParentMessageSource());
}
if (logger.isTraceEnabled()) {
logger.trace("Using MessageSource [" + this.messageSource + "]");
}
}
else {
// Use empty MessageSource to be able to accept getMessage calls.
DelegatingMessageSource dms = new DelegatingMessageSource();
dms.setParentMessageSource(getInternalParentMessageSource());
this.messageSource = dms;
beanFactory.registerSingleton(MESSAGE_SOURCE_BEAN_NAME, this.messageSource);
if (logger.isTraceEnabled()) {
logger.trace("No '" + MESSAGE_SOURCE_BEAN_NAME + "' bean, using [" + this.messageSource + "]");
}
}
}
/**
* Initialize the {@link ApplicationEventMulticaster}.
* <p>Uses {@link SimpleApplicationEventMulticaster} if none defined in the context.
* @see #APPLICATION_EVENT_MULTICASTER_BEAN_NAME
* @see org.springframework.context.event.SimpleApplicationEventMulticaster
*/
protected void initApplicationEventMulticaster() {
ConfigurableListableBeanFactory beanFactory = getBeanFactory();
if (beanFactory.containsLocalBean(APPLICATION_EVENT_MULTICASTER_BEAN_NAME)) {
this.applicationEventMulticaster =
beanFactory.getBean(APPLICATION_EVENT_MULTICASTER_BEAN_NAME, ApplicationEventMulticaster.class);
if (logger.isTraceEnabled()) {
logger.trace("Using ApplicationEventMulticaster [" + this.applicationEventMulticaster + "]");
}
}
else {
this.applicationEventMulticaster = new SimpleApplicationEventMulticaster(beanFactory);
beanFactory.registerSingleton(APPLICATION_EVENT_MULTICASTER_BEAN_NAME, this.applicationEventMulticaster);
if (logger.isTraceEnabled()) {
logger.trace("No '" + APPLICATION_EVENT_MULTICASTER_BEAN_NAME + "' bean, using " +
"[" + this.applicationEventMulticaster.getClass().getSimpleName() + "]");
}
}
}
/**
* Initialize the {@link LifecycleProcessor}.
* <p>Uses {@link DefaultLifecycleProcessor} if none defined in the context.
* @since 3.0
* @see #LIFECYCLE_PROCESSOR_BEAN_NAME
* @see org.springframework.context.support.DefaultLifecycleProcessor
*/
protected void initLifecycleProcessor() {
ConfigurableListableBeanFactory beanFactory = getBeanFactory();
if (beanFactory.containsLocalBean(LIFECYCLE_PROCESSOR_BEAN_NAME)) {
this.lifecycleProcessor = beanFactory.getBean(LIFECYCLE_PROCESSOR_BEAN_NAME, LifecycleProcessor.class);
if (logger.isTraceEnabled()) {
logger.trace("Using LifecycleProcessor [" + this.lifecycleProcessor + "]");
}
}
else {
DefaultLifecycleProcessor defaultProcessor = new DefaultLifecycleProcessor();
defaultProcessor.setBeanFactory(beanFactory);
this.lifecycleProcessor = defaultProcessor;
beanFactory.registerSingleton(LIFECYCLE_PROCESSOR_BEAN_NAME, this.lifecycleProcessor);
if (logger.isTraceEnabled()) {
logger.trace("No '" + LIFECYCLE_PROCESSOR_BEAN_NAME + "' bean, using " +
"[" + this.lifecycleProcessor.getClass().getSimpleName() + "]");
}
}
}
/**
* Template method which can be overridden to add context-specific refresh work.
* Called on initialization of special beans, before instantiation of singletons.
* <p>This implementation is empty.
* @throws BeansException in case of errors
* @see #refresh()
*/
protected void onRefresh() throws BeansException {
// For subclasses: do nothing by default.
}
/**
* Add beans that implement ApplicationListener as listeners.
* Doesn't affect other listeners, which can be added without being beans.
*/
protected void registerListeners() {
// Register statically specified listeners first.
for (ApplicationListener<?> listener : getApplicationListeners()) {
getApplicationEventMulticaster().addApplicationListener(listener);
}
// Do not initialize FactoryBeans here: We need to leave all regular beans
// uninitialized to let post-processors apply to them!
String[] listenerBeanNames = getBeanNamesForType(ApplicationListener.class, true, false);
for (String listenerBeanName : listenerBeanNames) {
getApplicationEventMulticaster().addApplicationListenerBean(listenerBeanName);
}
// Publish early application events now that we finally have a multicaster...
Set<ApplicationEvent> earlyEventsToProcess = this.earlyApplicationEvents;
this.earlyApplicationEvents = null;
if (!CollectionUtils.isEmpty(earlyEventsToProcess)) {
for (ApplicationEvent earlyEvent : earlyEventsToProcess) {
getApplicationEventMulticaster().multicastEvent(earlyEvent);
}
}
}
/**
* Finish the initialization of this context's bean factory,
* initializing all remaining singleton beans.
*/
@SuppressWarnings("unchecked")
protected void finishBeanFactoryInitialization(ConfigurableListableBeanFactory beanFactory) {
// Mark current thread for singleton instantiation with applied bootstrap locking.
beanFactory.prepareSingletonBootstrap();
// Initialize bootstrap executor for this context.
if (beanFactory.containsBean(BOOTSTRAP_EXECUTOR_BEAN_NAME) &&
beanFactory.isTypeMatch(BOOTSTRAP_EXECUTOR_BEAN_NAME, Executor.class)) {
beanFactory.setBootstrapExecutor(
beanFactory.getBean(BOOTSTRAP_EXECUTOR_BEAN_NAME, Executor.class));
}
// Initialize conversion service for this context.
if (beanFactory.containsBean(CONVERSION_SERVICE_BEAN_NAME) &&
beanFactory.isTypeMatch(CONVERSION_SERVICE_BEAN_NAME, ConversionService.class)) {
beanFactory.setConversionService(
beanFactory.getBean(CONVERSION_SERVICE_BEAN_NAME, ConversionService.class));
}
// Register a default embedded value resolver if no BeanFactoryPostProcessor
// (such as a PropertySourcesPlaceholderConfigurer bean) registered any before:
// at this point, primarily for resolution in annotation attribute values.
if (!beanFactory.hasEmbeddedValueResolver()) {
beanFactory.addEmbeddedValueResolver(strVal -> getEnvironment().resolvePlaceholders(strVal));
}
// Call BeanFactoryInitializer beans early to allow for initializing specific other beans early.
String[] initializerNames = beanFactory.getBeanNamesForType(BeanFactoryInitializer.class, false, false);
for (String initializerName : initializerNames) {
beanFactory.getBean(initializerName, BeanFactoryInitializer.class).initialize(beanFactory);
}
// Initialize LoadTimeWeaverAware beans early to allow for registering their transformers early.
String[] weaverAwareNames = beanFactory.getBeanNamesForType(LoadTimeWeaverAware.class, false, false);
for (String weaverAwareName : weaverAwareNames) {
try {
beanFactory.getBean(weaverAwareName, LoadTimeWeaverAware.class);
}
catch (BeanNotOfRequiredTypeException ex) {
if (logger.isDebugEnabled()) {
logger.debug("Failed to initialize LoadTimeWeaverAware bean '" + weaverAwareName +
"' due to unexpected type mismatch: " + ex.getMessage());
}
}
}
// Stop using the temporary ClassLoader for type matching.
beanFactory.setTempClassLoader(null);
// Allow for caching all bean definition metadata, not expecting further changes.
beanFactory.freezeConfiguration();
// Instantiate all remaining (non-lazy-init) singletons.
beanFactory.preInstantiateSingletons();
}
/**
* Finish the refresh of this context, invoking the LifecycleProcessor's
* onRefresh() method and publishing the
* {@link org.springframework.context.event.ContextRefreshedEvent}.
*/
protected void finishRefresh() {
// Reset common introspection caches in Spring's core infrastructure.
resetCommonCaches();
// Clear context-level resource caches (such as ASM metadata from scanning).
clearResourceCaches();
// Initialize lifecycle processor for this context.
initLifecycleProcessor();
// Propagate refresh to lifecycle processor first.
getLifecycleProcessor().onRefresh();
// Publish the final event.
publishEvent(new ContextRefreshedEvent(this));
}
/**
* Cancel this context's refresh attempt, resetting the {@code active} flag
* after an exception got thrown.
* @param ex the exception that led to the cancellation
*/
protected void cancelRefresh(Throwable ex) {
this.active.set(false);
// Reset common introspection caches in Spring's core infrastructure.
resetCommonCaches();
}
/**
* Reset Spring's common reflection metadata caches, in particular the
* {@link ReflectionUtils}, {@link AnnotationUtils}, {@link ResolvableType}
* and {@link CachedIntrospectionResults} caches.
* @since 4.2
* @see ReflectionUtils#clearCache()
* @see AnnotationUtils#clearCache()
* @see ResolvableType#clearCache()
* @see CachedIntrospectionResults#clearClassLoader(ClassLoader)
*/
protected void resetCommonCaches() {
ReflectionUtils.clearCache();
AnnotationUtils.clearCache();
ResolvableType.clearCache();
CachedIntrospectionResults.clearClassLoader(getClassLoader());
}
@Override
public void clearResourceCaches() {
super.clearResourceCaches();
if (this.resourcePatternResolver instanceof PathMatchingResourcePatternResolver pmrpr) {
pmrpr.clearCache();
}
}
/**
* Register a shutdown hook {@linkplain Thread#getName() named}
* {@code SpringContextShutdownHook} with the JVM runtime, closing this
* context on JVM shutdown unless it has already been closed at that time.
* <p>Delegates to {@code doClose()} for the actual closing procedure.
* @see Runtime#addShutdownHook
* @see ConfigurableApplicationContext#SHUTDOWN_HOOK_THREAD_NAME
* @see #close()
* @see #doClose()
*/
@Override
public void registerShutdownHook() {
if (this.shutdownHook == null) {
// No shutdown hook registered yet.
this.shutdownHook = new Thread(SHUTDOWN_HOOK_THREAD_NAME) {
@Override
public void run() {
if (isStartupShutdownThreadStuck()) {
active.set(false);
return;
}
startupShutdownLock.lock();
try {
doClose();
}
finally {
startupShutdownLock.unlock();
}
}
};
Runtime.getRuntime().addShutdownHook(this.shutdownHook);
}
}
/**
* Determine whether an active startup/shutdown thread is currently stuck,
* for example, through a {@code System.exit} call in a user component.
*/
private boolean isStartupShutdownThreadStuck() {
Thread activeThread = this.startupShutdownThread;
if (activeThread != null && activeThread.getState() == Thread.State.WAITING) {
// Indefinitely waiting: might be Thread.join or the like, or System.exit
activeThread.interrupt();
try {
// Leave just a little bit of time for the interruption to show effect
Thread.sleep(1);
}
catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
if (activeThread.getState() == Thread.State.WAITING) {
// Interrupted but still waiting: very likely a System.exit call
return true;
}
}
return false;
}
/**
* Close this application context, destroying all beans in its bean factory.
* <p>Delegates to {@code doClose()} for the actual closing procedure.
* Also removes a JVM shutdown hook, if registered, as it's not needed anymore.
* @see #doClose()
* @see #registerShutdownHook()
*/
@Override
public void close() {
if (isStartupShutdownThreadStuck()) {
this.active.set(false);
return;
}
this.startupShutdownLock.lock();
try {
this.startupShutdownThread = Thread.currentThread();
doClose();
// If we registered a JVM shutdown hook, we don't need it anymore now:
// We've already explicitly closed the context.
if (this.shutdownHook != null) {
try {
Runtime.getRuntime().removeShutdownHook(this.shutdownHook);
}
catch (IllegalStateException ex) {
// ignore - VM is already shutting down
}
}
}
finally {
this.startupShutdownThread = null;
this.startupShutdownLock.unlock();
}
}
/**
* Actually performs context closing: publishes a ContextClosedEvent and
* destroys the singletons in the bean factory of this application context.
* <p>Called by both {@code close()} and a JVM shutdown hook, if any.
* @see org.springframework.context.event.ContextClosedEvent
* @see #destroyBeans()
* @see #close()
* @see #registerShutdownHook()
*/
protected void doClose() {
// Check whether an actual close attempt is necessary...
if (this.active.get() && this.closed.compareAndSet(false, true)) {
if (logger.isDebugEnabled()) {
logger.debug("Closing " + this);
}
try {
// Publish shutdown event.
publishEvent(new ContextClosedEvent(this));
}
catch (Throwable ex) {
logger.warn("Exception thrown from ApplicationListener handling ContextClosedEvent", ex);
}
// Stop all Lifecycle beans, to avoid delays during individual destruction.
if (this.lifecycleProcessor != null) {
try {
this.lifecycleProcessor.onClose();
}
catch (Throwable ex) {
logger.warn("Exception thrown from LifecycleProcessor on context close", ex);
}
}
// Destroy all cached singletons in the context's BeanFactory.
destroyBeans();
// Close the state of this context itself.
closeBeanFactory();
// Let subclasses do some final clean-up if they wish...
onClose();
// Reset common introspection caches to avoid | not |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/engine/DefaultSupervisingRouteControllerTest.java | {
"start": 2160,
"end": 8479
} | class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testSupervising() throws Exception {
// lets make a simple route
context.addRoutes(new MyRoute());
// configure supervising route controller
SupervisingRouteController src = context.getRouteController().supervising();
src.setBackOffDelay(25);
src.setBackOffMaxAttempts(3);
src.setInitialDelay(100);
src.setThreadPoolSize(2);
List<CamelEvent.RouteRestartingFailureEvent> failures = Collections.synchronizedList(new ArrayList<>());
List<CamelEvent.RouteRestartingEvent> events = Collections.synchronizedList(new ArrayList<>());
context.getManagementStrategy().addEventNotifier(new SimpleEventNotifierSupport() {
@Override
public void notify(CamelEvent event) throws Exception {
if (event instanceof CamelEvent.RouteRestartingFailureEvent rfe) {
failures.add(rfe);
} else if (event instanceof RouteRestartingEvent rre) {
events.add(rre);
}
}
});
context.start();
MockEndpoint mock = context.getEndpoint("mock:foo", MockEndpoint.class);
mock.expectedMinimumMessageCount(3);
MockEndpoint mock2 = context.getEndpoint("mock:cheese", MockEndpoint.class);
mock2.expectedMessageCount(0);
MockEndpoint mock3 = context.getEndpoint("mock:cake", MockEndpoint.class);
mock3.expectedMessageCount(0);
MockEndpoint mock4 = context.getEndpoint("mock:bar", MockEndpoint.class);
mock4.expectedMessageCount(0);
MockEndpoint.assertIsSatisfied(10, TimeUnit.SECONDS, mock, mock2, mock3, mock4);
assertEquals("Started", context.getRouteController().getRouteStatus("foo").toString());
// cheese was not able to start
assertEquals("Stopped", context.getRouteController().getRouteStatus("cheese").toString());
// cake was not able to start
assertEquals("Stopped", context.getRouteController().getRouteStatus("cake").toString());
await("Await all exceptions and retries finished")
.atMost(Duration.ofMillis(src.getInitialDelay() + src.getBackOffDelay() * (src.getBackOffMaxAttempts() + 1)))
.untilAsserted(() -> assertNotNull(src.getRestartException("cake")));
Throwable e = src.getRestartException("cake");
assertEquals("Cannot start", e.getMessage());
boolean b = e instanceof IllegalArgumentException;
assertTrue(b);
// bar is no auto startup
assertEquals("Stopped", context.getRouteController().getRouteStatus("bar").toString());
assertEquals(10, failures.size(),
"There should have 2 x 1 initial + 2 x 3 restart failure + 2 x 1 exhausted failures.");
assertEquals(6, events.size(), "There should have been 2 x 3 restart attempts.");
assertEquals(2, failures.stream().filter(failure -> failure.isExhausted()).count(),
"There should be 2 exhausted failure. Current state of failure list: " + getFailureStatus(failures));
}
private String getFailureStatus(List<RouteRestartingFailureEvent> failure) {
StringBuilder sb = new StringBuilder();
for (RouteRestartingFailureEvent routeRestartingFailureEvent : failure) {
sb.append("\nAttempt: " + routeRestartingFailureEvent.getAttempt());
sb.append(", Is exhausted: " + routeRestartingFailureEvent.isExhausted());
sb.append(", Cause: " + routeRestartingFailureEvent.getCause() != null
? routeRestartingFailureEvent.getCause().getMessage() : "No exception");
sb.append(", timestamp: " + routeRestartingFailureEvent.getTimestamp());
}
return sb.toString();
}
@Test
public void testSupervisingOk() throws Exception {
// lets make a simple route
context.addRoutes(new MyRoute());
// configure supervising
SupervisingRouteController src = context.getRouteController().supervising();
src.setBackOffDelay(25);
src.setBackOffMaxAttempts(10);
src.setInitialDelay(100);
src.setThreadPoolSize(2);
List<CamelEvent.RouteRestartingFailureEvent> failure = Collections.synchronizedList(new ArrayList<>());
List<CamelEvent.RouteRestartingEvent> events = Collections.synchronizedList(new ArrayList<>());
context.getManagementStrategy().addEventNotifier(new SimpleEventNotifierSupport() {
@Override
public void notify(CamelEvent event) throws Exception {
if (event instanceof CamelEvent.RouteRestartingFailureEvent rfe) {
failure.add(rfe);
} else if (event instanceof RouteRestartingEvent rre) {
events.add(rre);
}
}
});
context.start();
MockEndpoint mock = context.getEndpoint("mock:foo", MockEndpoint.class);
mock.expectedMinimumMessageCount(3);
MockEndpoint mock2 = context.getEndpoint("mock:cheese", MockEndpoint.class);
mock2.expectedMessageCount(0);
MockEndpoint mock3 = context.getEndpoint("mock:cake", MockEndpoint.class);
mock3.expectedMessageCount(0);
MockEndpoint mock4 = context.getEndpoint("mock:bar", MockEndpoint.class);
mock4.expectedMessageCount(0);
MockEndpoint.assertIsSatisfied(10, TimeUnit.SECONDS, mock, mock2, mock3, mock4);
// these should all start
assertEquals("Started", context.getRouteController().getRouteStatus("foo").toString());
assertEquals("Started", context.getRouteController().getRouteStatus("cheese").toString());
assertEquals("Started", context.getRouteController().getRouteStatus("cake").toString());
// bar is no auto startup
assertEquals("Stopped", context.getRouteController().getRouteStatus("bar").toString());
// 2 x 1 initial + 2 x 4 restart failure attempts
assertEquals(10, failure.size());
// 2 x 5 restart attempts
assertEquals(10, events.size());
}
private static | DefaultSupervisingRouteControllerTest |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/IncludeClassNameFilter.java | {
"start": 827,
"end": 872
} | class ____ be included.
*
* @since 1.0
*/
| will |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/ChronoZonedDateTimeByInstantComparatorTest.java | {
"start": 974,
"end": 2419
} | class ____ {
private ChronoZonedDateTimeByInstantComparator comparator;
@BeforeEach
public void setUp() {
comparator = ChronoZonedDateTimeByInstantComparator.getInstance();
}
@Test
void should_have_one_instance() {
assertThat(comparator).isSameAs(ChronoZonedDateTimeByInstantComparator.getInstance());
}
@Test
void should_have_description() {
assertThat(comparator.description()).isEqualTo("ChronoZonedDateTime.timeLineOrder()");
}
@Test
void should_disregard_time_zone_difference() {
ZonedDateTime now = ZonedDateTime.now();
ZonedDateTime inParis = now.withZoneSameInstant(ZoneId.of("Europe/Paris"));
ZonedDateTime inNewYork = now.withZoneSameInstant(ZoneId.of("America/New_York"));
assertThat(inParis.compareTo(inNewYork)).as("Built-in comparison should report that they differ").isNotZero();
assertThat(comparator.compare(inParis, inNewYork)).isZero();
}
@Test
void should_disregard_chronology_difference() {
ZonedDateTime now = ZonedDateTime.now();
ZonedDateTime inTokyo = now.withZoneSameInstant(ZoneId.of("Asia/Tokyo"));
ChronoZonedDateTime<JapaneseDate> inTokyoJapanese = JapaneseChronology.INSTANCE.zonedDateTime(now);
assertThat(inTokyoJapanese.compareTo(inTokyo)).as("Built-in comparison should report that they differ").isNotZero();
assertThat(comparator.compare(inTokyoJapanese, inTokyo)).isZero();
}
}
| ChronoZonedDateTimeByInstantComparatorTest |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/context/DubboConfigApplicationListener.java | {
"start": 1644,
"end": 3586
} | class ____
implements ApplicationListener<DubboConfigInitEvent>, ApplicationContextAware {
private static final ErrorTypeAwareLogger logger =
LoggerFactory.getErrorTypeAwareLogger(DubboConfigApplicationListener.class);
private ApplicationContext applicationContext;
private ModuleModel moduleModel;
private final AtomicBoolean initialized = new AtomicBoolean();
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
this.moduleModel = DubboBeanUtils.getModuleModel(applicationContext);
}
@Override
public void onApplicationEvent(DubboConfigInitEvent event) {
if (nullSafeEquals(applicationContext, event.getSource())) {
init();
}
}
public synchronized void init() {
// It's expected to be notified at
// org.springframework.context.support.AbstractApplicationContext.registerListeners(),
// before loading non-lazy singleton beans. At this moment, all BeanFactoryPostProcessor have been processed,
if (initialized.compareAndSet(false, true)) {
initDubboConfigBeans();
}
}
private void initDubboConfigBeans() {
// load DubboConfigBeanInitializer to init config beans
if (applicationContext.containsBean(DubboConfigBeanInitializer.BEAN_NAME)) {
applicationContext.getBean(DubboConfigBeanInitializer.BEAN_NAME, DubboConfigBeanInitializer.class);
} else {
logger.warn(
CONFIG_DUBBO_BEAN_NOT_FOUND,
"",
"",
"Bean '" + DubboConfigBeanInitializer.BEAN_NAME + "' was not found");
}
// All infrastructure config beans are loaded, initialize dubbo here
moduleModel.getDeployer().prepare();
}
}
| DubboConfigApplicationListener |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1400/Issue1494.java | {
"start": 315,
"end": 616
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
String json = "{\"id\":1001,\"name\":\"wenshao\"}";
B b = JSON.parseObject(json, B.class);
assertEquals("{\"id\":1001,\"name\":\"wenshao\"}", JSON.toJSONString(b));
}
public static | Issue1494 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/caching/CachingAndVersionTest.java | {
"start": 1882,
"end": 2473
} | class ____ {
@Id
@GeneratedValue
private Long id;
@Version
private Integer rowVersion;
@OneToMany(mappedBy = "domain")
private Set<Server> servers = new HashSet<>();
public Long getId() {
return id;
}
public Integer getRowVersion() {
return rowVersion;
}
public Set<Server> getServers() {
return servers;
}
public void addServer(Server server) {
servers.add( server );
server.setDomain( this );
}
}
@Entity(name = "Server")
@Table(name = "SERVER_TABLE")
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public static | Domain |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ClassNamedLikeTypeParameterTest.java | {
"start": 1886,
"end": 2043
} | class ____<X> {
public <T> void genericMethod(X foo, T bar) {}
}
}
""")
.doTest();
}
}
| HasGeneric |
java | google__error-prone | core/src/test/java/com/google/errorprone/ErrorProneCompilerIntegrationTest.java | {
"start": 11647,
"end": 12455
} | class ____ {
public Test() {}
}
""")));
assertWithMessage("[SuperCallMatcher]")
.that(diagnosticHelper.getDiagnostics())
.comparingElementsUsing(DIAGNOSTIC_CONTAINING)
.doesNotContain("Warning should be found. " + diagnosticHelper.describe());
assertWithMessage(outputStream.toString()).that(exitCode).isEqualTo(Result.OK);
}
@Test
public void invalidFlagCausesCmdErrResult() {
String[] args = {"-Xep:"};
assertThrows(
InvalidCommandLineOptionException.class,
() ->
compiler.compile(
args,
Arrays.asList(
forSourceLines(
"Test.java",
"""
public | Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/LazyLoadingByEnhancerSetterTest.java | {
"start": 2847,
"end": 3149
} | class ____ is being compared to the persistent map (by the generated code) -- it shouldn't
item = s.find( ItemProperty.class, "P" );
} );
scope.inTransaction( s -> {
mergedItem = (Item) s.merge( item );
} );
assertEquals( 2, mergedItem.getParameters().size() );
}
// --- //
private | and |
java | apache__camel | components/camel-vertx/camel-vertx/src/generated/java/org/apache/camel/component/vertx/VertxComponentConfigurer.java | {
"start": 732,
"end": 4249
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
VertxComponent target = (VertxComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "host": target.setHost(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "port": target.setPort(property(camelContext, int.class, value)); return true;
case "timeout": target.setTimeout(property(camelContext, int.class, value)); return true;
case "vertx": target.setVertx(property(camelContext, io.vertx.core.Vertx.class, value)); return true;
case "vertxfactory":
case "vertxFactory": target.setVertxFactory(property(camelContext, io.vertx.core.impl.VertxBuilder.class, value)); return true;
case "vertxoptions":
case "vertxOptions": target.setVertxOptions(property(camelContext, io.vertx.core.VertxOptions.class, value)); return true;
default: return false;
}
}
@Override
public String[] getAutowiredNames() {
return new String[]{"vertx"};
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "host": return java.lang.String.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "port": return int.class;
case "timeout": return int.class;
case "vertx": return io.vertx.core.Vertx.class;
case "vertxfactory":
case "vertxFactory": return io.vertx.core.impl.VertxBuilder.class;
case "vertxoptions":
case "vertxOptions": return io.vertx.core.VertxOptions.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
VertxComponent target = (VertxComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "host": return target.getHost();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "port": return target.getPort();
case "timeout": return target.getTimeout();
case "vertx": return target.getVertx();
case "vertxfactory":
case "vertxFactory": return target.getVertxFactory();
case "vertxoptions":
case "vertxOptions": return target.getVertxOptions();
default: return null;
}
}
}
| VertxComponentConfigurer |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/ttl/HeapAsyncSnapshotTtlStateTest.java | {
"start": 1048,
"end": 1543
} | class ____ extends TtlStateTestBase {
@Override
protected StateBackendTestContext createStateBackendTestContext(TtlTimeProvider timeProvider) {
return new StateBackendTestContext(timeProvider) {
@Override
protected StateBackend createStateBackend() {
return new HashMapStateBackend();
}
};
}
@Override
protected boolean incrementalCleanupSupported() {
return true;
}
}
| HeapAsyncSnapshotTtlStateTest |
java | mapstruct__mapstruct | integrationtest/src/test/resources/springTest/src/test/java/org/mapstruct/itest/spring/SpringBasedMapperTest.java | {
"start": 1033,
"end": 2800
} | class ____ {
}
@Autowired
private SourceTargetMapper mapper;
@Autowired
private DecoratedSourceTargetMapper decoratedMapper;
@Autowired
private SecondDecoratedSourceTargetMapper secondDecoratedMapper;
@Test
public void shouldInjectSpringBasedMapper() {
Source source = new Source();
Target target = mapper.sourceToTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getFoo() ).isEqualTo( Long.valueOf( 42 ) );
assertThat( target.getDate() ).isEqualTo( "1980" );
}
@Test
public void shouldInjectDecorator() {
Source source = new Source();
Target target = decoratedMapper.sourceToTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getFoo() ).isEqualTo( Long.valueOf( 43 ) );
assertThat( target.getDate() ).isEqualTo( "1980" );
target = decoratedMapper.undecoratedSourceToTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getFoo() ).isEqualTo( Long.valueOf( 42 ) );
assertThat( target.getDate() ).isEqualTo( "1980" );
}
@Test
public void shouldInjectSecondDecorator() {
Source source = new Source();
Target target = secondDecoratedMapper.sourceToTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getFoo() ).isEqualTo( Long.valueOf( 43 ) );
assertThat( target.getDate() ).isEqualTo( "1980" );
target = secondDecoratedMapper.undecoratedSourceToTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getFoo() ).isEqualTo( Long.valueOf( 42 ) );
assertThat( target.getDate() ).isEqualTo( "1980" );
}
}
| SpringTestConfig |
java | apache__maven | compat/maven-compat/src/test/java/org/apache/maven/project/EmptyLifecycleBindingsInjector.java | {
"start": 5624,
"end": 5921
} | class ____ implements PackagingRegistry {
@Override
public Optional<Packaging> lookup(String id) {
return getDelegate().lookup(id);
}
private PackagingRegistry getDelegate() {
return packagingRegistry;
}
}
}
| WrapperPackagingRegistry |
java | hibernate__hibernate-orm | tooling/hibernate-gradle-plugin/src/test/java/org/hibernate/orm/tooling/gradle/TestsBase.java | {
"start": 528,
"end": 1688
} | class ____ {
protected abstract String getProjectName();
protected abstract String getSourceSetName();
protected abstract String getLanguageName();
protected abstract String getCompileTaskName();
public void testEnhancement(Path projectDir) throws Exception {
final String buildFilePath = getProjectName() + "/build.gradle";
final String sourceSetName = getSourceSetName();
final String compileTaskName = getCompileTaskName();
final File classesDir = new File( projectDir.toFile(), "build/classes/" + getLanguageName() + "/" + sourceSetName );
Copier.copyProject( buildFilePath, projectDir );
System.out.println( "Starting execution ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" );
final GradleRunner gradleRunner = TestHelper.usingGradleRunner()
.withProjectDir( projectDir.toFile() )
.withArguments( compileTaskName, "--stacktrace", "--no-build-cache", "--configuration-cache" );
final BuildResult result = gradleRunner.build();
final BuildTask task = result.task( ":" + compileTaskName );
assertThat( task ).isNotNull();
assertThat( task.getOutcome() ).isEqualTo( TaskOutcome.SUCCESS );
// make sure the | TestsBase |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/TestingConventionsPrecommitPlugin.java | {
"start": 1299,
"end": 5820
} | class ____ extends PrecommitPlugin {
public static final String TESTING_CONVENTIONS_TASK_NAME = "testingConventions";
@Override
public TaskProvider<? extends Task> createTask(Project project) {
project.getPlugins().apply(JavaBasePlugin.class);
var javaPluginExtension = project.getExtensions().getByType(JavaPluginExtension.class);
var sourceSets = javaPluginExtension.getSourceSets();
var tasks = project.getTasks();
project.getPlugins().withType(JavaPlugin.class, javaPlugin -> {
NamedDomainObjectProvider<SourceSet> sourceSet = sourceSets.named(SourceSet.TEST_SOURCE_SET_NAME);
setupTaskForSourceSet(project, sourceSet, t -> {
t.getSuffixes().convention(List.of("Tests"));
t.getBaseClasses().convention(List.of("org.apache.lucene.tests.util.LuceneTestCase"));
});
});
project.getPlugins().withType(LegacyYamlRestTestPlugin.class, yamlRestTestPlugin -> {
NamedDomainObjectProvider<SourceSet> sourceSet = sourceSets.named(LegacyYamlRestTestPlugin.SOURCE_SET_NAME);
setupTaskForSourceSet(project, sourceSet, t -> {
t.getSuffixes().convention(List.of("IT"));
t.getBaseClasses().convention(List.of("org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase"));
});
});
project.getPlugins().withType(InternalClusterTestPlugin.class, internalClusterTestPlugin -> {
NamedDomainObjectProvider<SourceSet> sourceSet = sourceSets.named(InternalClusterTestPlugin.SOURCE_SET_NAME);
setupTaskForSourceSet(project, sourceSet, t -> {
// Unfortunately we see both in our build, so we by default support both for now.
t.getSuffixes().convention(List.of("IT", "Tests"));
t.getBaseClasses()
.convention(
List.of(
"org.elasticsearch.test.ESIntegTestCase",
"org.elasticsearch.test.ESSingleNodeTestCase",
"org.elasticsearch.test.rest.ESRestTestCase",
"org.elasticsearch.test.AbstractMultiClustersTestCase"
)
);
});
});
project.getPlugins().withType(LegacyJavaRestTestPlugin.class, javaRestTestPlugin -> {
NamedDomainObjectProvider<SourceSet> sourceSet = sourceSets.named(LegacyJavaRestTestPlugin.SOURCE_SET_NAME);
setupTaskForSourceSet(project, sourceSet, t -> {
t.getSuffixes().convention(List.of("IT"));
t.getBaseClasses()
.convention(List.of("org.elasticsearch.test.ESIntegTestCase", "org.elasticsearch.test.rest.ESRestTestCase"));
});
});
project.getPlugins().withType(InternalJavaRestTestPlugin.class, javaRestTestPlugin -> {
NamedDomainObjectProvider<SourceSet> sourceSet = sourceSets.named(LegacyJavaRestTestPlugin.SOURCE_SET_NAME);
setupTaskForSourceSet(project, sourceSet, t -> {
t.getSuffixes().convention(List.of("IT"));
t.getBaseClasses()
.convention(List.of("org.elasticsearch.test.ESIntegTestCase", "org.elasticsearch.test.rest.ESRestTestCase"));
});
});
// Create a convenience task for all checks (this does not conflict with extension, as it has higher priority in DSL):
return tasks.register(TESTING_CONVENTIONS_TASK_NAME, task -> {
task.setDescription("Runs all testing conventions checks.");
task.dependsOn(tasks.withType(TestingConventionsCheckTask.class));
});
}
private void setupTaskForSourceSet(
Project project,
NamedDomainObjectProvider<SourceSet> sourceSetProvider,
Action<TestingConventionsCheckTask> config
) {
sourceSetProvider.configure(sourceSet -> {
String taskName = sourceSet.getTaskName(null, TESTING_CONVENTIONS_TASK_NAME);
TaskProvider<TestingConventionsCheckTask> register = project.getTasks()
.register(taskName, TestingConventionsCheckTask.class, task -> {
task.getTestClassesDirs().from(sourceSet.getOutput().getClassesDirs());
task.getClasspath().from(sourceSet.getRuntimeClasspath());
});
register.configure(config);
});
}
}
| TestingConventionsPrecommitPlugin |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedClassIntegrationTests.java | {
"start": 55000,
"end": 55316
} | class ____ extends BaseTestCase {
@Parameter(1)
String value;
@Test
void test(TestReporter reporter) {
reporter.publishEntry(Map.of( //
"super.value", super.value, //
"this.value", this.value //
));
}
}
@ParameterizedClass
@ValueSource(ints = 1)
static | InheritedHiddenParameterFieldTestCase |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/context/request/async/CallableProcessingInterceptor.java | {
"start": 1883,
"end": 6660
} | interface ____ {
/**
* Constant indicating that no result has been determined by this
* interceptor, giving subsequent interceptors a chance.
* @see #handleTimeout
* @see #handleError
*/
Object RESULT_NONE = new Object();
/**
* Constant indicating that the response has been handled by this interceptor
* without a result and that no further interceptors are to be invoked.
* @see #handleTimeout
* @see #handleError
*/
Object RESPONSE_HANDLED = new Object();
/**
* Invoked <em>before</em> the start of concurrent handling in the original
* thread in which the {@code Callable} is submitted for concurrent handling.
* <p>This is useful for capturing the state of the current thread just prior to
* invoking the {@link Callable}. Once the state is captured, it can then be
* transferred to the new {@link Thread} in
* {@link #preProcess(NativeWebRequest, Callable)}. Capturing the state of
* Spring Security's SecurityContextHolder and migrating it to the new Thread
* is a concrete example of where this is useful.
* <p>The default implementation is empty.
* @param request the current request
* @param task the task for the current async request
* @throws Exception in case of errors
*/
default <T> void beforeConcurrentHandling(NativeWebRequest request, Callable<T> task) throws Exception {
}
/**
* Invoked <em>after</em> the start of concurrent handling in the async
* thread in which the {@code Callable} is executed and <em>before</em> the
* actual invocation of the {@code Callable}.
* <p>The default implementation is empty.
* @param request the current request
* @param task the task for the current async request
* @throws Exception in case of errors
*/
default <T> void preProcess(NativeWebRequest request, Callable<T> task) throws Exception {
}
/**
* Invoked <em>after</em> the {@code Callable} has produced a result in the
* async thread in which the {@code Callable} is executed. This method may
* be invoked later than {@code afterTimeout} or {@code afterCompletion}
* depending on when the {@code Callable} finishes processing.
* <p>The default implementation is empty.
* @param request the current request
* @param task the task for the current async request
* @param concurrentResult the result of concurrent processing, which could
* be a {@link Throwable} if the {@code Callable} raised an exception
* @throws Exception in case of errors
*/
default <T> void postProcess(NativeWebRequest request, Callable<T> task,
@Nullable Object concurrentResult) throws Exception {
}
/**
* Invoked from a container thread when the async request times out before
* the {@code Callable} task completes. Implementations may return a value,
* including an {@link Exception}, to use instead of the value the
* {@link Callable} did not return in time.
* <p>The default implementation always returns {@link #RESULT_NONE}.
* @param request the current request
* @param task the task for the current async request
* @return a concurrent result value; if the value is anything other than
* {@link #RESULT_NONE} or {@link #RESPONSE_HANDLED}, concurrent processing
* is resumed and subsequent interceptors are not invoked
* @throws Exception in case of errors
*/
default <T> Object handleTimeout(NativeWebRequest request, Callable<T> task) throws Exception {
return RESULT_NONE;
}
/**
* Invoked from a container thread when an error occurred while processing
* the async request before the {@code Callable} task completes.
* Implementations may return a value, including an {@link Exception}, to
* use instead of the value the {@link Callable} did not return in time.
* <p>The default implementation always returns {@link #RESULT_NONE}.
* @param request the current request
* @param task the task for the current async request
* @param t the error that occurred while request processing
* @return a concurrent result value; if the value is anything other than
* {@link #RESULT_NONE} or {@link #RESPONSE_HANDLED}, concurrent processing
* is resumed and subsequent interceptors are not invoked
* @throws Exception in case of errors
* @since 5.0
*/
default <T> Object handleError(NativeWebRequest request, Callable<T> task, Throwable t) throws Exception {
return RESULT_NONE;
}
/**
* Invoked from a container thread when async processing completes for any
* reason including timeout or network error.
* <p>The default implementation is empty.
* @param request the current request
* @param task the task for the current async request
* @throws Exception in case of errors
*/
default <T> void afterCompletion(NativeWebRequest request, Callable<T> task) throws Exception {
}
}
| CallableProcessingInterceptor |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/task/TaskDecorator.java | {
"start": 1688,
"end": 2057
} | interface ____ {
/**
* Decorate the given {@code Runnable}, returning a potentially wrapped
* {@code Runnable} for actual execution, internally delegating to the
* original {@link Runnable#run()} implementation.
* @param runnable the original {@code Runnable}
* @return the decorated {@code Runnable}
*/
Runnable decorate(Runnable runnable);
}
| TaskDecorator |
java | elastic__elasticsearch | plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java | {
"start": 11323,
"end": 11492
} | class ____ use with highlighters where the content being highlighted
* needs plain text format for highlighting but marked-up format for token discovery.
* The | for |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/ParamDeclarationDefaultValueValidationFailureTest.java | {
"start": 456,
"end": 1562
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addAsResource(new StringAsset(
"{@java.lang.String myName=1}\n{myName}"),
"templates/myName.html"))
.assertException(t -> {
Throwable e = t;
TemplateException te = null;
while (e != null) {
if (e instanceof TemplateException) {
te = (TemplateException) e;
break;
}
e = e.getCause();
}
assertNotNull(te);
assertTrue(te.getMessage().contains(
" The type of the default value [java.lang.Integer] does not match the type of the parameter declaration [java.lang.String]"),
te.getMessage());
});
@Test
public void testValidation() {
fail();
}
}
| ParamDeclarationDefaultValueValidationFailureTest |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/language/SingleInputTypedExpressionDefinition.java | {
"start": 2431,
"end": 3792
} | class ____<
T extends AbstractBuilder<T, E>, E extends SingleInputTypedExpressionDefinition>
extends TypedExpressionDefinition.AbstractBuilder<T, E> {
private String source;
/**
* Source to use, instead of message body. You can prefix with variable:, header:, or property: to specify kind
* of source. Otherwise, the source is assumed to be a variable. Use empty or null to use default source, which
* is the message body.
*/
public T source(String source) {
this.source = source;
return (T) this;
}
/**
* Name of variable to use as source, instead of the message body
*/
public T variableName(String variableName) {
this.source = "variable:" + variableName;
return (T) this;
}
/**
* Name of header to use as input, instead of the message body
*/
public T headerName(String headerName) {
this.source = "header:" + headerName;
return (T) this;
}
/**
* Name of property to use as input, instead of the message body.
*/
public T propertyName(String propertyName) {
this.source = "property:" + propertyName;
return (T) this;
}
}
}
| AbstractBuilder |
java | spring-projects__spring-framework | spring-messaging/src/test/java/org/springframework/messaging/handler/annotation/reactive/MessageMappingMessageHandlerTests.java | {
"start": 2612,
"end": 7314
} | class ____ {
private TestEncoderMethodReturnValueHandler returnValueHandler;
@Test
void handleString() {
MessageMappingMessageHandler messsageHandler = initMesssageHandler();
messsageHandler.handleMessage(message("string", "abcdef")).block(Duration.ofSeconds(5));
verifyOutputContent(Collections.singletonList("abcdef::response"));
}
@Test
void handleMonoString() {
MessageMappingMessageHandler messsageHandler = initMesssageHandler();
messsageHandler.handleMessage(message("monoString", "abcdef")).block(Duration.ofSeconds(5));
verifyOutputContent(Collections.singletonList("abcdef::response"));
}
@Test
void handleFluxString() {
MessageMappingMessageHandler messsageHandler = initMesssageHandler();
messsageHandler.handleMessage(message("fluxString", "abc", "def", "ghi")).block(Duration.ofSeconds(5));
verifyOutputContent(Arrays.asList("abc::response", "def::response", "ghi::response"));
}
@Test
void handleWithPlaceholderInMapping() {
MessageMappingMessageHandler messsageHandler = initMesssageHandler();
messsageHandler.handleMessage(message("path123", "abcdef")).block(Duration.ofSeconds(5));
verifyOutputContent(Collections.singletonList("abcdef::response"));
}
@Test
void handleWithDestinationVariable() {
MessageMappingMessageHandler messsageHandler = initMesssageHandler();
messsageHandler.handleMessage(message("destination.test", "abcdef")).block(Duration.ofSeconds(5));
verifyOutputContent(Collections.singletonList("test::abcdef::response"));
}
@Test
void handleException() {
MessageMappingMessageHandler messsageHandler = initMesssageHandler();
messsageHandler.handleMessage(message("exception", "abc")).block(Duration.ofSeconds(5));
verifyOutputContent(Collections.singletonList("rejected::handled"));
}
@Test
void handleErrorSignal() {
MessageMappingMessageHandler messsageHandler = initMesssageHandler();
messsageHandler.handleMessage(message("errorSignal", "abc")).block(Duration.ofSeconds(5));
verifyOutputContent(Collections.singletonList("rejected::handled"));
}
@Test
void unhandledExceptionShouldFlowThrough() {
GenericMessage<?> message = new GenericMessage<>(new Object(),
Collections.singletonMap(DestinationPatternsMessageCondition.LOOKUP_DESTINATION_HEADER,
new SimpleRouteMatcher(new AntPathMatcher()).parseRoute("string")));
StepVerifier.create(initMesssageHandler().handleMessage(message))
.expectErrorSatisfies(ex -> assertThat(ex.getMessage().startsWith("Could not resolve method parameter at index 0")).as("Actual: " + ex.getMessage()).isTrue())
.verify(Duration.ofSeconds(5));
}
private MessageMappingMessageHandler initMesssageHandler() {
List<Decoder<?>> decoders = Collections.singletonList(StringDecoder.allMimeTypes());
List<Encoder<?>> encoders = Collections.singletonList(CharSequenceEncoder.allMimeTypes());
ReactiveAdapterRegistry registry = ReactiveAdapterRegistry.getSharedInstance();
this.returnValueHandler = new TestEncoderMethodReturnValueHandler(encoders, registry);
PropertySource<?> source = new MapPropertySource("test", Collections.singletonMap("path", "path123"));
StaticApplicationContext context = new StaticApplicationContext();
context.getEnvironment().getPropertySources().addFirst(source);
context.registerSingleton("testController", TestController.class);
context.refresh();
MessageMappingMessageHandler messageHandler = new MessageMappingMessageHandler();
messageHandler.getReturnValueHandlerConfigurer().addCustomHandler(this.returnValueHandler);
messageHandler.setApplicationContext(context);
messageHandler.setEmbeddedValueResolver(new EmbeddedValueResolver(context.getBeanFactory()));
messageHandler.setDecoders(decoders);
messageHandler.afterPropertiesSet();
return messageHandler;
}
private Message<?> message(String destination, String... content) {
Flux<DataBuffer> payload = Flux.fromIterable(Arrays.asList(content)).map(this::toDataBuffer);
MessageHeaderAccessor headers = new MessageHeaderAccessor();
headers.setLeaveMutable(true);
headers.setHeader(DestinationPatternsMessageCondition.LOOKUP_DESTINATION_HEADER,
new SimpleRouteMatcher(new AntPathMatcher()).parseRoute(destination));
return MessageBuilder.createMessage(payload, headers.getMessageHeaders());
}
private DataBuffer toDataBuffer(String payload) {
return DefaultDataBufferFactory.sharedInstance.wrap(payload.getBytes(UTF_8));
}
private void verifyOutputContent(List<String> expected) {
Flux<String> result = this.returnValueHandler.getContentAsStrings();
StepVerifier.create(result.collectList()).expectNext(expected).verifyComplete();
}
@Controller
static | MessageMappingMessageHandlerTests |
java | apache__spark | mllib/src/test/java/org/apache/spark/mllib/regression/JavaRidgeRegressionSuite.java | {
"start": 1150,
"end": 3709
} | class ____ extends SharedSparkSession {
private static double predictionError(List<LabeledPoint> validationData,
RidgeRegressionModel model) {
double errorSum = 0;
for (LabeledPoint point : validationData) {
double prediction = model.predict(point.features());
errorSum += (prediction - point.label()) * (prediction - point.label());
}
return errorSum / validationData.size();
}
private static List<LabeledPoint> generateRidgeData(int numPoints, int numFeatures, double std) {
// Pick weights as random values distributed uniformly in [-0.5, 0.5]
Random random = new Random(42);
double[] w = new double[numFeatures];
for (int i = 0; i < w.length; i++) {
w[i] = random.nextDouble() - 0.5;
}
return LinearDataGenerator.generateLinearInputAsList(0.0, w, numPoints, 42, std);
}
@Test
public void runRidgeRegressionUsingConstructor() {
int numExamples = 50;
int numFeatures = 20;
List<LabeledPoint> data = generateRidgeData(2 * numExamples, numFeatures, 10.0);
JavaRDD<LabeledPoint> testRDD = jsc.parallelize(
new ArrayList<>(data.subList(0, numExamples)));
List<LabeledPoint> validationData = data.subList(numExamples, 2 * numExamples);
RidgeRegressionWithSGD ridgeSGDImpl = new RidgeRegressionWithSGD(1.0, 200, 0.0, 1.0);
RidgeRegressionModel model = ridgeSGDImpl.run(testRDD.rdd());
double unRegularizedErr = predictionError(validationData, model);
ridgeSGDImpl.optimizer().setRegParam(0.1);
model = ridgeSGDImpl.run(testRDD.rdd());
double regularizedErr = predictionError(validationData, model);
Assertions.assertTrue(regularizedErr < unRegularizedErr);
}
@Test
public void runRidgeRegressionUsingStaticMethods() {
int numExamples = 50;
int numFeatures = 20;
List<LabeledPoint> data = generateRidgeData(2 * numExamples, numFeatures, 10.0);
JavaRDD<LabeledPoint> testRDD = jsc.parallelize(
new ArrayList<>(data.subList(0, numExamples)));
List<LabeledPoint> validationData = data.subList(numExamples, 2 * numExamples);
RidgeRegressionModel model = new RidgeRegressionWithSGD(1.0, 200, 0.0, 1.0)
.run(testRDD.rdd());
double unRegularizedErr = predictionError(validationData, model);
model = new RidgeRegressionWithSGD(1.0, 200, 0.1, 1.0)
.run(testRDD.rdd());
double regularizedErr = predictionError(validationData, model);
Assertions.assertTrue(regularizedErr < unRegularizedErr);
}
}
| JavaRidgeRegressionSuite |
java | quarkusio__quarkus | extensions/reactive-routes/deployment/src/main/java/io/quarkus/vertx/web/deployment/ReactiveRoutesProcessor.java | {
"start": 78208,
"end": 79604
} | class ____ implements ValueProvider {
private final DotName annotationName;
private final MethodDesc multiMapAccessor;
private final MethodDesc valueAccessor;
public ParamAndHeaderProvider(DotName annotationName, MethodDesc multiMapAccessor,
MethodDesc valueAccessor) {
this.annotationName = annotationName;
this.multiMapAccessor = multiMapAccessor;
this.valueAccessor = valueAccessor;
}
@Override
public Expr get(MethodParameterInfo methodParam, Set<AnnotationInstance> annotations, Var routingContext,
BlockCreator b0, BuildProducer<ReflectiveHierarchyBuildItem> reflectiveHierarchy) {
AnnotationValue paramAnnotationValue = Annotations.find(annotations, annotationName).value();
String paramName = paramAnnotationValue != null ? paramAnnotationValue.asString() : null;
if (paramName == null || paramName.equals(Param.ELEMENT_NAME)) {
paramName = methodParam.name();
}
if (paramName == null) {
throw new IllegalStateException("Unable to determine the name of parameter #" + methodParam.position()
+ " of " + methodParam.method().declaringClass().name() + "." + methodParam.method().name()
+ "() - compile the | ParamAndHeaderProvider |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/callbacks/returning/NodeDto.java | {
"start": 262,
"end": 1143
} | class ____ {
private NodeDto parent;
private String name;
private List<NodeDto> children;
private List<AttributeDto> attributes;
public NodeDto getParent() {
return parent;
}
public void setParent(NodeDto parent) {
this.parent = parent;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<NodeDto> getChildren() {
return children;
}
public void setChildren(List<NodeDto> children) {
this.children = children;
}
public List<AttributeDto> getAttributes() {
return attributes;
}
public void setAttributes(List<AttributeDto> attributes) {
this.attributes = attributes;
}
@Override
public String toString() {
return "NodeDto [name=" + name + "]";
}
}
| NodeDto |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java | {
"start": 20898,
"end": 21201
} | class ____ implements GenerationTarget {
private final ArrayList<String> commands = new ArrayList<>();
@Override
public void prepare() {
}
@Override
public void accept(String command) {
commands.add( command );
}
@Override
public void release() {
}
}
}
| JournalingGenerationTarget |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/ConsumerProtocolUtils.java | {
"start": 1250,
"end": 1351
} | class ____ common assignment or consumer protocol utility methods such as de/serialization
*/
public | for |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeExtractorTest.java | {
"start": 22059,
"end": 28493
} | class ____ {
private String myField1;
private int myField2;
public CustomChainingPojoType() {}
public CustomChainingPojoType setMyField1(String myField1) {
this.myField1 = myField1;
return this;
}
public CustomChainingPojoType setMyField2(int myField2) {
this.myField2 = myField2;
return this;
}
public String getMyField1() {
return myField1;
}
public int getMyField2() {
return myField2;
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testTupleWithPojo() {
// use getMapReturnTypes()
RichMapFunction<?, ?> function =
new RichMapFunction<Tuple2<Long, CustomType>, Tuple2<Long, CustomType>>() {
private static final long serialVersionUID = 1L;
@Override
public Tuple2<Long, CustomType> map(Tuple2<Long, CustomType> value)
throws Exception {
return null;
}
};
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(
function,
(TypeInformation)
TypeInformation.of(new TypeHint<Tuple2<Long, CustomType>>() {}));
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
assertThat(tti.getTypeClass()).isEqualTo(Tuple2.class);
List<FlatFieldDescriptor> ffd = new ArrayList<FlatFieldDescriptor>();
tti.getFlatFields("f0", 0, ffd);
assertThat(ffd).hasSize(1);
assertThat(ffd.get(0).getPosition()).isZero(); // Long
assertThat(ffd.get(0).getType().getTypeClass()).isEqualTo(Long.class);
ffd.clear();
tti.getFlatFields("f1.myField1", 0, ffd);
assertThat(ffd.get(0).getPosition()).isOne();
assertThat(ffd.get(0).getType().getTypeClass()).isEqualTo(String.class);
ffd.clear();
tti.getFlatFields("f1.myField2", 0, ffd);
assertThat(ffd.get(0).getPosition()).isEqualTo(2);
assertThat(ffd.get(0).getType().getTypeClass()).isEqualTo(Integer.class);
assertThat(tti.getTypeAt(0).getTypeClass()).isEqualTo(Long.class);
assertThat(tti.getTypeAt(1) instanceof PojoTypeInfo).isTrue();
assertThat(tti.getTypeAt(1).getTypeClass()).isEqualTo(CustomType.class);
// use getForObject()
Tuple2<?, ?> t = new Tuple2<Long, CustomType>(1L, new CustomType("Hello", 1));
TypeInformation<?> ti2 = TypeExtractor.getForObject(t);
assertThat(ti2.isTupleType()).isTrue();
assertThat(ti2.getArity()).isEqualTo(2);
TupleTypeInfo<?> tti2 = (TupleTypeInfo<?>) ti2;
assertThat(tti2.getTypeClass()).isEqualTo(Tuple2.class);
assertThat(tti2.getTypeAt(0).getTypeClass()).isEqualTo(Long.class);
assertThat(tti2.getTypeAt(1) instanceof PojoTypeInfo).isTrue();
assertThat(tti2.getTypeAt(1).getTypeClass()).isEqualTo(CustomType.class);
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testValue() {
// use getKeyExtractorType()
KeySelector<?, ?> function =
new KeySelector<StringValue, StringValue>() {
private static final long serialVersionUID = 1L;
@Override
public StringValue getKey(StringValue value) {
return null;
}
};
TypeInformation<?> ti =
TypeExtractor.getKeySelectorTypes(
function,
(TypeInformation) TypeInformation.of(new TypeHint<StringValue>() {}));
assertThat(ti.isBasicType()).isFalse();
assertThat(ti.isTupleType()).isFalse();
assertThat(ti).isInstanceOf(ValueTypeInfo.class);
assertThat(ti.getTypeClass()).isEqualTo(StringValue.class);
// use getForClass()
assertThat(TypeExtractor.getForClass(StringValue.class) instanceof ValueTypeInfo).isTrue();
assertThat(ti.getTypeClass())
.isEqualTo(TypeExtractor.getForClass(StringValue.class).getTypeClass());
// use getForObject()
StringValue v = new StringValue("Hello");
assertThat(TypeExtractor.getForObject(v) instanceof ValueTypeInfo).isTrue();
assertThat(ti.getTypeClass()).isEqualTo(TypeExtractor.getForObject(v).getTypeClass());
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testTupleOfValues() {
// use getMapReturnTypes()
RichMapFunction<?, ?> function =
new RichMapFunction<
Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>() {
private static final long serialVersionUID = 1L;
@Override
public Tuple2<StringValue, IntValue> map(Tuple2<StringValue, IntValue> value)
throws Exception {
return null;
}
};
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(
function,
(TypeInformation)
TypeInformation.of(
new TypeHint<Tuple2<StringValue, IntValue>>() {}));
assertThat(ti.isBasicType()).isFalse();
assertThat(ti.isTupleType()).isTrue();
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(0).getTypeClass())
.isEqualTo(StringValue.class);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(1).getTypeClass()).isEqualTo(IntValue.class);
// use getForObject()
Tuple2<StringValue, IntValue> t =
new Tuple2<StringValue, IntValue>(new StringValue("x"), new IntValue(1));
TypeInformation<?> ti2 = TypeExtractor.getForObject(t);
assertThat(ti2.isBasicType()).isFalse();
assertThat(ti2.isTupleType()).isTrue();
assertThat(((TupleTypeInfo<?>) ti2).getTypeAt(0).getTypeClass())
.isEqualTo(StringValue.class);
assertThat(((TupleTypeInfo<?>) ti2).getTypeAt(1).getTypeClass()).isEqualTo(IntValue.class);
}
public static | CustomChainingPojoType |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/DefaultErrorHandlerFailureRouteIdTest.java | {
"start": 1186,
"end": 2415
} | class ____ extends ContextTestSupport {
@Test
public void testFailureRouteId() throws Exception {
getMockEndpoint("mock:foo").expectedMessageCount(1);
getMockEndpoint("mock:bar").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.send("direct:foo", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody("Hello World");
}
});
assertNotNull(out);
assertMockEndpointsSatisfied();
assertTrue(out.isFailed());
assertNotNull(out.getException(IllegalArgumentException.class));
assertEquals("bar", out.getProperty(Exchange.FAILURE_ROUTE_ID));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:foo").routeId("foo").to("mock:foo").to("direct:bar").to("mock:result");
from("direct:bar").routeId("bar").to("mock:bar").throwException(new IllegalArgumentException("Forced"));
}
};
}
}
| DefaultErrorHandlerFailureRouteIdTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/protocol/types/Type.java | {
"start": 2938,
"end": 40743
} | class ____ extends Type {
/**
* Short name of the type to identify it in documentation;
* @return the name of the type
*/
public abstract String typeName();
/**
* Documentation of the Type.
*
* @return details about valid values, representation
*/
public abstract String documentation();
@Override
public String toString() {
return typeName();
}
}
/**
* The Boolean type represents a boolean value in a byte by using
* the value of 0 to represent false, and 1 to represent true.
*
* If for some reason a value that is not 0 or 1 is read,
* then any non-zero value will return true.
*/
public static final DocumentedType BOOLEAN = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
if ((Boolean) o)
buffer.put((byte) 1);
else
buffer.put((byte) 0);
}
@Override
public Object read(ByteBuffer buffer) {
byte value = buffer.get();
return value != 0;
}
@Override
public int sizeOf(Object o) {
return 1;
}
@Override
public String typeName() {
return "BOOLEAN";
}
@Override
public Boolean validate(Object item) {
if (item instanceof Boolean)
return (Boolean) item;
else
throw new SchemaException(item + " is not a Boolean.");
}
@Override
public String documentation() {
return "Represents a boolean value in a byte. " +
"Values 0 and 1 are used to represent false and true respectively. " +
"When reading a boolean value, any non-zero value is considered true.";
}
};
public static final DocumentedType INT8 = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
buffer.put((Byte) o);
}
@Override
public Object read(ByteBuffer buffer) {
return buffer.get();
}
@Override
public int sizeOf(Object o) {
return 1;
}
@Override
public String typeName() {
return "INT8";
}
@Override
public Byte validate(Object item) {
if (item instanceof Byte)
return (Byte) item;
else
throw new SchemaException(item + " is not a Byte.");
}
@Override
public String documentation() {
return "Represents an integer between -2<sup>7</sup> and 2<sup>7</sup>-1 inclusive.";
}
};
public static final DocumentedType INT16 = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
buffer.putShort((Short) o);
}
@Override
public Object read(ByteBuffer buffer) {
return buffer.getShort();
}
@Override
public int sizeOf(Object o) {
return 2;
}
@Override
public String typeName() {
return "INT16";
}
@Override
public Short validate(Object item) {
if (item instanceof Short)
return (Short) item;
else
throw new SchemaException(item + " is not a Short.");
}
@Override
public String documentation() {
return "Represents an integer between -2<sup>15</sup> and 2<sup>15</sup>-1 inclusive. " +
"The values are encoded using two bytes in network byte order (big-endian).";
}
};
public static final DocumentedType UINT16 = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
Integer value = (Integer) o;
buffer.putShort((short) value.intValue());
}
@Override
public Object read(ByteBuffer buffer) {
short value = buffer.getShort();
return Short.toUnsignedInt(value);
}
@Override
public int sizeOf(Object o) {
return 2;
}
@Override
public String typeName() {
return "UINT16";
}
@Override
public Integer validate(Object item) {
if (item instanceof Integer)
return (Integer) item;
else
throw new SchemaException(item + " is not an a Integer (encoding an unsigned short)");
}
@Override
public String documentation() {
return "Represents an integer between 0 and 65535 inclusive. " +
"The values are encoded using two bytes in network byte order (big-endian).";
}
};
public static final DocumentedType INT32 = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
buffer.putInt((Integer) o);
}
@Override
public Object read(ByteBuffer buffer) {
return buffer.getInt();
}
@Override
public int sizeOf(Object o) {
return 4;
}
@Override
public String typeName() {
return "INT32";
}
@Override
public Integer validate(Object item) {
if (item instanceof Integer)
return (Integer) item;
else
throw new SchemaException(item + " is not an Integer.");
}
@Override
public String documentation() {
return "Represents an integer between -2<sup>31</sup> and 2<sup>31</sup>-1 inclusive. " +
"The values are encoded using four bytes in network byte order (big-endian).";
}
};
public static final DocumentedType UNSIGNED_INT32 = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
ByteUtils.writeUnsignedInt(buffer, (long) o);
}
@Override
public Object read(ByteBuffer buffer) {
return ByteUtils.readUnsignedInt(buffer);
}
@Override
public int sizeOf(Object o) {
return 4;
}
@Override
public String typeName() {
return "UINT32";
}
@Override
public Long validate(Object item) {
if (item instanceof Long)
return (Long) item;
else
throw new SchemaException(item + " is not an a Long (encoding an unsigned integer).");
}
@Override
public String documentation() {
return "Represents an integer between 0 and 2<sup>32</sup>-1 inclusive. " +
"The values are encoded using four bytes in network byte order (big-endian).";
}
};
public static final DocumentedType INT64 = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
buffer.putLong((Long) o);
}
@Override
public Object read(ByteBuffer buffer) {
return buffer.getLong();
}
@Override
public int sizeOf(Object o) {
return 8;
}
@Override
public String typeName() {
return "INT64";
}
@Override
public Long validate(Object item) {
if (item instanceof Long)
return (Long) item;
else
throw new SchemaException(item + " is not a Long.");
}
@Override
public String documentation() {
return "Represents an integer between -2<sup>63</sup> and 2<sup>63</sup>-1 inclusive. " +
"The values are encoded using eight bytes in network byte order (big-endian).";
}
};
public static final DocumentedType UUID = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
final Uuid uuid = (Uuid) o;
buffer.putLong(uuid.getMostSignificantBits());
buffer.putLong(uuid.getLeastSignificantBits());
}
@Override
public Object read(ByteBuffer buffer) {
return new Uuid(buffer.getLong(), buffer.getLong());
}
@Override
public int sizeOf(Object o) {
return 16;
}
@Override
public String typeName() {
return "UUID";
}
@Override
public Uuid validate(Object item) {
if (item instanceof Uuid)
return (Uuid) item;
else
throw new SchemaException(item + " is not a Uuid.");
}
@Override
public String documentation() {
return "Represents a type 4 immutable universally unique identifier (Uuid). " +
"The values are encoded using sixteen bytes in network byte order (big-endian).";
}
};
public static final DocumentedType FLOAT64 = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
ByteUtils.writeDouble((Double) o, buffer);
}
@Override
public Object read(ByteBuffer buffer) {
return ByteUtils.readDouble(buffer);
}
@Override
public int sizeOf(Object o) {
return 8;
}
@Override
public String typeName() {
return "FLOAT64";
}
@Override
public Double validate(Object item) {
if (item instanceof Double)
return (Double) item;
else
throw new SchemaException(item + " is not a Double.");
}
@Override
public String documentation() {
return "Represents a double-precision 64-bit format IEEE 754 value. " +
"The values are encoded using eight bytes in network byte order (big-endian).";
}
};
public static final DocumentedType STRING = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
byte[] bytes = Utils.utf8((String) o);
if (bytes.length > Short.MAX_VALUE)
throw new SchemaException("String length " + bytes.length + " is larger than the maximum string length.");
buffer.putShort((short) bytes.length);
buffer.put(bytes);
}
@Override
public String read(ByteBuffer buffer) {
short length = buffer.getShort();
if (length < 0)
throw new SchemaException("String length " + length + " cannot be negative");
if (length > buffer.remaining())
throw new SchemaException("Error reading string of length " + length + ", only " + buffer.remaining() + " bytes available");
String result = Utils.utf8(buffer, length);
buffer.position(buffer.position() + length);
return result;
}
@Override
public int sizeOf(Object o) {
return 2 + Utils.utf8Length((String) o);
}
@Override
public String typeName() {
return "STRING";
}
@Override
public String validate(Object item) {
if (item instanceof String)
return (String) item;
else
throw new SchemaException(item + " is not a String.");
}
@Override
public String documentation() {
return "Represents a sequence of characters. First the length N is given as an " + INT16 +
". Then N bytes follow which are the UTF-8 encoding of the character sequence. " +
"Length must not be negative.";
}
};
public static final DocumentedType COMPACT_STRING = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
byte[] bytes = Utils.utf8((String) o);
if (bytes.length > Short.MAX_VALUE)
throw new SchemaException("String length " + bytes.length + " is larger than the maximum string length.");
ByteUtils.writeUnsignedVarint(bytes.length + 1, buffer);
buffer.put(bytes);
}
@Override
public String read(ByteBuffer buffer) {
int length = ByteUtils.readUnsignedVarint(buffer) - 1;
if (length < 0)
throw new SchemaException("String length " + length + " cannot be negative");
if (length > Short.MAX_VALUE)
throw new SchemaException("String length " + length + " is larger than the maximum string length.");
if (length > buffer.remaining())
throw new SchemaException("Error reading string of length " + length + ", only " + buffer.remaining() + " bytes available");
String result = Utils.utf8(buffer, length);
buffer.position(buffer.position() + length);
return result;
}
@Override
public int sizeOf(Object o) {
int length = Utils.utf8Length((String) o);
return ByteUtils.sizeOfUnsignedVarint(length + 1) + length;
}
@Override
public String typeName() {
return "COMPACT_STRING";
}
@Override
public String validate(Object item) {
if (item instanceof String)
return (String) item;
else
throw new SchemaException(item + " is not a String.");
}
@Override
public String documentation() {
return "Represents a sequence of characters. First the length N + 1 is given as an UNSIGNED_VARINT " +
". Then N bytes follow which are the UTF-8 encoding of the character sequence.";
}
};
public static final DocumentedType NULLABLE_STRING = new DocumentedType() {
@Override
public boolean isNullable() {
return true;
}
@Override
public void write(ByteBuffer buffer, Object o) {
if (o == null) {
buffer.putShort((short) -1);
return;
}
byte[] bytes = Utils.utf8((String) o);
if (bytes.length > Short.MAX_VALUE)
throw new SchemaException("String length " + bytes.length + " is larger than the maximum string length.");
buffer.putShort((short) bytes.length);
buffer.put(bytes);
}
@Override
public String read(ByteBuffer buffer) {
short length = buffer.getShort();
if (length < 0)
return null;
if (length > buffer.remaining())
throw new SchemaException("Error reading string of length " + length + ", only " + buffer.remaining() + " bytes available");
String result = Utils.utf8(buffer, length);
buffer.position(buffer.position() + length);
return result;
}
@Override
public int sizeOf(Object o) {
if (o == null)
return 2;
return 2 + Utils.utf8Length((String) o);
}
@Override
public String typeName() {
return "NULLABLE_STRING";
}
@Override
public String validate(Object item) {
if (item == null)
return null;
if (item instanceof String)
return (String) item;
else
throw new SchemaException(item + " is not a String.");
}
@Override
public String documentation() {
return "Represents a sequence of characters or null. For non-null strings, first the length N is given as an " + INT16 +
". Then N bytes follow which are the UTF-8 encoding of the character sequence. " +
"A null value is encoded with length of -1 and there are no following bytes.";
}
};
public static final DocumentedType COMPACT_NULLABLE_STRING = new DocumentedType() {
@Override
public boolean isNullable() {
return true;
}
@Override
public void write(ByteBuffer buffer, Object o) {
if (o == null) {
ByteUtils.writeUnsignedVarint(0, buffer);
} else {
byte[] bytes = Utils.utf8((String) o);
if (bytes.length > Short.MAX_VALUE)
throw new SchemaException("String length " + bytes.length + " is larger than the maximum string length.");
ByteUtils.writeUnsignedVarint(bytes.length + 1, buffer);
buffer.put(bytes);
}
}
@Override
public String read(ByteBuffer buffer) {
int length = ByteUtils.readUnsignedVarint(buffer) - 1;
if (length < 0) {
return null;
} else if (length > Short.MAX_VALUE) {
throw new SchemaException("String length " + length + " is larger than the maximum string length.");
} else if (length > buffer.remaining()) {
throw new SchemaException("Error reading string of length " + length + ", only " + buffer.remaining() + " bytes available");
} else {
String result = Utils.utf8(buffer, length);
buffer.position(buffer.position() + length);
return result;
}
}
@Override
public int sizeOf(Object o) {
if (o == null) {
return 1;
}
int length = Utils.utf8Length((String) o);
return ByteUtils.sizeOfUnsignedVarint(length + 1) + length;
}
@Override
public String typeName() {
return "COMPACT_NULLABLE_STRING";
}
@Override
public String validate(Object item) {
if (item == null) {
return null;
} else if (item instanceof String) {
return (String) item;
} else {
throw new SchemaException(item + " is not a String.");
}
}
@Override
public String documentation() {
return "Represents a sequence of characters. First the length N + 1 is given as an UNSIGNED_VARINT " +
". Then N bytes follow which are the UTF-8 encoding of the character sequence. " +
"A null string is represented with a length of 0.";
}
};
public static final DocumentedType BYTES = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
ByteBuffer arg = (ByteBuffer) o;
int pos = arg.position();
buffer.putInt(arg.remaining());
buffer.put(arg);
arg.position(pos);
}
@Override
public Object read(ByteBuffer buffer) {
int size = buffer.getInt();
if (size < 0)
throw new SchemaException("Bytes size " + size + " cannot be negative");
if (size > buffer.remaining())
throw new SchemaException("Error reading bytes of size " + size + ", only " + buffer.remaining() + " bytes available");
int limit = buffer.limit();
int newPosition = buffer.position() + size;
buffer.limit(newPosition);
ByteBuffer val = buffer.slice();
buffer.limit(limit);
buffer.position(newPosition);
return val;
}
@Override
public int sizeOf(Object o) {
ByteBuffer buffer = (ByteBuffer) o;
return 4 + buffer.remaining();
}
@Override
public String typeName() {
return "BYTES";
}
@Override
public ByteBuffer validate(Object item) {
if (item instanceof ByteBuffer)
return (ByteBuffer) item;
else
throw new SchemaException(item + " is not a java.nio.ByteBuffer.");
}
@Override
public String documentation() {
return "Represents a raw sequence of bytes. First the length N is given as an " + INT32 +
". Then N bytes follow.";
}
};
public static final DocumentedType COMPACT_BYTES = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
ByteBuffer arg = (ByteBuffer) o;
int pos = arg.position();
ByteUtils.writeUnsignedVarint(arg.remaining() + 1, buffer);
buffer.put(arg);
arg.position(pos);
}
@Override
public Object read(ByteBuffer buffer) {
int size = ByteUtils.readUnsignedVarint(buffer) - 1;
if (size < 0)
throw new SchemaException("Bytes size " + size + " cannot be negative");
if (size > buffer.remaining())
throw new SchemaException("Error reading bytes of size " + size + ", only " + buffer.remaining() + " bytes available");
int limit = buffer.limit();
int newPosition = buffer.position() + size;
buffer.limit(newPosition);
ByteBuffer val = buffer.slice();
buffer.limit(limit);
buffer.position(newPosition);
return val;
}
@Override
public int sizeOf(Object o) {
ByteBuffer buffer = (ByteBuffer) o;
int remaining = buffer.remaining();
return ByteUtils.sizeOfUnsignedVarint(remaining + 1) + remaining;
}
@Override
public String typeName() {
return "COMPACT_BYTES";
}
@Override
public ByteBuffer validate(Object item) {
if (item instanceof ByteBuffer)
return (ByteBuffer) item;
else
throw new SchemaException(item + " is not a java.nio.ByteBuffer.");
}
@Override
public String documentation() {
return "Represents a raw sequence of bytes. First the length N+1 is given as an UNSIGNED_VARINT." +
" Then N bytes follow.";
}
};
public static final DocumentedType NULLABLE_BYTES = new DocumentedType() {
@Override
public boolean isNullable() {
return true;
}
@Override
public void write(ByteBuffer buffer, Object o) {
if (o == null) {
buffer.putInt(-1);
return;
}
ByteBuffer arg = (ByteBuffer) o;
int pos = arg.position();
buffer.putInt(arg.remaining());
buffer.put(arg);
arg.position(pos);
}
@Override
public Object read(ByteBuffer buffer) {
int size = buffer.getInt();
if (size < 0)
return null;
if (size > buffer.remaining())
throw new SchemaException("Error reading bytes of size " + size + ", only " + buffer.remaining() + " bytes available");
int limit = buffer.limit();
int newPosition = buffer.position() + size;
buffer.limit(newPosition);
ByteBuffer val = buffer.slice();
buffer.limit(limit);
buffer.position(newPosition);
return val;
}
@Override
public int sizeOf(Object o) {
if (o == null)
return 4;
ByteBuffer buffer = (ByteBuffer) o;
return 4 + buffer.remaining();
}
@Override
public String typeName() {
return "NULLABLE_BYTES";
}
@Override
public ByteBuffer validate(Object item) {
if (item == null)
return null;
if (item instanceof ByteBuffer)
return (ByteBuffer) item;
throw new SchemaException(item + " is not a java.nio.ByteBuffer.");
}
@Override
public String documentation() {
return "Represents a raw sequence of bytes or null. For non-null values, first the length N is given as an " + INT32 +
". Then N bytes follow. A null value is encoded with length of -1 and there are no following bytes.";
}
};
public static final DocumentedType COMPACT_NULLABLE_BYTES = new DocumentedType() {
@Override
public boolean isNullable() {
return true;
}
@Override
public void write(ByteBuffer buffer, Object o) {
if (o == null) {
ByteUtils.writeUnsignedVarint(0, buffer);
} else {
ByteBuffer arg = (ByteBuffer) o;
int pos = arg.position();
ByteUtils.writeUnsignedVarint(arg.remaining() + 1, buffer);
buffer.put(arg);
arg.position(pos);
}
}
@Override
public Object read(ByteBuffer buffer) {
int size = ByteUtils.readUnsignedVarint(buffer) - 1;
if (size < 0)
return null;
if (size > buffer.remaining())
throw new SchemaException("Error reading bytes of size " + size + ", only " + buffer.remaining() + " bytes available");
int limit = buffer.limit();
int newPosition = buffer.position() + size;
buffer.limit(newPosition);
ByteBuffer val = buffer.slice();
buffer.limit(limit);
buffer.position(newPosition);
return val;
}
@Override
public int sizeOf(Object o) {
if (o == null) {
return 1;
}
ByteBuffer buffer = (ByteBuffer) o;
int remaining = buffer.remaining();
return ByteUtils.sizeOfUnsignedVarint(remaining + 1) + remaining;
}
@Override
public String typeName() {
return "COMPACT_NULLABLE_BYTES";
}
@Override
public ByteBuffer validate(Object item) {
if (item == null)
return null;
if (item instanceof ByteBuffer)
return (ByteBuffer) item;
throw new SchemaException(item + " is not a java.nio.ByteBuffer.");
}
@Override
public String documentation() {
return "Represents a raw sequence of bytes. First the length N+1 is given as an UNSIGNED_VARINT." +
" Then N bytes follow. A null object is represented with a length of 0.";
}
};
public static final DocumentedType RECORDS = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
if (o instanceof MemoryRecords) {
MemoryRecords records = (MemoryRecords) o;
BYTES.write(buffer, records.buffer().duplicate());
} else {
throw new IllegalArgumentException("Unexpected record type: " + o.getClass());
}
}
@Override
public MemoryRecords read(ByteBuffer buffer) {
ByteBuffer recordsBuffer = (ByteBuffer) BYTES.read(buffer);
return MemoryRecords.readableRecords(recordsBuffer);
}
@Override
public int sizeOf(Object o) {
BaseRecords records = (BaseRecords) o;
return 4 + records.sizeInBytes();
}
@Override
public String typeName() {
return "RECORDS";
}
@Override
public BaseRecords validate(Object item) {
if (item instanceof MemoryRecords)
return (BaseRecords) item;
throw new SchemaException(item + " is not an instance of " + MemoryRecords.class.getName());
}
@Override
public String documentation() {
return "Represents a sequence of Kafka records as " + BYTES + ". " +
"For a detailed description of records see " +
"<a href=\"/documentation/#messageformat\">Message Sets</a>.";
}
};
public static final DocumentedType COMPACT_RECORDS = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
if (o instanceof MemoryRecords) {
MemoryRecords records = (MemoryRecords) o;
COMPACT_BYTES.write(buffer, records.buffer().duplicate());
} else {
throw new IllegalArgumentException("Unexpected record type: " + o.getClass());
}
}
@Override
public MemoryRecords read(ByteBuffer buffer) {
ByteBuffer recordsBuffer = (ByteBuffer) COMPACT_BYTES.read(buffer);
return MemoryRecords.readableRecords(recordsBuffer);
}
@Override
public int sizeOf(Object o) {
BaseRecords records = (BaseRecords) o;
int recordsSize = records.sizeInBytes();
return ByteUtils.sizeOfUnsignedVarint(recordsSize + 1) + recordsSize;
}
@Override
public String typeName() {
return "COMPACT_RECORDS";
}
@Override
public BaseRecords validate(Object item) {
if (item instanceof BaseRecords)
return (BaseRecords) item;
throw new SchemaException(item + " is not an instance of " + BaseRecords.class.getName());
}
@Override
public String documentation() {
return "Represents a sequence of Kafka records as " + COMPACT_BYTES + ". " +
"For a detailed description of records see " +
"<a href=\"/documentation/#messageformat\">Message Sets</a>.";
}
};
public static final DocumentedType NULLABLE_RECORDS = new DocumentedType() {
@Override
public boolean isNullable() {
return true;
}
@Override
public void write(ByteBuffer buffer, Object o) {
if (o == null) {
NULLABLE_BYTES.write(buffer, null);
} else if (o instanceof MemoryRecords) {
MemoryRecords records = (MemoryRecords) o;
NULLABLE_BYTES.write(buffer, records.buffer().duplicate());
} else {
throw new IllegalArgumentException("Unexpected record type: " + o.getClass());
}
}
@Override
public MemoryRecords read(ByteBuffer buffer) {
ByteBuffer recordsBuffer = (ByteBuffer) NULLABLE_BYTES.read(buffer);
if (recordsBuffer == null) {
return null;
} else {
return MemoryRecords.readableRecords(recordsBuffer);
}
}
@Override
public int sizeOf(Object o) {
if (o == null)
return 4;
BaseRecords records = (BaseRecords) o;
return 4 + records.sizeInBytes();
}
@Override
public String typeName() {
return "NULLABLE_RECORDS";
}
@Override
public BaseRecords validate(Object item) {
if (item == null)
return null;
if (item instanceof BaseRecords)
return (BaseRecords) item;
throw new SchemaException(item + " is not an instance of " + BaseRecords.class.getName());
}
@Override
public String documentation() {
return "Represents a sequence of Kafka records as " + NULLABLE_BYTES + ". " +
"For a detailed description of records see " +
"<a href=\"/documentation/#messageformat\">Message Sets</a>.";
}
};
public static final DocumentedType COMPACT_NULLABLE_RECORDS = new DocumentedType() {
@Override
public boolean isNullable() {
return true;
}
@Override
public void write(ByteBuffer buffer, Object o) {
if (o == null) {
COMPACT_NULLABLE_BYTES.write(buffer, null);
} else if (o instanceof MemoryRecords) {
MemoryRecords records = (MemoryRecords) o;
COMPACT_NULLABLE_BYTES.write(buffer, records.buffer().duplicate());
} else {
throw new IllegalArgumentException("Unexpected record type: " + o.getClass());
}
}
@Override
public MemoryRecords read(ByteBuffer buffer) {
ByteBuffer recordsBuffer = (ByteBuffer) COMPACT_NULLABLE_BYTES.read(buffer);
if (recordsBuffer == null) {
return null;
} else {
return MemoryRecords.readableRecords(recordsBuffer);
}
}
@Override
public int sizeOf(Object o) {
if (o == null) {
return 1;
}
BaseRecords records = (BaseRecords) o;
int recordsSize = records.sizeInBytes();
return ByteUtils.sizeOfUnsignedVarint(recordsSize + 1) + recordsSize;
}
@Override
public String typeName() {
return "COMPACT_NULLABLE_RECORDS";
}
@Override
public BaseRecords validate(Object item) {
if (item == null)
return null;
if (item instanceof BaseRecords)
return (BaseRecords) item;
throw new SchemaException(item + " is not an instance of " + BaseRecords.class.getName());
}
@Override
public String documentation() {
return "Represents a sequence of Kafka records as " + COMPACT_NULLABLE_BYTES + ". " +
"For a detailed description of records see " +
"<a href=\"/documentation/#messageformat\">Message Sets</a>.";
}
};
public static final DocumentedType VARINT = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
ByteUtils.writeVarint((Integer) o, buffer);
}
@Override
public Integer read(ByteBuffer buffer) {
return ByteUtils.readVarint(buffer);
}
@Override
public Integer validate(Object item) {
if (item instanceof Integer)
return (Integer) item;
throw new SchemaException(item + " is not an integer");
}
public String typeName() {
return "VARINT";
}
@Override
public int sizeOf(Object o) {
return ByteUtils.sizeOfVarint((Integer) o);
}
@Override
public String documentation() {
return "Represents an integer between -2<sup>31</sup> and 2<sup>31</sup>-1 inclusive. " +
"Encoding follows the variable-length zig-zag encoding from " +
" <a href=\"https://code.google.com/apis/protocolbuffers/docs/encoding.html\"> Google Protocol Buffers</a>.";
}
};
public static final DocumentedType VARLONG = new DocumentedType() {
@Override
public void write(ByteBuffer buffer, Object o) {
ByteUtils.writeVarlong((Long) o, buffer);
}
@Override
public Long read(ByteBuffer buffer) {
return ByteUtils.readVarlong(buffer);
}
@Override
public Long validate(Object item) {
if (item instanceof Long)
return (Long) item;
throw new SchemaException(item + " is not a long");
}
public String typeName() {
return "VARLONG";
}
@Override
public int sizeOf(Object o) {
return ByteUtils.sizeOfVarlong((Long) o);
}
@Override
public String documentation() {
return "Represents an integer between -2<sup>63</sup> and 2<sup>63</sup>-1 inclusive. " +
"Encoding follows the variable-length zig-zag encoding from " +
" <a href=\"https://code.google.com/apis/protocolbuffers/docs/encoding.html\"> Google Protocol Buffers</a>.";
}
};
private static String toHtml() {
DocumentedType[] types = {
BOOLEAN, INT8, INT16, INT32, INT64,
UINT16, UNSIGNED_INT32, VARINT, VARLONG, UUID, FLOAT64,
STRING, COMPACT_STRING, NULLABLE_STRING, COMPACT_NULLABLE_STRING,
BYTES, COMPACT_BYTES, NULLABLE_BYTES, COMPACT_NULLABLE_BYTES,
RECORDS, COMPACT_RECORDS, NULLABLE_RECORDS, COMPACT_NULLABLE_RECORDS,
new ArrayOf(STRING), new CompactArrayOf(COMPACT_STRING), ArrayOf.nullable(STRING), CompactArrayOf.nullable(STRING),
new Schema(), new NullableSchema(new Schema())};
final StringBuilder b = new StringBuilder();
b.append("<table class=\"data-table\"><tbody>\n");
b.append("<tr>");
b.append("<th>Type</th>\n");
b.append("<th>Description</th>\n");
b.append("</tr>\n");
for (DocumentedType type : types) {
b.append("<tr>");
b.append("<td>");
b.append(type.typeName());
b.append("</td>");
b.append("<td>");
b.append(type.documentation());
b.append("</td>");
b.append("</tr>\n");
}
b.append("</tbody></table>\n");
return b.toString();
}
public static void main(String[] args) {
System.out.println(toHtml());
}
}
| DocumentedType |
java | quarkusio__quarkus | test-framework/common/src/main/java/io/quarkus/test/common/TestResourceManager.java | {
"start": 33146,
"end": 34605
} | class ____ implements QuarkusTestResourceLifecycleManager.TestInjector {
// visible for testing
final Object testInstance;
private DefaultTestInjector(Object testInstance) {
this.testInstance = testInstance;
}
@Override
public void injectIntoFields(Object fieldValue, Predicate<Field> predicate) {
Class<?> c = testInstance.getClass();
while (c != Object.class) {
for (Field f : c.getDeclaredFields()) {
if (predicate.test(f)) {
f.setAccessible(true);
try {
f.set(testInstance, fieldValue);
return;
} catch (Exception e) {
throw new RuntimeException("Unable to set field '" + f.getName()
+ "' using 'QuarkusTestResourceLifecycleManager.TestInjector' ", e);
}
}
}
c = c.getSuperclass();
}
// no need to warn here because it's perfectly valid to have tests that don't use the injected fields
}
}
/**
* The entry point to handling the differences between {@link QuarkusTestResource} and {@link WithTestResource}
* (and whatever else we potentially come up with in the future).
*/
private sealed | DefaultTestInjector |
java | elastic__elasticsearch | modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java | {
"start": 5489,
"end": 5703
} | class ____ {
static final String RESPONSES = "responses";
static final String STATUS = "status";
}
@Override
public String toString() {
return Strings.toString(this);
}
}
| Fields |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/scheduler/ExecutorScheduler.java | {
"start": 4353,
"end": 5359
} | class ____ extends AtomicBoolean
implements Runnable, Disposable {
/** */
private static final long serialVersionUID = 3503344795919906192L;
final Runnable task;
final WorkerDelete parent;
final boolean callRemoveOnFinish;
ExecutorTrackedRunnable(Runnable task,
WorkerDelete parent,
boolean callRemoveOnFinish) {
this.task = task;
this.parent = parent;
this.callRemoveOnFinish = callRemoveOnFinish;
}
@Override
public void run() {
if (!get()) {
try {
task.run();
}
catch (Throwable ex) {
Schedulers.handleError(ex);
}
finally {
if (callRemoveOnFinish) {
dispose();
}
else {
lazySet(true);
}
}
}
}
@Override
public void dispose() {
if (compareAndSet(false, true)) {
parent.delete(this);
}
}
@Override
public boolean isDisposed() {
return get();
}
}
/**
* A non-trampolining worker that tracks tasks.
*/
static final | ExecutorTrackedRunnable |
java | apache__camel | components/camel-azure/camel-azure-servicebus/src/main/java/org/apache/camel/component/azure/servicebus/ServiceBusType.java | {
"start": 864,
"end": 910
} | enum ____ {
queue,
topic;
}
| ServiceBusType |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractBooleanAssert.java | {
"start": 1446,
"end": 4720
} | class ____<SELF extends AbstractBooleanAssert<SELF>> extends AbstractAssert<SELF, Boolean> {
protected AbstractBooleanAssert(Boolean actual, Class<?> selfType) {
super(actual, selfType);
}
/**
* Verifies that the actual value is {@code true}.
* <p>
* Example:
* <pre><code class='java'> // assertions succeed:
* assertThat(true).isTrue();
* assertThat(Boolean.TRUE).isTrue();
*
* // assertions fail:
* assertThat(false).isTrue();
* assertThat(Boolean.FALSE).isTrue();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is not {@code true}.
*/
public SELF isTrue() {
objects.assertNotNull(info, actual);
if (actual) return myself;
throw Failures.instance().failure(info, shouldBeTrue(actual), actual, true);
}
/**
* Verifies that the actual value is {@code false}.
* <p>
* Example:
* <pre><code class='java'> // assertions succeed:
* assertThat(false).isFalse();
* assertThat(Boolean.FALSE).isFalse();
*
* // assertions fail:
* assertThat(true).isFalse();
* assertThat(Boolean.TRUE).isFalse();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is not {@code false}.
*/
public SELF isFalse() {
objects.assertNotNull(info, actual);
if (!actual) return myself;
throw Failures.instance().failure(info, shouldBeFalse(actual), actual, false);
}
/**
* Verifies that the actual value is equal to the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions succeed:
* assertThat(true).isEqualTo(true);
* assertThat(Boolean.FALSE).isEqualTo(false);
*
* // assertions fail:
* assertThat(true).isEqualTo(false);
* assertThat(Boolean.TRUE).isEqualTo(false);</code></pre>
*
* @param expected the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is not equal to the given one.
*/
public SELF isEqualTo(boolean expected) {
if (actual == null || actual != expected)
throw Failures.instance().failure(info, shouldBeEqual(actual, expected, info.representation()));
return myself;
}
/**
* Verifies that the actual value is not equal to the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions succeed:
* assertThat(true).isNotEqualTo(false);
* assertThat(Boolean.FALSE).isNotEqualTo(true);
*
* // assertions fail:
* assertThat(true).isNotEqualTo(true);
* assertThat(Boolean.FALSE).isNotEqualTo(false);</code></pre>
*
* @param other the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is equal to the given one.
*/
public SELF isNotEqualTo(boolean other) {
if (actual != null && actual == other) throwAssertionError(shouldNotBeEqual(actual, other));
return myself;
}
}
| AbstractBooleanAssert |
java | apache__camel | components/camel-jgroups/src/generated/java/org/apache/camel/component/jgroups/JGroupsComponentConfigurer.java | {
"start": 734,
"end": 3619
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
JGroupsComponent target = (JGroupsComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "channel": target.setChannel(property(camelContext, org.jgroups.JChannel.class, value)); return true;
case "channelproperties":
case "channelProperties": target.setChannelProperties(property(camelContext, java.lang.String.class, value)); return true;
case "enableviewmessages":
case "enableViewMessages": target.setEnableViewMessages(property(camelContext, boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "channel": return org.jgroups.JChannel.class;
case "channelproperties":
case "channelProperties": return java.lang.String.class;
case "enableviewmessages":
case "enableViewMessages": return boolean.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
JGroupsComponent target = (JGroupsComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "channel": return target.getChannel();
case "channelproperties":
case "channelProperties": return target.getChannelProperties();
case "enableviewmessages":
case "enableViewMessages": return target.isEnableViewMessages();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
default: return null;
}
}
}
| JGroupsComponentConfigurer |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java | {
"start": 905,
"end": 4581
} | class ____ extends OperatorTestCase {
@Override
protected SourceOperator simpleInput(BlockFactory blockFactory, int end) {
List<BytesRef> input = LongStream.range(0, end)
.mapToObj(l -> new BytesRef("word1_" + l + " word2_" + l + " word3_" + l))
.collect(Collectors.toList());
return new BytesRefBlockSourceOperator(blockFactory, input);
}
record FirstWord(int channelA) implements ColumnExtractOperator.Evaluator {
@Override
public void computeRow(BytesRefBlock inputBlock, int index, Block.Builder[] target, BytesRef spare) {
BytesRef input = inputBlock.getBytesRef(index, spare);
((BytesRefBlock.Builder) target[channelA]).appendBytesRef(BytesRefs.toBytesRef(input.utf8ToString().split(" ")[0]));
}
@Override
public String toString() {
return "FirstWord";
}
}
@Override
protected Operator.OperatorFactory simple(SimpleOptions options) {
Supplier<ColumnExtractOperator.Evaluator> expEval = () -> new FirstWord(0);
return new ColumnExtractOperator.Factory(
new ElementType[] { ElementType.BYTES_REF },
dvrCtx -> new EvalOperator.ExpressionEvaluator() {
@Override
public Block eval(Page page) {
BytesRefBlock input = page.getBlock(0);
for (int i = 0; i < input.getPositionCount(); i++) {
if (input.getBytesRef(i, new BytesRef()).utf8ToString().startsWith("no_")) {
return input.blockFactory().newConstantNullBlock(input.getPositionCount());
}
}
input.incRef();
return input;
}
@Override
public long baseRamBytesUsed() {
return 0;
}
@Override
public void close() {}
},
expEval
);
}
@Override
protected Matcher<String> expectedDescriptionOfSimple() {
return equalTo("ColumnExtractOperator[evaluator=FirstWord]");
}
@Override
protected Matcher<String> expectedToStringOfSimple() {
return expectedDescriptionOfSimple();
}
@Override
protected void assertSimpleOutput(List<Page> input, List<Page> results) {
BytesRef buffer = new BytesRef();
int pos = 0;
for (var page : results) {
BytesRefBlock block1 = page.getBlock(1);
for (int i = 0; i < page.getPositionCount(); i++) {
assertEquals(new BytesRef("word1_" + pos).utf8ToString(), block1.getBytesRef(i, buffer).utf8ToString());
pos++;
}
}
}
public void testAllNullValues() {
DriverContext driverContext = driverContext();
BytesRef scratch = new BytesRef();
Block input1 = driverContext.blockFactory().newBytesRefBlockBuilder(1).appendBytesRef(new BytesRef("can_match")).build();
Block input2 = driverContext.blockFactory().newBytesRefBlockBuilder(1).appendBytesRef(new BytesRef("no_match")).build();
List<Page> inputPages = List.of(new Page(input1), new Page(input2));
List<Page> outputPages = drive(simple().get(driverContext), inputPages.iterator(), driverContext);
BytesRefBlock output1 = outputPages.get(0).getBlock(1);
BytesRefBlock output2 = outputPages.get(1).getBlock(1);
assertThat(output1.getBytesRef(0, scratch), equalTo(new BytesRef("can_match")));
assertTrue(output2.areAllValuesNull());
}
}
| ColumnExtractOperatorTests |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/json/JsonReadFeature.java | {
"start": 269,
"end": 9744
} | enum ____
implements FormatFeature
{
// // // Support for non-standard data format constructs: comments
/**
* Feature that determines whether parser will allow use
* of Java/C/C++ style comments (both '/'+'*' and
* '//' varieties) within parsed content or not.
*<p>
* Since JSON specification does not mention comments as legal
* construct,
* this is a non-standard feature; however, in the wild
* this is extensively used. As such, feature is
* <b>disabled by default</b> for parsers and must be
* explicitly enabled.
*/
ALLOW_JAVA_COMMENTS(false),
/**
* Feature that determines whether parser will allow use
* of YAML comments, ones starting with '#' and continuing
* until the end of the line. This commenting style is common
* with scripting languages as well.
*<p>
* Since JSON specification does not mention comments as legal
* construct,
* this is a non-standard feature. As such, feature is
* <b>disabled by default</b> for parsers and must be
* explicitly enabled.
*/
ALLOW_YAML_COMMENTS(false),
// // // Support for non-standard data format constructs: quoting/escaping
/**
* Feature that can be enabled to accept quoting of all character
* using backslash quoting mechanism: if not enabled, only characters
* that are explicitly listed by JSON specification can be thus
* escaped (see JSON spec for small list of these characters)
*<p>
* Since JSON specification requires quoting for all control characters,
* this is a non-standard feature, and as such disabled by default.
*/
ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER(false),
/**
* Feature that determines whether parser will allow use
* of single quotes (apostrophe, character '\'') for
* quoting Strings (names and String values). If so,
* this is in addition to other acceptable markers.
*<p>
* Since JSON specification requires use of double quotes for
* property names,
* this is a non-standard feature, and as such disabled by default.
*/
ALLOW_SINGLE_QUOTES(false),
/**
* Feature that determines whether parser will allow
* Record Separator (RS) control character ({@code 0x1E})
* as part of ignorable whitespace in JSON input, similar to the TAB character.
* <p>
* Since the official JSON specification permits only a limited set of control
* characters as whitespace, this is a non-standard feature and is disabled by default.
*/
ALLOW_RS_CONTROL_CHAR(false),
/**
* Feature that determines whether parser will allow
* JSON Strings to contain unescaped control characters
* (ASCII characters with value less than 32, including
* tab and line feed characters) or not.
* If feature is set false, an exception is thrown if such a
* character is encountered.
*<p>
* Since JSON specification requires quoting for all control characters,
* this is a non-standard feature, and as such disabled by default.
*/
ALLOW_UNESCAPED_CONTROL_CHARS(false),
/**
* Feature that determines whether parser will allow use
* of unquoted Object property names (which is allowed by Javascript,
* but not by JSON specification).
*<p>
* Since JSON specification requires use of double quotes for
* Object property names,
* this is a non-standard feature, and as such disabled by default.
*<p>
* NOTE: in Jackson 2.x, was called {@code ALLOW_UNQUOTED_FIELD_NAMES}
*/
ALLOW_UNQUOTED_PROPERTY_NAMES(false),
// // // Support for non-standard data format constructs: number representations
/**
* Feature that determines whether parser will allow
* JSON decimal numbers to start with a decimal point
* (like: {@code .123}). If enabled, no exception is thrown, and the number
* is parsed as though a leading 0 had been present.
*<p>
* Since JSON specification does not allow leading decimal points,
* this is a non-standard feature, and as such disabled by default.
*/
ALLOW_LEADING_DECIMAL_POINT_FOR_NUMBERS(false),
/**
* Feature that determines whether parser will allow
* JSON decimal numbers to start with a plus sign
* (like: {@code +123}). If enabled, no exception is thrown, and the number
* is parsed as though a leading sign had not been present.
*<p>
* Since JSON specification does not allow leading plus signs,
* this is a non-standard feature, and as such disabled by default.
*/
ALLOW_LEADING_PLUS_SIGN_FOR_NUMBERS(false),
/**
* Feature that determines whether parser will allow
* JSON integral numbers to start with additional (ignorable)
* zeroes (like: {@code 000001}). If enabled, no exception is thrown, and extra
* nulls are silently ignored (and not included in textual representation
* exposed via {@link JsonParser#getString()}).
*<p>
* Since JSON specification does not allow leading zeroes,
* this is a non-standard feature, and as such disabled by default.
*/
ALLOW_LEADING_ZEROS_FOR_NUMBERS(false),
/**
* Feature that allows parser to recognize set of
* "Not-a-Number" (NaN) tokens as legal floating number
* values (similar to how many other data formats and
* programming language source code allows it).
* Specific subset contains values that
* <a href="http://www.w3.org/TR/xmlschema-2/">XML Schema</a>
* (see section 3.2.4.1, Lexical Representation)
* allows (tokens are quoted contents, not including quotes):
*<ul>
* <li>"INF" (for positive infinity), as well as alias of "Infinity"
* <li>"-INF" (for negative infinity), alias "-Infinity"
* <li>"NaN" (for other not-a-numbers, like result of division by zero)
*</ul>
*<p>
* Since JSON specification does not allow use of such values,
* this is a non-standard feature, and as such disabled by default.
*/
ALLOW_NON_NUMERIC_NUMBERS(false),
/**
* Feature that determines whether parser will allow
* JSON decimal numbers to end with a decimal point
* (like: {@code 123.}). If enabled, no exception is thrown, and the number
* is parsed as though the trailing decimal point had not been present.
*<p>
* Since JSON specification does not allow trailing decimal points,
* this is a non-standard feature, and as such disabled by default.
*/
ALLOW_TRAILING_DECIMAL_POINT_FOR_NUMBERS(false),
// // // Support for non-standard data format constructs: array/value separators
/**
* Feature allows the support for "missing" values in a JSON array: missing
* value meaning sequence of two commas, without value in-between but only
* optional white space.
* Enabling this feature will expose "missing" values as {@link JsonToken#VALUE_NULL}
* tokens, which typically become Java nulls in arrays and {@link java.util.Collection}
* in data-binding.
* <p>
* For example, enabling this feature will represent a JSON array <code>["value1",,"value3",]</code>
* as <code>["value1", null, "value3", null]</code>
* <p>
* Since the JSON specification does not allow missing values this is a non-compliant JSON
* feature and is disabled by default.
*/
ALLOW_MISSING_VALUES(false),
/**
* Feature that determines whether {@link JsonParser} will allow for a single trailing
* comma following the final value (in an Array) or member (in an Object). These commas
* will simply be ignored.
* <p>
* For example, when this feature is enabled, <code>[true,true,]</code> is equivalent to
* <code>[true, true]</code> and <code>{"a": true,}</code> is equivalent to
* <code>{"a": true}</code>.
* <p>
* When combined with <code>ALLOW_MISSING_VALUES</code>, this feature takes priority, and
* the final trailing comma in an array declaration does not imply a missing
* (<code>null</code>) value. For example, when both <code>ALLOW_MISSING_VALUES</code>
* and <code>ALLOW_TRAILING_COMMA</code> are enabled, <code>[true,true,]</code> is
* equivalent to <code>[true, true]</code>, and <code>[true,true,,]</code> is equivalent to
* <code>[true, true, null]</code>.
* <p>
* Since the JSON specification does not permit trailing commas, this is a non-standard
* feature, and as such disabled by default.
*/
ALLOW_TRAILING_COMMA(false),
;
private final boolean _defaultState;
private final int _mask;
/**
* Method that calculates bit set (flags) of all features that
* are enabled by default.
*
* @return Bit mask of all features that are enabled by default
*/
public static int collectDefaults()
{
int flags = 0;
for (JsonReadFeature f : values()) {
if (f.enabledByDefault()) {
flags |= f.getMask();
}
}
return flags;
}
private JsonReadFeature(boolean defaultState) {
_defaultState = defaultState;
_mask = (1 << ordinal());
}
@Override
public boolean enabledByDefault() { return _defaultState; }
@Override
public int getMask() { return _mask; }
@Override
public boolean enabledIn(int flags) { return (flags & _mask) != 0; }
}
| JsonReadFeature |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/injection/staticfield/StaticFieldInjectionTest.java | {
"start": 1069,
"end": 1305
} | class ____ {
// This one is ignored
@Inject
static Head head;
// The parameter is injected
CombineHarvester(Head head) {
CombineHarvester.head = head;
}
}
}
| CombineHarvester |
java | quarkusio__quarkus | extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/ValidationSupport.java | {
"start": 306,
"end": 1154
} | class ____ {
private ValidationSupport() {
}
@SuppressWarnings("unused") // this is called by transformed code
public static ValidatorFactory buildDefaultValidatorFactory() {
ArcContainer container = Arc.container();
if (container == null) {
return fallback();
}
InstanceHandle<HibernateValidatorFactory> instance = container.instance(HibernateValidatorFactory.class);
if (!instance.isAvailable()) {
return fallback();
}
return new CloseAsNoopValidatorFactoryWrapper(instance.get());
}
// the point of having this is to support non-Quarkus tests that could be using Hibernate Validator
private static ValidatorFactory fallback() {
return Validation.byDefaultProvider().configure().buildValidatorFactory();
}
}
| ValidationSupport |
java | greenrobot__greendao | tests/DaoTest/src/test/java/org/greenrobot/greendao/unittest/MinimalEntityDao.java | {
"start": 753,
"end": 3266
} | class ____ {
public final static Property Id = new Property(0, Long.class, "id", true, "_id");
}
public MinimalEntityDao(DaoConfig config) {
super(config);
}
public MinimalEntityDao(DaoConfig config, DaoSession daoSession) {
super(config, daoSession);
}
/** Creates the underlying database table. */
public static void createTable(Database db, boolean ifNotExists) {
String constraint = ifNotExists? "IF NOT EXISTS ": "";
db.execSQL("CREATE TABLE " + constraint + "\"MINIMAL_ENTITY\" (" + //
"\"_id\" INTEGER PRIMARY KEY );"); // 0: id
}
/** Drops the underlying database table. */
public static void dropTable(Database db, boolean ifExists) {
String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"MINIMAL_ENTITY\"";
db.execSQL(sql);
}
@Override
protected final void bindValues(DatabaseStatement stmt, MinimalEntity entity) {
stmt.clearBindings();
Long id = entity.getId();
if (id != null) {
stmt.bindLong(1, id);
}
}
@Override
protected final void bindValues(SQLiteStatement stmt, MinimalEntity entity) {
stmt.clearBindings();
Long id = entity.getId();
if (id != null) {
stmt.bindLong(1, id);
}
}
@Override
public Long readKey(Cursor cursor, int offset) {
return cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0);
}
@Override
public MinimalEntity readEntity(Cursor cursor, int offset) {
MinimalEntity entity = new MinimalEntity( //
cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0) // id
);
return entity;
}
@Override
public void readEntity(Cursor cursor, MinimalEntity entity, int offset) {
entity.setId(cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0));
}
@Override
protected final Long updateKeyAfterInsert(MinimalEntity entity, long rowId) {
entity.setId(rowId);
return rowId;
}
@Override
public Long getKey(MinimalEntity entity) {
if(entity != null) {
return entity.getId();
} else {
return null;
}
}
@Override
public boolean hasKey(MinimalEntity entity) {
return entity.getId() != null;
}
@Override
protected final boolean isEntityUpdateable() {
return true;
}
}
| Properties |
java | apache__camel | components/camel-mdc/src/test/java/org/apache/camel/mdc/MyAsyncComponent.java | {
"start": 992,
"end": 1816
} | class ____ extends DefaultComponent {
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
MyAsyncEndpoint answer = new MyAsyncEndpoint(uri, this);
answer.setReply(prepareReply(remaining));
setProperties(answer, parameters);
return answer;
}
private String prepareReply(String value) {
// to make URIs valid we make the conventions of using ':' for ' ' and
// capitalize words
String[] words = value.split(":");
StringBuilder result = new StringBuilder();
for (String word : words) {
result.append(result.isEmpty() ? "" : " ");
result.append(StringHelper.capitalize(word));
}
return result.toString();
}
}
| MyAsyncComponent |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.