language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/FieldsToNullPropertyFilter.java | {
"start": 1227,
"end": 2409
} | class ____ extends SimpleBeanPropertyFilter {
@Override
public void serializeAsField(Object pojo, JsonGenerator jgen, SerializerProvider provider, PropertyWriter writer)
throws Exception {
AbstractSObjectBase sob = (AbstractSObjectBase) pojo;
String fieldName = writer.getName();
Object fieldValue = null;
boolean failedToReadFieldValue = false;
try {
fieldValue = FieldUtils.readField(pojo, fieldName, true);
} catch (IllegalArgumentException e) {
// This happens if the backing field for the getter doesn't match the name provided to @JsonProperty
// This is expected to happen in the case of blob fields, e.g., ContentVersion.getVersionDataUrl(),
// whose backing property is specified as @JsonData("VersionData")
failedToReadFieldValue = true;
}
if (sob.getFieldsToNull().contains(writer.getName()) || fieldValue != null || failedToReadFieldValue) {
writer.serializeAsField(pojo, jgen, provider);
} else {
writer.serializeAsOmittedField(pojo, jgen, provider);
}
}
}
| FieldsToNullPropertyFilter |
java | junit-team__junit5 | junit-jupiter-migrationsupport/src/main/java/org/junit/jupiter/migrationsupport/rules/member/TestRuleAnnotatedMember.java | {
"start": 563,
"end": 628
} | interface ____ {
TestRule getTestRule();
}
| TestRuleAnnotatedMember |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/BalancerBandwidthCommand.java | {
"start": 1051,
"end": 1462
} | class ____ to define the command which sends the new bandwidth value to
* each datanode.
*/
/**
* Balancer bandwidth command instructs each datanode to change its value for
* the max amount of network bandwidth it may use during the block balancing
* operation.
*
* The Balancer Bandwidth Command contains the new bandwidth value as its
* payload. The bandwidth value is in bytes per second.
*/
public | is |
java | spring-projects__spring-framework | spring-orm/src/main/java/org/springframework/orm/jpa/vendor/HibernateJpaDialect.java | {
"start": 9983,
"end": 10340
} | class ____ implements ConnectionHandle {
private final SessionImplementor session;
public HibernateConnectionHandle(SessionImplementor session) {
this.session = session;
}
@Override
public Connection getConnection() {
return this.session.getJdbcCoordinator().getLogicalConnection().getPhysicalConnection();
}
}
}
| HibernateConnectionHandle |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/Driver.java | {
"start": 1926,
"end": 3211
} | class ____ the stub type run by the task.
*/
Class<S> getStubType();
/**
* This method is called before the user code is opened. An exception thrown by this method
* signals failure of the task.
*
* @throws Exception Exceptions may be forwarded and signal task failure.
*/
void prepare() throws Exception;
/**
* The main operation method of the task. It should call the user code with the data subsets
* until the input is depleted.
*
* @throws Exception Any exception thrown by this method signals task failure. Because
* exceptions in the user code typically signal situations where this instance in unable to
* proceed, exceptions from the user code should be forwarded.
*/
void run() throws Exception;
/**
* This method is invoked in any case (clean termination and exception) at the end of the tasks
* operation.
*
* @throws Exception Exceptions may be forwarded.
*/
void cleanup() throws Exception;
/**
* This method is invoked when the driver must aborted in mid processing. It is invoked
* asynchronously by a different thread.
*
* @throws Exception Exceptions may be forwarded.
*/
void cancel() throws Exception;
}
| of |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryRawValueData.java | {
"start": 1639,
"end": 5570
} | class ____<T> extends LazyBinaryFormat<T> implements RawValueData<T> {
public BinaryRawValueData(T javaObject) {
super(javaObject);
}
public BinaryRawValueData(MemorySegment[] segments, int offset, int sizeInBytes) {
super(segments, offset, sizeInBytes);
}
public BinaryRawValueData(MemorySegment[] segments, int offset, int sizeInBytes, T javaObject) {
super(segments, offset, sizeInBytes, javaObject);
}
// ------------------------------------------------------------------------------------------
// Public Interfaces
// ------------------------------------------------------------------------------------------
@Override
public T toObject(TypeSerializer<T> serializer) {
if (javaObject == null) {
try {
javaObject =
InstantiationUtil.deserializeFromByteArray(serializer, toBytes(serializer));
} catch (IOException e) {
throw new FlinkRuntimeException(e);
}
}
return javaObject;
}
@Override
public byte[] toBytes(TypeSerializer<T> serializer) {
ensureMaterialized(serializer);
return BinarySegmentUtils.copyToBytes(getSegments(), getOffset(), getSizeInBytes());
}
@Override
public boolean equals(Object o) {
if (o instanceof BinaryRawValueData) {
BinaryRawValueData<?> other = (BinaryRawValueData<?>) o;
if (binarySection != null && other.binarySection != null) {
return binarySection.equals(other.binarySection);
}
throw new UnsupportedOperationException(
"Unmaterialized BinaryRawValueData cannot be compared.");
} else {
return false;
}
}
@Override
public int hashCode() {
if (binarySection != null) {
return binarySection.hashCode();
}
throw new UnsupportedOperationException(
"Unmaterialized BinaryRawValueData does not have a hashCode.");
}
@Override
public String toString() {
return String.format("SqlRawValue{%s}", javaObject == null ? "?" : javaObject);
}
// ------------------------------------------------------------------------------------
// Internal methods
// ------------------------------------------------------------------------------------
@Override
protected BinarySection materialize(TypeSerializer<T> serializer) {
try {
byte[] bytes = InstantiationUtil.serializeToByteArray(serializer, javaObject);
return new BinarySection(
new MemorySegment[] {MemorySegmentFactory.wrap(bytes)}, 0, bytes.length);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// ------------------------------------------------------------------------------------------
// Construction Utilities
// ------------------------------------------------------------------------------------------
/** Creates a {@link BinaryRawValueData} instance from the given Java object. */
public static <T> BinaryRawValueData<T> fromObject(T javaObject) {
if (javaObject == null) {
return null;
}
return new BinaryRawValueData<>(javaObject);
}
/** Creates a {@link BinaryStringData} instance from the given bytes. */
public static <T> BinaryRawValueData<T> fromBytes(byte[] bytes) {
return fromBytes(bytes, 0, bytes.length);
}
/**
* Creates a {@link BinaryStringData} instance from the given bytes with offset and number of
* bytes.
*/
public static <T> BinaryRawValueData<T> fromBytes(byte[] bytes, int offset, int numBytes) {
return new BinaryRawValueData<>(
new MemorySegment[] {MemorySegmentFactory.wrap(bytes)}, offset, numBytes);
}
}
| BinaryRawValueData |
java | quarkusio__quarkus | integration-tests/oidc-client/src/test/java/io/quarkus/it/keycloak/KeycloakRealmResourceManager.java | {
"start": 772,
"end": 4769
} | class ____ implements QuarkusTestResourceLifecycleManager {
private static final String KEYCLOAK_SERVER_URL = System.getProperty("keycloak.url", "http://localhost:8180");
private static final String KEYCLOAK_REALM = "quarkus";
static {
RestAssured.useRelaxedHTTPSValidation();
}
@Override
public Map<String, String> start() {
RealmRepresentation realm = createRealm(KEYCLOAK_REALM);
realm.setRevokeRefreshToken(true);
realm.setRefreshTokenMaxReuse(0);
realm.setAccessTokenLifespan(3);
realm.setRequiredActions(List.of());
realm.getClients().add(createClient("quarkus-app"));
realm.getUsers().add(createUser("alice", "user"));
realm.getUsers().add(createUser("bob", "user"));
try {
RestAssured
.given()
.auth().oauth2(getAdminAccessToken())
.contentType("application/json")
.body(JsonSerialization.writeValueAsBytes(realm))
.when()
.post(KEYCLOAK_SERVER_URL + "/admin/realms").then()
.statusCode(201);
} catch (IOException e) {
throw new RuntimeException(e);
}
return Collections.emptyMap();
}
private static String getAdminAccessToken() {
return RestAssured
.given()
.param("grant_type", "password")
.param("username", "admin")
.param("password", "admin")
.param("client_id", "admin-cli")
.when()
.post(KEYCLOAK_SERVER_URL + "/realms/master/protocol/openid-connect/token")
.as(AccessTokenResponse.class).getToken();
}
private static RealmRepresentation createRealm(String name) {
RealmRepresentation realm = new RealmRepresentation();
realm.setRealm(name);
realm.setEnabled(true);
realm.setUsers(new ArrayList<>());
realm.setClients(new ArrayList<>());
realm.setAccessTokenLifespan(3);
realm.setRequiredActions(List.of());
RolesRepresentation roles = new RolesRepresentation();
List<RoleRepresentation> realmRoles = new ArrayList<>();
roles.setRealm(realmRoles);
realm.setRoles(roles);
realm.getRoles().getRealm().add(new RoleRepresentation("user", null, false));
realm.getRoles().getRealm().add(new RoleRepresentation("admin", null, false));
return realm;
}
private static ClientRepresentation createClient(String clientId) {
ClientRepresentation client = new ClientRepresentation();
client.setClientId(clientId);
client.setPublicClient(false);
client.setSecret("secret");
client.setDirectAccessGrantsEnabled(true);
client.setServiceAccountsEnabled(true);
client.setEnabled(true);
return client;
}
private static UserRepresentation createUser(String username, String... realmRoles) {
UserRepresentation user = new UserRepresentation();
user.setUsername(username);
user.setEnabled(true);
user.setCredentials(new ArrayList<>());
user.setRealmRoles(Arrays.asList(realmRoles));
user.setEmail(username + "@gmail.com");
user.setEmailVerified(true);
user.setRequiredActions(List.of());
CredentialRepresentation credential = new CredentialRepresentation();
credential.setType(CredentialRepresentation.PASSWORD);
credential.setValue(username);
credential.setTemporary(false);
user.getCredentials().add(credential);
return user;
}
@Override
public void stop() {
RestAssured
.given()
.auth().oauth2(getAdminAccessToken())
.when()
.delete(KEYCLOAK_SERVER_URL + "/admin/realms/" + KEYCLOAK_REALM).then().statusCode(204);
}
}
| KeycloakRealmResourceManager |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/relational/QualifiedTableName.java | {
"start": 233,
"end": 1147
} | class ____ extends QualifiedNameImpl {
public QualifiedTableName(Identifier catalogName, Identifier schemaName, Identifier tableName) {
super( catalogName, schemaName, tableName );
}
public QualifiedTableName(Namespace.Name schemaName, Identifier tableName) {
super( schemaName, tableName );
}
public Identifier getTableName() {
return getObjectName();
}
public QualifiedTableName quote() {
Identifier catalogName = getCatalogName();
if ( catalogName != null ) {
catalogName = new Identifier( catalogName.getText(), true );
}
Identifier schemaName = getSchemaName();
if ( schemaName != null ) {
schemaName = new Identifier( schemaName.getText(), true );
}
Identifier tableName = getTableName();
if ( tableName != null ) {
tableName = new Identifier( tableName.getText(), true );
}
return new QualifiedTableName( catalogName, schemaName, tableName );
}
}
| QualifiedTableName |
java | apache__camel | components/camel-aws/camel-aws2-ddb/src/test/java/org/apache/camel/component/aws2/ddbstream/ShardFixtures.java | {
"start": 998,
"end": 3477
} | class ____ {
public static final String STREAM_ARN = "arn:aws:dynamodb:eu-west-1:1234:table/some-table/stream/2021-05-07T09:03:40.295";
//
// shard 0
// / \
// shard 1 shard 2
// / \ / \
// shard 3 shard 4 shard 5 shard 6
//
public static final Shard SHARD_0 = Shard.builder()
.shardId("SHARD_0")
.sequenceNumberRange(SequenceNumberRange.builder().startingSequenceNumber("4100000000019118544662")
.endingSequenceNumber("4100000000019118559460").build())
.build();
public static final Shard SHARD_1 = Shard.builder()
.shardId("SHARD_1")
.parentShardId("SHARD_0")
.sequenceNumberRange(SequenceNumberRange.builder().startingSequenceNumber("52700000000007125283545")
.endingSequenceNumber("52700000000007125283545").build())
.build();
public static final Shard SHARD_2 = Shard.builder()
.shardId("SHARD_2")
.parentShardId("SHARD_0")
.sequenceNumberRange(SequenceNumberRange.builder().startingSequenceNumber("52700000000020262580503")
.endingSequenceNumber("52700000000020262580503").build())
.build();
public static final Shard SHARD_3 = Shard.builder()
.shardId("SHARD_3")
.parentShardId("SHARD_1")
.sequenceNumberRange(SequenceNumberRange.builder().startingSequenceNumber("74100000000005516202603").build())
.build();
public static final Shard SHARD_4 = Shard.builder()
.shardId("SHARD_4")
.parentShardId("SHARD_1")
.sequenceNumberRange(SequenceNumberRange.builder().startingSequenceNumber("74100000000044018483325").build())
.build();
public static final Shard SHARD_5 = Shard.builder()
.shardId("SHARD_5")
.parentShardId("SHARD_2")
.sequenceNumberRange(SequenceNumberRange.builder().startingSequenceNumber("105800000000033207048658").build())
.build();
public static final Shard SHARD_6 = Shard.builder()
.shardId("SHARD_6")
.parentShardId("SHARD_2")
.sequenceNumberRange(SequenceNumberRange.builder().startingSequenceNumber("105800000000025199618049").build())
.build();
private ShardFixtures() {
// Utility class, not called.
}
}
| ShardFixtures |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/masterslave/MasterSlave.java | {
"start": 3617,
"end": 8979
} | class ____ {
/**
* Open a new connection to a Redis Master-Slave server/servers using the supplied {@link RedisURI} and the supplied
* {@link RedisCodec codec} to encode/decode keys.
* <p>
* This {@link MasterSlave} performs auto-discovery of nodes using either Redis Sentinel or Master/Slave. A {@link RedisURI}
* can point to either a master or a replica host.
* </p>
*
* @param redisClient the Redis client.
* @param codec Use this codec to encode/decode keys and values, must not be {@code null}.
* @param redisURI the Redis server to connect to, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return a new connection.
*/
public static <K, V> StatefulRedisMasterSlaveConnection<K, V> connect(RedisClient redisClient, RedisCodec<K, V> codec,
RedisURI redisURI) {
LettuceAssert.notNull(redisClient, "RedisClient must not be null");
LettuceAssert.notNull(codec, "RedisCodec must not be null");
LettuceAssert.notNull(redisURI, "RedisURI must not be null");
return new MasterSlaveConnectionWrapper<>(MasterReplica.connect(redisClient, codec, redisURI));
}
/**
* Open asynchronously a new connection to a Redis Master-Slave server/servers using the supplied {@link RedisURI} and the
* supplied {@link RedisCodec codec} to encode/decode keys.
* <p>
* This {@link MasterSlave} performs auto-discovery of nodes using either Redis Sentinel or Master/Slave. A {@link RedisURI}
* can point to either a master or a replica host.
* </p>
*
* @param redisClient the Redis client.
* @param codec Use this codec to encode/decode keys and values, must not be {@code null}.
* @param redisURI the Redis server to connect to, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return {@link CompletableFuture} that is notified once the connect is finished.
* @since
*/
public static <K, V> CompletableFuture<StatefulRedisMasterSlaveConnection<K, V>> connectAsync(RedisClient redisClient,
RedisCodec<K, V> codec, RedisURI redisURI) {
return MasterReplica.connectAsync(redisClient, codec, redisURI).thenApply(MasterSlaveConnectionWrapper::new);
}
/**
* Open a new connection to a Redis Master-Slave server/servers using the supplied {@link RedisURI} and the supplied
* {@link RedisCodec codec} to encode/decode keys.
* <p>
* This {@link MasterSlave} performs auto-discovery of nodes if the URI is a Redis Sentinel URI. Master/Slave URIs will be
* treated as static topology and no additional hosts are discovered in such case. Redis Standalone Master/Slave will
* discover the roles of the supplied {@link RedisURI URIs} and issue commands to the appropriate node.
* </p>
* <p>
* When using Redis Sentinel, ensure that {@link Iterable redisURIs} contains only a single entry as only the first URI is
* considered. {@link RedisURI} pointing to multiple Sentinels can be configured through
* {@link RedisURI.Builder#withSentinel}.
* </p>
*
* @param redisClient the Redis client.
* @param codec Use this codec to encode/decode keys and values, must not be {@code null}.
* @param redisURIs the Redis server(s) to connect to, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return a new connection.
*/
public static <K, V> StatefulRedisMasterSlaveConnection<K, V> connect(RedisClient redisClient, RedisCodec<K, V> codec,
Iterable<RedisURI> redisURIs) {
return new MasterSlaveConnectionWrapper<>(MasterReplica.connect(redisClient, codec, redisURIs));
}
/**
* Open asynchronously a new connection to a Redis Master-Slave server/servers using the supplied {@link RedisURI} and the
* supplied {@link RedisCodec codec} to encode/decode keys.
* <p>
* This {@link MasterSlave} performs auto-discovery of nodes if the URI is a Redis Sentinel URI. Master/Slave URIs will be
* treated as static topology and no additional hosts are discovered in such case. Redis Standalone Master/Slave will
* discover the roles of the supplied {@link RedisURI URIs} and issue commands to the appropriate node.
* </p>
* <p>
* When using Redis Sentinel, ensure that {@link Iterable redisURIs} contains only a single entry as only the first URI is
* considered. {@link RedisURI} pointing to multiple Sentinels can be configured through
* {@link RedisURI.Builder#withSentinel}.
* </p>
*
* @param redisClient the Redis client.
* @param codec Use this codec to encode/decode keys and values, must not be {@code null}.
* @param redisURIs the Redis server(s) to connect to, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return {@link CompletableFuture} that is notified once the connect is finished.
*/
public static <K, V> CompletableFuture<StatefulRedisMasterSlaveConnection<K, V>> connectAsync(RedisClient redisClient,
RedisCodec<K, V> codec, Iterable<RedisURI> redisURIs) {
return MasterReplica.connectAsync(redisClient, codec, redisURIs).thenApply(MasterSlaveConnectionWrapper::new);
}
}
| MasterSlave |
java | apache__kafka | streams/src/test/java/org/apache/kafka/test/GenericInMemoryKeyValueStore.java | {
"start": 4628,
"end": 5443
} | class ____<K, V> implements KeyValueIterator<K, V> {
private final Iterator<Entry<K, V>> iter;
private GenericInMemoryKeyValueIterator(final Iterator<Map.Entry<K, V>> iter) {
this.iter = iter;
}
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public KeyValue<K, V> next() {
final Map.Entry<K, V> entry = iter.next();
return new KeyValue<>(entry.getKey(), entry.getValue());
}
@Override
public void close() {
// do nothing
}
@Override
public K peekNextKey() {
throw new UnsupportedOperationException("peekNextKey() not supported in " + getClass().getName());
}
}
} | GenericInMemoryKeyValueIterator |
java | spring-projects__spring-boot | build-plugin/spring-boot-antlib/src/it/sample/src/main/java/org/test/SampleApplication.java | {
"start": 685,
"end": 816
} | class ____ {
public static void main(String[] args) {
System.out.println(LocalDate.class.getSimpleName());
}
}
| SampleApplication |
java | apache__flink | flink-core-api/src/main/java/org/apache/flink/api/java/tuple/Tuple17.java | {
"start": 1892,
"end": 2744
} | class ____ extends Tuple17", then don't use
* instances of Foo in a DataStream<Tuple17> / DataSet<Tuple17>, but declare it as
* DataStream<Foo> / DataSet<Foo>.)
* </ul>
*
* @see Tuple
* @param <T0> The type of field 0
* @param <T1> The type of field 1
* @param <T2> The type of field 2
* @param <T3> The type of field 3
* @param <T4> The type of field 4
* @param <T5> The type of field 5
* @param <T6> The type of field 6
* @param <T7> The type of field 7
* @param <T8> The type of field 8
* @param <T9> The type of field 9
* @param <T10> The type of field 10
* @param <T11> The type of field 11
* @param <T12> The type of field 12
* @param <T13> The type of field 13
* @param <T14> The type of field 14
* @param <T15> The type of field 15
* @param <T16> The type of field 16
*/
@Public
public | Foo |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 25037,
"end": 25508
} | class ____ {
public String[][] getMessage(boolean b, String[][] s) {
return b ? null : s;
}
}
""")
.addOutputLines(
"out/com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
import org.checkerframework.checker.nullness.qual.Nullable;
public | LiteralNullReturnTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java | {
"start": 659,
"end": 824
} | interface ____ {
boolean canHighlight(MappedFieldType fieldType);
HighlightField highlight(FieldHighlightContext fieldContext) throws IOException;
}
| Highlighter |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3668/ParentDto.java | {
"start": 548,
"end": 626
} | class ____ extends ParentDto<ChildDto.ChildDtoA> { }
public static | ParentDtoA |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/h3/Helper.java | {
"start": 1268,
"end": 1771
} | class ____ {
private Helper() {}
public static HttpMetadata encodeHttpMetadata(boolean endStream) {
HttpHeaders headers = new NettyHttpHeaders<>(new DefaultHttp3Headers(false, 8));
headers.set(HttpHeaderNames.TE.getKey(), HttpConstants.TRAILERS);
return new Http2MetadataFrame(headers, endStream);
}
public static HttpMetadata encodeTrailers() {
return new Http2MetadataFrame(new NettyHttpHeaders<>(new DefaultHttp3Headers(false, 4)), true);
}
}
| Helper |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/saml2/Saml2LoginConfigurerTests.java | {
"start": 30210,
"end": 31007
} | class ____ {
private final Saml2AuthenticationRequestRepository<AbstractSaml2AuthenticationRequest> repository = mock(
Saml2AuthenticationRequestRepository.class);
@Bean
Saml2AuthenticationRequestRepository<AbstractSaml2AuthenticationRequest> authenticationRequestRepository() {
return this.repository;
}
@Bean
SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authz) -> authz.anyRequest().authenticated())
.saml2Login((saml2) -> saml2.authenticationRequestUri("/custom/auth/{registrationId}"));
// @formatter:on
return http.build();
}
}
@Configuration
@EnableWebSecurity
@Import(Saml2LoginConfigBeans.class)
static | CustomAuthenticationRequestUriCustomAuthenticationConverter |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppAttempts.java | {
"start": 4153,
"end": 20445
} | class ____ extends AbstractBinder {
@Override
protected void configure() {
Configuration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS);
conf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
ResourceScheduler.class);
rm = new MockRM(conf);
final HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getScheme()).thenReturn("http");
final HttpServletResponse response = mock(HttpServletResponse.class);
bind(rm).to(ResourceManager.class).named("rm");
bind(conf).to(Configuration.class).named("conf");
bind(request).to(HttpServletRequest.class);
bind(response).to(HttpServletResponse.class);
}
}
public TestRMWebServicesAppAttempts() {
}
@Test
public void testAppAttempts() throws Exception {
rm.start();
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 2048);
MockRMAppSubmissionData data =
MockRMAppSubmissionData.Builder.createWithMemory(CONTAINER_MB, rm)
.withAppName("testwordcount")
.withUser("user1")
.build();
RMApp app1 = MockRMAppSubmitter.submit(rm, data);
amNodeManager.nodeHeartbeat(true);
testAppAttemptsHelper(app1.getApplicationId().toString(), app1,
MediaType.APPLICATION_JSON);
rm.stop();
}
@Test
@Timeout(value = 20)
public void testCompletedAppAttempt() throws Exception {
Configuration conf = rm.getConfig();
String logServerUrl = "http://localhost:19888/jobhistory/logs";
conf.set(YarnConfiguration.YARN_LOG_SERVER_URL, logServerUrl);
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 1);
rm.start();
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 8192);
MockRMAppSubmissionData data =
MockRMAppSubmissionData.Builder.createWithMemory(CONTAINER_MB, rm)
.withAppName("testwordcount")
.withUser("user1")
.build();
RMApp app1 = MockRMAppSubmitter.submit(rm, data);
MockAM am = MockRM.launchAndRegisterAM(app1, rm, amNodeManager);
// fail the AM by sending CONTAINER_FINISHED event without registering.
amNodeManager.nodeHeartbeat(am.getApplicationAttemptId(), 1,
ContainerState.COMPLETE);
rm.waitForState(am.getApplicationAttemptId(), RMAppAttemptState.FAILED);
rm.waitForState(app1.getApplicationId(), RMAppState.FAILED);
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("cluster")
.path("apps").path(app1.getApplicationId().toString())
.path("appattempts").request(MediaType.APPLICATION_JSON)
.get(Response.class);
JSONObject json = response.readEntity(JSONObject.class);
JSONObject jsonAppAttempts = json.getJSONObject("appAttempts");
JSONObject jsonAppAttempt = jsonAppAttempts.getJSONObject("appAttempt");
JSONArray jsonArray = new JSONArray();
jsonArray.put(jsonAppAttempt);
JSONObject info = jsonArray.getJSONObject(0);
String logsLink = info.getString("logsLink");
String containerId = app1.getCurrentAppAttempt().getMasterContainer()
.getId().toString();
assertThat(logsLink).isEqualTo(logServerUrl
+ "/127.0.0.1:1234/" + containerId + "/" + containerId + "/"
+ "user1");
rm.stop();
}
@Test
@Timeout(value = 20)
public void testMultipleAppAttempts() throws Exception {
rm.start();
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 8192);
MockRMAppSubmissionData data =
MockRMAppSubmissionData.Builder.createWithMemory(CONTAINER_MB, rm)
.withAppName("testwordcount")
.withUser("user1")
.build();
RMApp app1 = MockRMAppSubmitter.submit(rm, data);
MockAM am = MockRM.launchAndRegisterAM(app1, rm, amNodeManager);
int maxAppAttempts = rm.getConfig().getInt(
YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS);
assertTrue(maxAppAttempts > 1);
int numAttempt = 1;
while (true) {
// fail the AM by sending CONTAINER_FINISHED event without registering.
amNodeManager.nodeHeartbeat(am.getApplicationAttemptId(), 1,
ContainerState.COMPLETE);
rm.waitForState(am.getApplicationAttemptId(), RMAppAttemptState.FAILED);
if (numAttempt == maxAppAttempts) {
rm.waitForState(app1.getApplicationId(), RMAppState.FAILED);
break;
}
// wait for app to start a new attempt.
rm.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
am = MockRM.launchAndRegisterAM(app1, rm, amNodeManager);
numAttempt++;
}
assertEquals(maxAppAttempts,
app1.getAppAttempts().values().size(), "incorrect number of attempts");
testAppAttemptsHelper(app1.getApplicationId().toString(), app1,
MediaType.APPLICATION_JSON);
rm.stop();
}
@Test
public void testAppAttemptsSlash() throws Exception {
rm.start();
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 2048);
RMApp app1 = MockRMAppSubmitter.submitWithMemory(CONTAINER_MB, rm);
amNodeManager.nodeHeartbeat(true);
testAppAttemptsHelper(app1.getApplicationId().toString() + "/", app1,
MediaType.APPLICATION_JSON);
rm.stop();
}
@Test
public void testAppAttemptsDefault() throws Exception {
rm.start();
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 2048);
RMApp app1 = MockRMAppSubmitter.submitWithMemory(CONTAINER_MB, rm);
amNodeManager.nodeHeartbeat(true);
testAppAttemptsHelper(app1.getApplicationId().toString() + "/", app1, "");
rm.stop();
}
@Test
public void testInvalidAppIdGetAttempts() throws Exception {
rm.start();
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 2048);
RMApp app = MockRMAppSubmitter.submitWithMemory(CONTAINER_MB, rm);
amNodeManager.nodeHeartbeat(true);
WebTarget r = targetWithJsonObject();
try {
Response response = r.path("ws").path("v1").path("cluster").path("apps")
.path("application_invalid_12").path("appattempts")
.request(MediaType.APPLICATION_JSON)
.get();
throw new BadRequestException(response);
} catch (BadRequestException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.BAD_REQUEST, response.getStatusInfo());
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject msg = response.readEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals(3, exception.length(), "incorrect number of elements");
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
checkStringMatch("exception message",
"Invalid ApplicationId: application_invalid_12", message);
checkStringMatch("exception type", "BadRequestException", type);
checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.BadRequestException", classname);
} finally {
rm.stop();
}
}
@Test
public void testInvalidAppAttemptId() throws Exception {
rm.start();
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 2048);
RMApp app = MockRMAppSubmitter.submitWithMemory(CONTAINER_MB, rm);
amNodeManager.nodeHeartbeat(true);
WebTarget r = targetWithJsonObject();
try {
Response response = r.path("ws").path("v1").path("cluster").path("apps")
.path(app.getApplicationId().toString()).path("appattempts")
.path("appattempt_invalid_12_000001")
.request(MediaType.APPLICATION_JSON).get();
throw new BadRequestException(response);
} catch (BadRequestException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.BAD_REQUEST,
response.getStatusInfo());
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject msg = response.readEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals(3, exception.length(), "incorrect number of elements");
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
checkStringMatch("exception message",
"Invalid AppAttemptId: appattempt_invalid_12_000001", message);
checkStringMatch("exception type", "BadRequestException", type);
checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.BadRequestException", classname);
} finally {
rm.stop();
}
}
@Test
public void testNonexistAppAttempts() throws Exception {
rm.start();
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 2048);
MockRMAppSubmissionData data =
MockRMAppSubmissionData.Builder.createWithMemory(CONTAINER_MB, rm)
.withAppName("testwordcount")
.withUser("user1")
.build();
MockRMAppSubmitter.submit(rm, data);
amNodeManager.nodeHeartbeat(true);
WebTarget r = targetWithJsonObject();
try {
Response response = r.path("ws").path("v1").path("cluster").path("apps")
.path("application_00000_0099").request(MediaType.APPLICATION_JSON)
.get();
throw new NotFoundException(response);
} catch (NotFoundException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject msg = response.readEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals(3, exception.length(), "incorrect number of elements");
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
checkStringMatch("exception message",
"app with id: application_00000_0099 not found", message);
checkStringMatch("exception type", "NotFoundException", type);
checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.NotFoundException", classname);
} finally {
rm.stop();
}
}
private void testAppAttemptsHelper(String path, RMApp app, String media)
throws Exception {
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("cluster")
.path("apps").path(path).path("appattempts").request(media)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject jsonAppAttempts = json.getJSONObject("appAttempts");
assertEquals(1, jsonAppAttempts.length(), "incorrect number of elements");
JSONArray jsonArray = parseJsonAppAttempt(jsonAppAttempts);
Collection<RMAppAttempt> attempts = app.getAppAttempts().values();
assertEquals(attempts.size(), jsonArray.length(),
"incorrect number of elements");
// Verify these parallel arrays are the same
int i = 0;
for (RMAppAttempt attempt : attempts) {
verifyAppAttemptsInfo(jsonArray.getJSONObject(i), attempt, app.getUser());
++i;
}
}
public JSONArray parseJsonAppAttempt(JSONObject jsonAppAttempts) throws JSONException {
Object appAttempt = jsonAppAttempts.get("appAttempt");
if (appAttempt instanceof JSONArray) {
return jsonAppAttempts.getJSONArray("appAttempt");
}
if (appAttempt instanceof JSONObject) {
JSONObject jsonAppAttempt = jsonAppAttempts.getJSONObject("appAttempt");
JSONArray jsonArray = new JSONArray();
jsonArray.put(jsonAppAttempt);
return jsonArray;
}
return null;
}
@Test
public void testAppAttemptsXML() throws Exception {
rm.start();
String user = "user1";
MockNM amNodeManager = rm.registerNode("127.0.0.1:1234", 2048);
MockRMAppSubmissionData data =
MockRMAppSubmissionData.Builder.createWithMemory(CONTAINER_MB, rm)
.withAppName("testwordcount")
.withUser(user)
.build();
RMApp app1 = MockRMAppSubmitter.submit(rm, data);
amNodeManager.nodeHeartbeat(true);
WebTarget r = target();
Response response = r.path("ws").path("v1").path("cluster")
.path("apps").path(app1.getApplicationId().toString())
.path("appattempts").request(MediaType.APPLICATION_XML)
.get(Response.class);
assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("appAttempts");
assertEquals(1, nodes.getLength(), "incorrect number of elements");
NodeList attempt = dom.getElementsByTagName("appAttempt");
assertEquals(1, attempt.getLength(), "incorrect number of elements");
verifyAppAttemptsXML(attempt, app1.getCurrentAppAttempt(), user);
rm.stop();
}
private void verifyAppAttemptsXML(NodeList nodes, RMAppAttempt appAttempt,
String user) {
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyAppAttemptInfoGeneric(appAttempt,
WebServicesTestUtils.getXmlInt(element, "id"),
WebServicesTestUtils.getXmlLong(element, "startTime"),
WebServicesTestUtils.getXmlString(element, "containerId"),
WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
WebServicesTestUtils.getXmlString(element, "nodeId"),
WebServicesTestUtils.getXmlString(element, "logsLink"), user,
WebServicesTestUtils.getXmlString(element, "exportPorts"),
WebServicesTestUtils.getXmlString(element, "appAttemptState"));
}
}
private void verifyAppAttemptsInfo(JSONObject info, RMAppAttempt appAttempt,
String user)
throws Exception {
assertEquals(12, info.length(), "incorrect number of elements");
verifyAppAttemptInfoGeneric(appAttempt, info.getInt("id"),
info.getLong("startTime"), info.getString("containerId"),
info.getString("nodeHttpAddress"), info.getString("nodeId"),
info.getString("logsLink"), user, info.getString("exportPorts"),
info.getString("appAttemptState"));
}
  /**
   * Asserts that the individual fields extracted from a JSON or XML
   * appAttempt representation match the state of the given
   * {@link RMAppAttempt}.
   *
   * <p>The {@code logsLink} is only checked structurally: it must start with
   * {@code http://} and end with {@code /} followed by the user name.
   *
   * <p>NOTE(review): {@code exportPorts} is accepted but never asserted —
   * TODO confirm whether this is intentional or a missing check.
   */
  private void verifyAppAttemptInfoGeneric(RMAppAttempt appAttempt, int id,
      long startTime, String containerId, String nodeHttpAddress, String
      nodeId, String logsLink, String user, String exportPorts,
      String appAttemptState) {
    assertEquals(appAttempt.getAppAttemptId()
        .getAttemptId(), id, "id doesn't match");
    assertEquals(appAttempt.getStartTime(),
        startTime, "startedTime doesn't match");
    checkStringMatch("containerId", appAttempt
        .getMasterContainer().getId().toString(), containerId);
    checkStringMatch("nodeHttpAddress", appAttempt
        .getMasterContainer().getNodeHttpAddress(), nodeHttpAddress);
    checkStringMatch("nodeId", appAttempt
        .getMasterContainer().getNodeId().toString(), nodeId);
    assertTrue(logsLink.startsWith("http://"), "logsLink doesn't match ");
    assertTrue(logsLink.endsWith("/" + user), "logsLink doesn't contain user info");
    assertEquals(appAttemptState, appAttempt.getAppAttemptState().toString(),
        "appAttemptState doesn't match");
  }
}
| JerseyBinder |
java | spring-projects__spring-boot | module/spring-boot-jdbc-test/src/test/java/org/springframework/boot/jdbc/test/autoconfigure/TestDatabaseAutoConfigurationNoEmbeddedTests.java | {
"start": 1482,
"end": 2556
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withUserConfiguration(ExistingDataSourceConfiguration.class)
.withConfiguration(AutoConfigurations.of(TestDatabaseAutoConfiguration.class));
@Test
void applyAnyReplace() {
this.contextRunner.run((context) -> assertThat(context).getFailure()
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("Failed to replace DataSource with an embedded database for tests.")
.hasMessageContaining("If you want an embedded database please put a supported one on the classpath")
.hasMessageContaining("or tune the replace attribute of @AutoConfigureTestDatabase."));
}
@Test
void applyNoReplace() {
this.contextRunner.withPropertyValues("spring.test.database.replace=NONE").run((context) -> {
assertThat(context).hasSingleBean(DataSource.class);
assertThat(context).getBean(DataSource.class).isSameAs(context.getBean("myCustomDataSource"));
});
}
@Configuration(proxyBeanMethods = false)
static | TestDatabaseAutoConfigurationNoEmbeddedTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/cleanup/DefaultResourceCleaner.java | {
"start": 1666,
"end": 3437
} | class ____<T> implements ResourceCleaner {
private static final Logger LOG = LoggerFactory.getLogger(DefaultResourceCleaner.class);
private final ComponentMainThreadExecutor mainThreadExecutor;
private final Executor cleanupExecutor;
private final CleanupFn<T> cleanupFn;
private final Collection<CleanupWithLabel<T>> prioritizedCleanup;
private final Collection<CleanupWithLabel<T>> regularCleanup;
private final RetryStrategy retryStrategy;
public static Builder<LocallyCleanableResource> forLocallyCleanableResources(
ComponentMainThreadExecutor mainThreadExecutor,
Executor cleanupExecutor,
RetryStrategy retryStrategy) {
return forCleanableResources(
mainThreadExecutor,
cleanupExecutor,
LocallyCleanableResource::localCleanupAsync,
retryStrategy);
}
public static Builder<GloballyCleanableResource> forGloballyCleanableResources(
ComponentMainThreadExecutor mainThreadExecutor,
Executor cleanupExecutor,
RetryStrategy retryStrategy) {
return forCleanableResources(
mainThreadExecutor,
cleanupExecutor,
GloballyCleanableResource::globalCleanupAsync,
retryStrategy);
}
@VisibleForTesting
static <T> Builder<T> forCleanableResources(
ComponentMainThreadExecutor mainThreadExecutor,
Executor cleanupExecutor,
CleanupFn<T> cleanupFunction,
RetryStrategy retryStrategy) {
return new Builder<>(mainThreadExecutor, cleanupExecutor, cleanupFunction, retryStrategy);
}
@VisibleForTesting
@FunctionalInterface
| DefaultResourceCleaner |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/convert/support/GenericConversionServiceTests.java | {
"start": 34405,
"end": 34484
} | class ____<T> {
}
@SuppressWarnings("rawtypes")
private static | GenericBaseClass |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/files/Files_assertHasDigest_DigestString_Test.java | {
"start": 2343,
"end": 6560
} | class ____ extends FilesBaseTest {
private final MessageDigest digest = mock(MessageDigest.class);
private final String expected = "";
@Test
void should_fail_if_actual_is_null() {
// GIVEN
File actual = null;
// WHEN
var error = expectAssertionError(() -> underTest.assertHasDigest(INFO, actual, digest, expected));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_with_should_exist_error_if_actual_does_not_exist() {
// GIVEN
File actual = new File("xyz");
// WHEN
expectAssertionError(() -> underTest.assertHasDigest(INFO, actual, digest, expected));
// THEN
verify(failures).failure(INFO, shouldExist(actual));
}
@Test
void should_fail_if_actual_exists_but_is_not_file() {
// GIVEN
File actual = newFolder(tempDir.getAbsolutePath() + "/tmp");
// WHEN
expectAssertionError(() -> underTest.assertHasDigest(INFO, actual, digest, expected));
// THEN
verify(failures).failure(INFO, shouldBeFile(actual));
}
@DisabledOnOs(OS.WINDOWS)
@Test
void should_fail_if_actual_exists_but_is_not_readable() {
// GIVEN
File actual = newFile(tempDir.getAbsolutePath() + "/Test.java");
actual.setReadable(false);
// WHEN
expectAssertionError(() -> underTest.assertHasDigest(INFO, actual, digest, expected));
// THEN
verify(failures).failure(INFO, shouldBeReadable(actual));
}
@Test
void should_throw_error_if_digest_is_null() {
// GIVEN
MessageDigest digest = null;
// WHEN
NullPointerException npe = catchNullPointerException(() -> underTest.assertHasDigest(INFO, actual, digest, expected));
// THEN
then(npe).hasMessage("The message digest algorithm should not be null");
}
@Test
void should_throw_error_if_expected_is_null() {
// GIVEN
byte[] expected = null;
// WHEN
NullPointerException npe = catchNullPointerException(() -> underTest.assertHasDigest(INFO, actual, digest, expected));
// THEN
then(npe).hasMessage("The binary representation of digest to compare to should not be null");
}
@Test
void should_throw_error_wrapping_caught_IOException() throws IOException {
// GIVEN
File actual = newFile(tempDir.getAbsolutePath() + "/tmp.txt");
IOException cause = new IOException();
given(nioFilesWrapper.newInputStream(any())).willThrow(cause);
// WHEN
Throwable error = catchThrowableOfType(UncheckedIOException.class,
() -> underTest.assertHasDigest(INFO, actual, digest, expected));
// THEN
then(error).hasCause(cause);
}
@Test
void should_throw_error_wrapping_caught_NoSuchAlgorithmException() {
// GIVEN
String unknownDigestAlgorithm = "UnknownDigestAlgorithm";
// WHEN
Throwable error = catchThrowable(() -> underTest.assertHasDigest(INFO, actual, unknownDigestAlgorithm, expected));
// THEN
then(error).isInstanceOf(IllegalStateException.class)
.hasMessage("Unable to find digest implementation for: <UnknownDigestAlgorithm>");
}
@Test
void should_fail_if_actual_does_not_have_expected_digest() throws Exception {
// GIVEN
File actual = newFile(tempDir.getAbsolutePath() + "/tmp.txt");
writeByteArrayToFile(actual, "Bad Content".getBytes());
MessageDigest digest = MessageDigest.getInstance("MD5");
String expected = toHex(digest.digest("Content".getBytes()));
DigestDiff digestDiff = new DigestDiff(toHex(digest.digest(readAllBytes(actual.toPath()))), expected, digest);
// WHEN
expectAssertionError(() -> unMockedFiles.assertHasDigest(INFO, actual, digest, expected));
// THEN
verify(failures).failure(INFO, shouldHaveDigest(actual, digestDiff));
}
@Test
void should_pass_if_actual_has_expected_digest() throws Exception {
// GIVEN
File actual = newFile(tempDir.getAbsolutePath() + "/tmp.txt");
byte[] data = "Content".getBytes();
writeByteArrayToFile(actual, data);
MessageDigest digest = MessageDigest.getInstance("MD5");
String expected = toHex(digest.digest(data));
// WHEN/THEN
unMockedFiles.assertHasDigest(INFO, actual, digest, expected);
}
}
| Files_assertHasDigest_DigestString_Test |
java | quarkusio__quarkus | integration-tests/reactive-oracle-client/src/main/java/io/quarkus/it/reactive/oracle/client/FruitResource.java | {
"start": 395,
"end": 1738
} | class ____ {
@Inject
Pool client;
@PostConstruct
void setupDb() {
client.query("DROP TABLE fruits").execute()
.onFailure().recoverWithNull()
.flatMap(r -> client.query("CREATE TABLE fruits (id INT PRIMARY KEY, name VARCHAR(500) NOT NULL)")
.execute())
.flatMap(r -> client.query("INSERT INTO fruits (id, name) VALUES (1, 'Orange')").execute())
.flatMap(r -> client.query("INSERT INTO fruits (id, name) VALUES (2, 'Pear')").execute())
.flatMap(r -> client.query("INSERT INTO fruits (id, name) VALUES (3, 'Apple')").execute())
.await().indefinitely();
}
@GET
public CompletionStage<JsonArray> listFruits() {
return client.query("SELECT * FROM fruits").execute()
.map(pgRowSet -> {
JsonArray jsonArray = new JsonArray();
for (Row row : pgRowSet) {
jsonArray.add(toJson(row));
}
return jsonArray;
})
.subscribeAsCompletionStage();
}
private JsonObject toJson(Row row) {
return new JsonObject()
.put("id", row.getLong("id"))
.put("name", row.getString("name"));
}
}
| FruitResource |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/SerializationHelper.java | {
"start": 5218,
"end": 6902
} | class ____ used.
* <p>
* The stream will be closed once the object is read. This
* avoids the need for a finally clause, and maybe also for
* exception handling, in the application code.
* <p>
* The stream passed in is not buffered internally within this
* method. This is the responsibility of the caller, if desired.
*
* @param inputStream the serialized object input stream, must not be null
* @param loader The classloader to use
*
* @return the deserialized object
*
* @throws IllegalArgumentException if <code>inputStream</code> is <code>null</code>
* @throws SerializationException (runtime) if the serialization fails
*/
public static Object deserialize(InputStream inputStream, ClassLoader loader) throws SerializationException {
return doDeserialize( inputStream, loader, defaultClassLoader(), hibernateClassLoader() );
}
public static <T> T doDeserialize(
InputStream inputStream,
ClassLoader loader,
ClassLoader fallbackLoader1,
ClassLoader fallbackLoader2) throws SerializationException {
if ( inputStream == null ) {
throw new IllegalArgumentException( "The InputStream must not be null" );
}
CORE_LOGGER.trace( "Starting deserialization of object" );
try ( var in = new CustomObjectInputStream( inputStream, loader, fallbackLoader1, fallbackLoader2 ) ) {
//noinspection unchecked
return (T) in.readObject();
}
catch (ClassNotFoundException | IOException e) {
throw new SerializationException( "could not deserialize", e );
}
}
/**
* Deserializes an object from an array of bytes using the
* Thread Context ClassLoader (TCCL). If there is no TCCL set,
* the classloader of the calling | is |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fixed/link/BindySimpleFixedLengthWithLinkTest.java | {
"start": 5499,
"end": 5762
} | class ____ {
@DataField(pos = 4, length = 3)
private String fieldB;
public String getFieldB() {
return fieldB;
}
public void setFieldB(String fieldB) {
this.fieldB = fieldB;
}
}
}
| SubRec |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/CheckpointStatisticDetailsHandler.java | {
"start": 2445,
"end": 4797
} | class ____
extends AbstractCheckpointHandler<CheckpointStatistics, CheckpointMessageParameters>
implements OnlyExecutionGraphJsonArchivist {
public CheckpointStatisticDetailsHandler(
GatewayRetriever<? extends RestfulGateway> leaderRetriever,
Duration timeout,
Map<String, String> responseHeaders,
MessageHeaders<EmptyRequestBody, CheckpointStatistics, CheckpointMessageParameters>
messageHeaders,
Executor executor,
Cache<JobID, CompletableFuture<CheckpointStatsSnapshot>> checkpointStatsSnapshotCache,
CheckpointStatsCache checkpointStatsCache) {
super(
leaderRetriever,
timeout,
responseHeaders,
messageHeaders,
executor,
checkpointStatsSnapshotCache,
checkpointStatsCache);
}
@Override
protected CheckpointStatistics handleCheckpointRequest(
HandlerRequest<EmptyRequestBody> ignored, AbstractCheckpointStats checkpointStats) {
return CheckpointStatistics.generateCheckpointStatistics(checkpointStats, true);
}
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph)
throws IOException {
CheckpointStatsSnapshot stats = graph.getCheckpointStatsSnapshot();
if (stats == null) {
return Collections.emptyList();
}
CheckpointStatsHistory history = stats.getHistory();
List<ArchivedJson> archive = new ArrayList<>(history.getCheckpoints().size());
for (AbstractCheckpointStats checkpoint : history.getCheckpoints()) {
ResponseBody json = CheckpointStatistics.generateCheckpointStatistics(checkpoint, true);
String path =
getMessageHeaders()
.getTargetRestEndpointURL()
.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
.replace(
':' + CheckpointIdPathParameter.KEY,
String.valueOf(checkpoint.getCheckpointId()));
archive.add(new ArchivedJson(path, json));
}
return archive;
}
}
| CheckpointStatisticDetailsHandler |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/logging/LoggingApplicationListenerIntegrationTests.java | {
"start": 3830,
"end": 4123
} | class ____ {
private final LoggingSystem loggingSystem;
private final @Nullable LogFile logFile;
SampleService(LoggingSystem loggingSystem, ObjectProvider<LogFile> logFile) {
this.loggingSystem = loggingSystem;
this.logFile = logFile.getIfAvailable();
}
}
static | SampleService |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/metadata/authorizer/StandardAuthorizer.java | {
"start": 8173,
"end": 10086
} | class ____ {
private final Sensor authorizationAllowedSensor;
private final Sensor authorizationDeniedSensor;
private final Sensor authorizationRequestSensor;
private AuthorizerMetrics(PluginMetrics metrics) {
authorizationAllowedSensor = metrics.addSensor("authorizer-authorization-allowed");
authorizationAllowedSensor.add(
metrics.metricName("authorization-allowed-rate-per-minute", "The number of authorization allowed per minute", new LinkedHashMap<>()),
new Rate(TimeUnit.MINUTES, new WindowedCount()));
authorizationDeniedSensor = metrics.addSensor("authorizer-authorization-denied");
authorizationDeniedSensor.add(
metrics.metricName("authorization-denied-rate-per-minute", "The number of authorization denied per minute", new LinkedHashMap<>()),
new Rate(TimeUnit.MINUTES, new WindowedCount()));
authorizationRequestSensor = metrics.addSensor("authorizer-authorization-request");
authorizationRequestSensor.add(
metrics.metricName("authorization-request-rate-per-minute", "The number of authorization request per minute", new LinkedHashMap<>()),
new Rate(TimeUnit.MINUTES, new WindowedCount()));
metrics.addMetric(
metrics.metricName("acls-total-count", "The number of acls defined", new LinkedHashMap<>()),
(Gauge<Integer>) (config, now) -> aclCount());
}
private void recordAuthorizerMetrics(AuthorizationResult authorizationResult) {
if (authorizationResult == ALLOWED) {
authorizationAllowedSensor.record();
} else {
authorizationDeniedSensor.record();
}
authorizationRequestSensor.record();
}
}
}
| AuthorizerMetrics |
java | apache__camel | components/camel-stitch/src/main/java/org/apache/camel/component/stitch/StitchConstants.java | {
"start": 899,
"end": 3007
} | class ____ {
private static final String HEADER_PREFIX = "CamelStitch";
// headers evaluated by producer
@Metadata(label = "producer", description = "The name of the destination table the data is being pushed to. " +
"Table names must be unique in each destination schema, or loading issues will occur. "
+
"Note: The number of characters in the table name should be within the destinations allowed limits or data will rejected.",
javaType = "String")
public static final String TABLE_NAME = HEADER_PREFIX + "TableName";
@Metadata(label = "producer", description = "The schema that describes the Stitch message",
javaType = "StitchSchema or Map")
public static final String SCHEMA = HEADER_PREFIX + "Schema";
@Metadata(label = "producer",
description = "A collection of strings representing the Primary Key fields in the source table. " +
"Stitch use these Primary Keys to de-dupe data during loading If not provided, the table will be loaded in an append-only manner.",
javaType = "Collection<String>")
public static final String KEY_NAMES = HEADER_PREFIX + "KeyNames";
// headers set by producer
@Metadata(label = "producer", description = "HTTP Status code that is returned from Stitch Import HTTP API.",
javaType = "Integer")
public static final String CODE = HEADER_PREFIX + "Code";
@Metadata(label = "producer", description = "HTTP headers that are returned from Stitch Import HTTP API.",
javaType = "Map<String, Object>")
public static final String HEADERS = HEADER_PREFIX + "Headers";
@Metadata(label = "producer",
description = "The status message that Stitch returns after sending the data through Stitch Import API.",
javaType = "String")
public static final String STATUS = HEADER_PREFIX + "Status";
private StitchConstants() {
}
}
| StitchConstants |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/ScanCursor.java | {
"start": 163,
"end": 1714
} | class ____ {
/**
* Finished cursor.
*/
public static final ScanCursor FINISHED = new ImmutableScanCursor("0", true);
/**
* Initial cursor.
*/
public static final ScanCursor INITIAL = new ImmutableScanCursor("0", false);
private String cursor;
private boolean finished;
/**
* Creates a new {@link ScanCursor}.
*/
public ScanCursor() {
}
/**
* Creates a new {@link ScanCursor}.
*
* @param cursor
* @param finished
*/
public ScanCursor(String cursor, boolean finished) {
this.cursor = cursor;
this.finished = finished;
}
/**
*
* @return cursor id
*/
public String getCursor() {
return cursor;
}
/**
* Set the cursor
*
* @param cursor the cursor id
*/
public void setCursor(String cursor) {
LettuceAssert.notEmpty(cursor, "Cursor must not be empty");
this.cursor = cursor;
}
/**
*
* @return true if the scan operation of this cursor is finished.
*/
public boolean isFinished() {
return finished;
}
public void setFinished(boolean finished) {
this.finished = finished;
}
/**
* Creates a Scan-Cursor reference.
*
* @param cursor the cursor id
* @return ScanCursor
*/
public static ScanCursor of(String cursor) {
ScanCursor scanCursor = new ScanCursor();
scanCursor.setCursor(cursor);
return scanCursor;
}
private static | ScanCursor |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/DirtiesContextTestExecutionListenerTests.java | {
"start": 17181,
"end": 17284
} | class ____ {
void test() {
}
}
@MetaDirtyAfterClass
static | DirtiesContextDeclaredLocallyAfterClass |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/BrowseEndpointBuilderFactory.java | {
"start": 9998,
"end": 10350
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedBrowseEndpointProducerBuilder advanced() {
return (AdvancedBrowseEndpointProducerBuilder) this;
}
}
/**
* Advanced builder for endpoint producers for the Browse component.
*/
public | BrowseEndpointProducerBuilder |
java | spring-projects__spring-boot | module/spring-boot-webmvc-test/src/test/java/org/springframework/boot/webmvc/test/autoconfigure/mockmvc/MockMvcTesterSpringBootTestIntegrationTests.java | {
"start": 2177,
"end": 3567
} | class ____ {
@MockitoBean
private ExampleMockableService service;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private MockMvcTester mvc;
@Test
void shouldFindController1(CapturedOutput output) {
assertThat(this.mvc.get().uri("/one")).hasStatusOk().hasBodyTextEqualTo("one");
assertThat(output).contains("Request URI = /one");
}
@Test
void shouldFindController2() {
assertThat(this.mvc.get().uri("/two")).hasStatusOk().hasBodyTextEqualTo("hellotwo");
}
@Test
void shouldFindControllerAdvice() {
assertThat(this.mvc.get().uri("/error")).hasStatusOk().hasBodyTextEqualTo("recovered");
}
@Test
void shouldHaveRealService() {
assertThat(this.applicationContext.getBean(ExampleRealService.class)).isNotNull();
}
@Test
void shouldTestWithRestTestClient(@Autowired RestTestClient restTestClient) {
ResponseSpec spec = restTestClient.get().uri("/one").exchange();
assertThat(RestTestClientResponse.from(spec)).hasStatusOk().bodyText().isEqualTo("one");
}
@Test
void shouldNotFailIfFormattingValueThrowsException(CapturedOutput output) {
assertThat(this.mvc.get().uri("/formatting")).hasStatusOk().hasBodyTextEqualTo("formatting");
assertThat(output).contains(
"Session Attrs = << Exception 'java.lang.IllegalStateException: Formatting failed' occurred while formatting >>");
}
}
| MockMvcTesterSpringBootTestIntegrationTests |
java | apache__camel | components/camel-cxf/camel-cxf-soap/src/test/java/org/apache/camel/component/cxf/jaxws/CxfConsumerPayloadXPathTest.java | {
"start": 3570,
"end": 3942
} | class ____ extends BaseRouteBuilder {
@Override
public void configure() {
from("cxf://" + testAddress + "?dataFormat=PAYLOAD")
.streamCaching()
.process(new XPathStringResultProcessor())
.process(new ResponseProcessor());
}
}
private | TestRouteWithXPathStringResultBuilder |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/util/subpackage/PersonEntity.java | {
"start": 911,
"end": 2655
} | class ____ extends PersistentEntity implements Person {
protected String name;
private int age;
String eyeColor;
boolean likesPets = false;
private Number favoriteNumber;
private String puzzle;
private String privateEye;
@Override
public String getName() {
return this.name;
}
@SuppressWarnings("unused")
private void setName(String name) {
this.name = name;
}
@Override
public int getAge() {
return this.age;
}
protected void setAge(int age) {
this.age = age;
}
@Override
public String getEyeColor() {
return this.eyeColor;
}
void setEyeColor(String eyeColor) {
this.eyeColor = eyeColor;
}
@Override
public boolean likesPets() {
return this.likesPets;
}
protected void setLikesPets(boolean likesPets) {
this.likesPets = likesPets;
}
@Override
public Number getFavoriteNumber() {
return this.favoriteNumber;
}
@SuppressWarnings("unused")
private void setFavoriteNumber(Number favoriteNumber) {
this.favoriteNumber = favoriteNumber;
}
public String getPuzzle() {
return this.puzzle;
}
public final void setPuzzle(String puzzle) {
this.puzzle = puzzle;
}
@SuppressWarnings("unused")
private String getPrivateEye() {
return this.privateEye;
}
public void setPrivateEye(String privateEye) {
this.privateEye = privateEye;
}
@Override
public String toString() {
// @formatter:off
return new ToStringCreator(this)
.append("id", getId())
.append("name", this.name)
.append("age", this.age)
.append("eyeColor", this.eyeColor)
.append("likesPets", this.likesPets)
.append("favoriteNumber", this.favoriteNumber)
.append("puzzle", this.puzzle)
.append("privateEye", this.privateEye)
.toString();
// @formatter:on
}
}
| PersonEntity |
java | micronaut-projects__micronaut-core | inject-java/src/main/java/io/micronaut/annotation/processing/visitor/AbstractJavaElement.java | {
"start": 2871,
"end": 3006
} | class ____ other elements to extend from.
*
* @author James Kleeh
* @author graemerocher
* @since 1.0
*/
@Internal
public abstract | for |
java | elastic__elasticsearch | x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java | {
"start": 3734,
"end": 3782
} | class ____ autoscaling functionality.
*/
public | for |
java | apache__flink | flink-dstl/flink-dstl-dfs/src/main/java/org/apache/flink/changelog/fs/OutputStreamWithPos.java | {
"start": 1331,
"end": 3625
} | class ____ extends OutputStream {
protected final Path path;
protected OutputStream outputStream;
protected long pos;
protected boolean compression;
protected final OutputStream originalStream;
public OutputStreamWithPos(OutputStream outputStream, Path path) {
this.outputStream = Preconditions.checkNotNull(outputStream);
this.originalStream = Preconditions.checkNotNull(outputStream);
this.path = Preconditions.checkNotNull(path);
this.pos = 0;
this.compression = false;
}
protected OutputStream wrapInternal(boolean compression, int bufferSize, OutputStream fsStream)
throws IOException {
fsStream.write(compression ? 1 : 0);
StreamCompressionDecorator instance =
compression
? SnappyStreamCompressionDecorator.INSTANCE
: UncompressedStreamCompressionDecorator.INSTANCE;
return new BufferedOutputStream(instance.decorateWithCompression(fsStream), bufferSize);
}
public void wrap(boolean compression, int bufferSize) throws IOException {
this.compression = compression;
this.outputStream = wrapInternal(compression, bufferSize, this.originalStream);
}
@Override
public void write(int b) throws IOException {
outputStream.write(b);
pos++;
}
@Override
public void write(byte[] b) throws IOException {
outputStream.write(b);
pos += b.length;
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
outputStream.write(b, off, len);
pos += len;
}
@Override
public void flush() throws IOException {
outputStream.flush();
}
@Override
public void close() throws IOException {
try {
outputStream.close();
originalStream.close();
} catch (IOException e) {
getPath().getFileSystem().delete(getPath(), true);
}
}
public long getPos() {
return pos;
}
public Path getPath() {
return path;
}
public StreamStateHandle getHandle(BiFunction<Path, Long, StreamStateHandle> handleFactory) {
return handleFactory.apply(path, this.pos);
}
}
| OutputStreamWithPos |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/jdk8/ContextualOptionalTest.java | {
"start": 660,
"end": 1970
} | class ____
{
public Optional<Date> date;
@JsonFormat(shape=JsonFormat.Shape.STRING, pattern="yyyy+MM+dd")
public Optional<Date> date1;
@JsonFormat(shape=JsonFormat.Shape.STRING, pattern="yyyy*MM*dd")
public Optional<Date> date2;
}
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
@Test
public void testContextualOptionals() throws Exception
{
SimpleDateFormat df = new SimpleDateFormat("yyyy/MM/dd");
df.setTimeZone(TimeZone.getTimeZone("UTC"));
ObjectMapper mapper = jsonMapperBuilder()
.disable(JsonWriteFeature.ESCAPE_FORWARD_SLASHES)
.defaultDateFormat(df)
.build();
ContextualOptionals input = new ContextualOptionals();
input.date = Optional.ofNullable(new Date(0L));
input.date1 = Optional.ofNullable(new Date(0L));
input.date2 = Optional.ofNullable(new Date(0L));
final String json = mapper.writeValueAsString(input);
//System.err.println("JSON:\n"+json);
assertEquals(a2q(
"{'date':'1970/01/01','date1':'1970+01+01','date2':'1970*01*01'}"),
json);
}
}
| ContextualOptionals |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/symbol/IRDecorations.java | {
"start": 1284,
"end": 1320
} | class ____ {
/** base | IRDecorations |
java | apache__dubbo | dubbo-registry/dubbo-registry-api/src/main/java/org/apache/dubbo/registry/client/migration/MigrationRuleHandler.java | {
"start": 1462,
"end": 6412
} | class ____<T> {
public static final String DUBBO_SERVICEDISCOVERY_MIGRATION = "dubbo.application.migration.step";
private static final ErrorTypeAwareLogger logger =
LoggerFactory.getErrorTypeAwareLogger(MigrationRuleHandler.class);
private final MigrationClusterInvoker<T> migrationInvoker;
private volatile MigrationStep currentStep;
private volatile Float currentThreshold = 0f;
private final URL consumerURL;
private final ReentrantLock lock = new ReentrantLock();
public MigrationRuleHandler(MigrationClusterInvoker<T> invoker, URL url) {
this.migrationInvoker = invoker;
this.consumerURL = url;
}
public void doMigrate(MigrationRule rule) {
lock.lock();
try {
if (migrationInvoker instanceof ServiceDiscoveryMigrationInvoker) {
refreshInvoker(MigrationStep.FORCE_APPLICATION, 1.0f, rule);
return;
}
// initial step : APPLICATION_FIRST
MigrationStep step = MigrationStep.APPLICATION_FIRST;
float threshold = -1f;
try {
step = rule.getStep(consumerURL);
threshold = rule.getThreshold(consumerURL);
} catch (Exception e) {
logger.error(
REGISTRY_NO_PARAMETERS_URL,
"",
"",
"Failed to get step and threshold info from rule: " + rule,
e);
}
if (refreshInvoker(step, threshold, rule)) {
// refresh success, update rule
setMigrationRule(rule);
}
} finally {
lock.unlock();
}
}
private boolean refreshInvoker(MigrationStep step, Float threshold, MigrationRule newRule) {
if (step == null || threshold == null) {
throw new IllegalStateException("Step or threshold of migration rule cannot be null");
}
MigrationStep originStep = currentStep;
if ((currentStep == null || currentStep != step) || !currentThreshold.equals(threshold)) {
boolean success = true;
switch (step) {
case APPLICATION_FIRST:
migrationInvoker.migrateToApplicationFirstInvoker(newRule);
break;
case FORCE_APPLICATION:
success = migrationInvoker.migrateToForceApplicationInvoker(newRule);
break;
case FORCE_INTERFACE:
default:
success = migrationInvoker.migrateToForceInterfaceInvoker(newRule);
}
if (success) {
setCurrentStepAndThreshold(step, threshold);
logger.info(
"Succeed Migrated to " + step + " mode. Service Name: " + consumerURL.getDisplayServiceKey());
report(step, originStep, "true");
} else {
// migrate failed, do not save new step and rule
logger.warn(
INTERNAL_ERROR,
"unknown error in registry module",
"",
"Migrate to " + step + " mode failed. Probably not satisfy the threshold you set " + threshold
+ ". Please try re-publish configuration if you still after check.");
report(step, originStep, "false");
}
return success;
}
// ignore if step is same with previous, will continue override rule for MigrationInvoker
return true;
}
private void report(MigrationStep step, MigrationStep originStep, String success) {
FrameworkStatusReportService reportService =
consumerURL.getOrDefaultApplicationModel().getBeanFactory().getBean(FrameworkStatusReportService.class);
if (reportService.hasReporter()) {
reportService.reportMigrationStepStatus(reportService.createMigrationStepReport(
consumerURL.getServiceInterface(),
consumerURL.getVersion(),
consumerURL.getGroup(),
String.valueOf(originStep),
String.valueOf(step),
success));
}
}
private void setMigrationRule(MigrationRule rule) {
this.migrationInvoker.setMigrationRule(rule);
}
private void setCurrentStepAndThreshold(MigrationStep currentStep, Float currentThreshold) {
if (currentThreshold != null) {
this.currentThreshold = currentThreshold;
}
if (currentStep != null) {
this.currentStep = currentStep;
this.migrationInvoker.setMigrationStep(currentStep);
}
}
// for test purpose
public MigrationStep getMigrationStep() {
return currentStep;
}
}
| MigrationRuleHandler |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java | {
"start": 2988,
"end": 6106
} | enum ____
* @return a new framework group factory
*/
protected abstract <T extends Enum<T>>
FrameworkGroupFactory<G> newFrameworkGroupFactory(Class<T> cls);
/**
* Create a new counter group
* @param name of the group
* @param limits the counters limits policy object
* @return a new counter group
*/
public G newGroup(String name, Limits limits) {
return newGroup(name, ResourceBundles.getCounterGroupName(name, name),
limits);
}
/**
* Create a new counter group
* @param name of the group
* @param displayName of the group
* @param limits the counters limits policy object
* @return a new counter group
*/
public G newGroup(String name, String displayName, Limits limits) {
FrameworkGroupFactory<G> gf = fmap.get(name);
if (gf != null) return gf.newGroup(name);
if (name.equals(FS_GROUP_NAME)) {
return newFileSystemGroup();
} else if (s2i.get(name) != null) {
return newFrameworkGroup(s2i.get(name));
}
return newGenericGroup(name, displayName, limits);
}
/**
* Create a new framework group
* @param id of the group
* @return a new framework group
*/
public G newFrameworkGroup(int id) {
String name;
synchronized(CounterGroupFactory.class) {
if (id < 0 || id >= i2s.size()) throwBadFrameGroupIdException(id);
name = i2s.get(id); // should not throw here.
}
FrameworkGroupFactory<G> gf = fmap.get(name);
if (gf == null) throwBadFrameGroupIdException(id);
return gf.newGroup(name);
}
/**
* Get the id of a framework group
* @param name of the group
* @return the framework group id
*/
public static synchronized int getFrameworkGroupId(String name) {
Integer i = s2i.get(name);
if (i == null) throwBadFrameworkGroupNameException(name);
return i;
}
/**
* @return the counter factory version
*/
public int version() {
return VERSION;
}
/**
* Check whether a group name is a name of a framework group (including
* the filesystem group).
*
* @param name to check
* @return true for framework group names
*/
public static synchronized boolean isFrameworkGroup(String name) {
return s2i.get(name) != null || name.equals(FS_GROUP_NAME);
}
private static void throwBadFrameGroupIdException(int id) {
throw new IllegalArgumentException("bad framework group id: "+ id);
}
private static void throwBadFrameworkGroupNameException(String name) {
throw new IllegalArgumentException("bad framework group name: "+ name);
}
/**
* Abstract factory method to create a generic (vs framework) counter group
* @param name of the group
* @param displayName of the group
* @param limits limits of the counters
* @return a new generic counter group
*/
protected abstract G newGenericGroup(String name, String displayName,
Limits limits);
/**
* Abstract factory method to create a file system counter group
* @return a new file system counter group
*/
protected abstract G newFileSystemGroup();
}
| class |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java | {
"start": 3167,
"end": 18904
} | class ____ extends ESTestCase {
private static ThreadPool threadPool;
@Before
public void setUpThreadPool() throws Exception {
threadPool = new TestThreadPool("test");
}
@After
public void shutdownThreadPool() throws Exception {
TestThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
}
public void testSimpleLimitRequests() throws Exception {
DiscoveryNodes nodes = newNodes(between(1, 16));
int numberOfShards = randomIntBetween(1, 100);
Map<DiscoveryNode, Queue<ShardRouting>> nodeToShards = new HashMap<>();
Map<ShardId, List<ShardRouting>> groupShardRoutings = new HashMap<>();
for (int i = 0; i < numberOfShards; i++) {
ShardId shardId = new ShardId("test_index", "n/a", i);
DiscoveryNode node = randomFrom(nodes.getAllNodes());
ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, node.getId(), randomBoolean(), ShardRoutingState.STARTED);
groupShardRoutings.put(shardId, List.of(shardRouting));
nodeToShards.computeIfAbsent(node, k -> new LinkedList<>()).add(shardRouting);
}
TestTransportService transportService = new TestTransportService(threadPool, r -> {});
ClusterService clusterService = mockClusterService(ClusterState.builder(ClusterState.EMPTY_STATE).nodes(nodes).build());
TransportAnalyzeIndexDiskUsageAction transportAction = createTransportAction(clusterService, transportService, groupShardRoutings);
int maxConcurrentRequests = randomIntBetween(1, 5);
PlainActionFuture<AnalyzeIndexDiskUsageResponse> future = new PlainActionFuture<>();
Task task = new Task(randomLong(), "transport", "action", "", null, emptyMap());
TransportAnalyzeIndexDiskUsageAction.LimitingRequestPerNodeBroadcastAction broadcastAction =
transportAction.new LimitingRequestPerNodeBroadcastAction(task, randomDiskUsageRequest(), future, maxConcurrentRequests);
broadcastAction.start();
Map<DiscoveryNode, Integer> expectedRequestCounts = new HashMap<>();
for (Map.Entry<DiscoveryNode, Queue<ShardRouting>> e : nodeToShards.entrySet()) {
Queue<ShardRouting> shards = e.getValue();
int sentRequests = Math.min(shards.size(), maxConcurrentRequests);
expectedRequestCounts.put(e.getKey(), sentRequests);
for (int i = 0; i < sentRequests; i++) {
shards.remove();
}
}
assertThat(transportService.getRequestsSentPerNode(), equalTo(expectedRequestCounts));
expectedRequestCounts.clear();
final AtomicLong totalIndexSizeInBytes = new AtomicLong();
final List<CapturingRequest> pendingRequests = new ArrayList<>(transportService.getCapturedRequests(true));
while (pendingRequests.isEmpty() == false) {
expectedRequestCounts.clear();
List<CapturingRequest> toReply = randomSubsetOf(pendingRequests);
for (CapturingRequest r : toReply) {
long shardSize = between(1, Integer.MAX_VALUE);
totalIndexSizeInBytes.addAndGet(shardSize);
r.sendRandomResponse(shardSize, randomBoolean());
pendingRequests.remove(r);
if (nodeToShards.get(r.node).poll() != null) {
expectedRequestCounts.compute(r.node, (k, v) -> v == null ? 1 : v + 1);
}
}
assertBusy(() -> assertThat(transportService.getRequestsSentPerNode(), equalTo(expectedRequestCounts)));
pendingRequests.addAll(transportService.getCapturedRequests(true));
}
AnalyzeIndexDiskUsageResponse response = future.actionGet();
assertThat(response.getTotalShards(), equalTo(numberOfShards));
assertThat(response.getFailedShards(), equalTo(0));
assertThat(response.getSuccessfulShards(), equalTo(numberOfShards));
assertThat(response.getStats().get("test_index").getIndexSizeInBytes(), equalTo(totalIndexSizeInBytes.get()));
}
public void testRandomLimitConcurrentRequests() throws Exception {
DiscoveryNodes nodes = newNodes(between(1, 20));
int numberOfShards = randomIntBetween(1, 1000);
Map<ShardId, List<ShardRouting>> shardToRoutings = new HashMap<>();
for (int i = 0; i < numberOfShards; i++) {
ShardId shardId = new ShardId("test_index", "n/a", i);
List<ShardRouting> shardRoutings = randomSubsetOf(between(1, nodes.size()), nodes.getAllNodes()).stream()
.map(node -> TestShardRouting.newShardRouting(shardId, node.getId(), randomBoolean(), ShardRoutingState.STARTED))
.toList();
shardToRoutings.put(shardId, shardRoutings);
}
Set<ShardId> failedShards = new HashSet<>(randomSubsetOf(between(0, (numberOfShards + 4) / 5), shardToRoutings.keySet()));
int maxConcurrentRequests = randomIntBetween(1, 16);
PlainActionFuture<AnalyzeIndexDiskUsageResponse> requestFuture = new PlainActionFuture<>();
Queue<CapturingRequest> pendingRequests = ConcurrentCollections.newQueue();
Semaphore availableRequests = new Semaphore(0);
AtomicBoolean stopped = new AtomicBoolean();
TestTransportService transportService = new TestTransportService(threadPool, r -> {
pendingRequests.add(r);
availableRequests.release();
});
final AtomicLong totalIndexSize = new AtomicLong();
final Thread handlingThread = new Thread(() -> {
Map<ShardId, Integer> shardIdToRounds = ConcurrentCollections.newConcurrentMap();
while (stopped.get() == false && requestFuture.isDone() == false) {
try {
if (availableRequests.tryAcquire(10, TimeUnit.MILLISECONDS) == false) {
continue;
}
if (randomBoolean()) {
// make sure we never have more max_concurrent_requests outstanding requests on each node
Map<DiscoveryNode, Integer> perNode = new HashMap<>();
for (CapturingRequest r : pendingRequests) {
int count = perNode.compute(r.node, (k, v) -> v == null ? 1 : v + 1);
assertThat(count, lessThanOrEqualTo(maxConcurrentRequests));
}
}
final List<CapturingRequest> readyRequests = randomSubsetOf(between(1, pendingRequests.size()), pendingRequests);
pendingRequests.removeAll(readyRequests);
availableRequests.acquireUninterruptibly(readyRequests.size() - 1);
for (CapturingRequest r : readyRequests) {
ShardId shardId = r.request.shardId();
int round = shardIdToRounds.compute(shardId, (k, curr) -> curr == null ? 1 : curr + 1);
int maxRound = shardToRoutings.get(shardId).size();
if (failedShards.contains(shardId) || (round < maxRound && randomBoolean())) {
r.sendRandomFailure(randomBoolean());
} else {
long shardSize = between(1, Integer.MAX_VALUE);
totalIndexSize.addAndGet(shardSize);
r.sendRandomResponse(shardSize, randomBoolean());
}
}
} catch (InterruptedException e) {
throw new AssertionError(e);
}
}
});
handlingThread.start();
ClusterService clusterService = mockClusterService(ClusterState.builder(ClusterState.EMPTY_STATE).nodes(nodes).build());
TransportAnalyzeIndexDiskUsageAction transportAction = createTransportAction(clusterService, transportService, shardToRoutings);
Task task = new Task(randomLong(), "transport", "action", "", null, emptyMap());
TransportAnalyzeIndexDiskUsageAction.LimitingRequestPerNodeBroadcastAction broadcastAction =
transportAction.new LimitingRequestPerNodeBroadcastAction(task, randomDiskUsageRequest(), requestFuture, maxConcurrentRequests);
broadcastAction.start();
try {
AnalyzeIndexDiskUsageResponse response = requestFuture.actionGet(TimeValue.timeValueSeconds(30));
assertThat(response.getTotalShards(), equalTo(numberOfShards));
assertThat(response.getFailedShards(), equalTo(failedShards.size()));
assertThat(response.getSuccessfulShards(), equalTo(numberOfShards - failedShards.size()));
if (numberOfShards == failedShards.size()) {
assertTrue(response.getStats().isEmpty());
assertThat(totalIndexSize.get(), equalTo(0L));
} else {
assertThat(response.getStats().get("test_index").getIndexSizeInBytes(), equalTo(totalIndexSize.get()));
}
} finally {
stopped.set(true);
handlingThread.join();
}
}
/**
* Make sure that we don't hit StackOverflow if responses are replied on the same thread.
*/
public void testManyShards() {
DiscoveryNodes discoNodes = newNodes(10);
int numberOfShards = randomIntBetween(200, 10000);
Map<ShardId, List<ShardRouting>> shardToRoutings = new HashMap<>();
for (int i = 0; i < numberOfShards; i++) {
ShardId shardId = new ShardId("test_index", "n/a", i);
List<ShardRouting> shardRoutings = randomSubsetOf(between(1, discoNodes.size()), discoNodes.getAllNodes()).stream()
.map(node -> TestShardRouting.newShardRouting(shardId, node.getId(), randomBoolean(), ShardRoutingState.STARTED))
.toList();
shardToRoutings.put(shardId, shardRoutings);
}
Set<ShardId> successfulShards = new HashSet<>(randomSubsetOf(between(0, (numberOfShards + 4) / 5), shardToRoutings.keySet()));
final AtomicLong totalIndexSize = new AtomicLong();
boolean maybeFork = randomBoolean();
Map<ShardId, Integer> shardIdToRounds = ConcurrentCollections.newConcurrentMap();
TestTransportService transportService = new TestTransportService(threadPool, r -> {
ShardId shardId = r.request.shardId();
int round = shardIdToRounds.compute(shardId, (k, curr) -> curr == null ? 1 : curr + 1);
int maxRound = shardToRoutings.get(shardId).size();
if (successfulShards.contains(shardId) == false || (round < maxRound && randomBoolean())) {
r.sendRandomFailure(maybeFork);
} else {
long shardSize = between(0, Integer.MAX_VALUE);
totalIndexSize.addAndGet(shardSize);
r.sendRandomResponse(shardSize, maybeFork);
}
});
ClusterService clusterService = mockClusterService(ClusterState.builder(ClusterState.EMPTY_STATE).nodes(discoNodes).build());
TransportAnalyzeIndexDiskUsageAction transportAction = createTransportAction(clusterService, transportService, shardToRoutings);
int maxConcurrentRequests = randomIntBetween(1, 16);
PlainActionFuture<AnalyzeIndexDiskUsageResponse> future = new PlainActionFuture<>();
Task task = new Task(randomLong(), "transport", "action", "", null, emptyMap());
TransportAnalyzeIndexDiskUsageAction.LimitingRequestPerNodeBroadcastAction broadcastAction =
transportAction.new LimitingRequestPerNodeBroadcastAction(task, randomDiskUsageRequest(), future, maxConcurrentRequests);
broadcastAction.start();
AnalyzeIndexDiskUsageResponse resp = future.actionGet();
assertThat(resp.getTotalShards(), equalTo(numberOfShards));
assertThat(resp.getSuccessfulShards(), equalTo(successfulShards.size()));
if (successfulShards.isEmpty()) {
assertTrue(resp.getStats().isEmpty());
} else {
assertThat(resp.getStats().get("test_index").getIndexSizeInBytes(), equalTo(totalIndexSize.get()));
}
}
private static DiscoveryNodes newNodes(int numNodes) {
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder();
for (int i = 0; i < numNodes; i++) {
nodes.add(DiscoveryNodeUtils.builder("node_" + i).roles(emptySet()).build());
}
return nodes.localNodeId("node_0").build();
}
private static AnalyzeIndexDiskUsageRequest randomDiskUsageRequest(String... indices) {
return new AnalyzeIndexDiskUsageRequest(indices, BroadcastRequest.DEFAULT_INDICES_OPTIONS, randomBoolean());
}
private TransportAnalyzeIndexDiskUsageAction createTransportAction(
ClusterService clusterService,
TransportService transportService,
Map<ShardId, List<ShardRouting>> targetShards
) {
return new TransportAnalyzeIndexDiskUsageAction(
clusterService,
transportService,
mock(IndicesService.class),
new ActionFilters(new HashSet<>()),
TestProjectResolvers.DEFAULT_PROJECT_ONLY,
new IndexNameExpressionResolver(
new ThreadContext(Settings.EMPTY),
EmptySystemIndices.INSTANCE,
TestProjectResolvers.DEFAULT_PROJECT_ONLY
) {
@Override
public String[] concreteIndexNames(ProjectMetadata project, IndicesRequest request) {
return request.indices();
}
}
) {
@Override
protected List<? extends ShardIterator> shards(
ClusterState clusterState,
AnalyzeIndexDiskUsageRequest request,
String[] concreteIndices
) {
final List<ShardIterator> shardIterators = new ArrayList<>(targetShards.size());
for (Map.Entry<ShardId, List<ShardRouting>> e : targetShards.entrySet()) {
shardIterators.add(new ShardIterator(e.getKey(), e.getValue()));
}
return shardIterators;
}
};
}
private ClusterService mockClusterService(ClusterState clusterState) {
ClusterService clusterService = mock(ClusterService.class);
when(clusterService.state()).thenReturn(clusterState);
when(clusterService.localNode()).thenReturn(clusterState.nodes().getLocalNode());
return clusterService;
}
private record CapturingRequest(
DiscoveryNode node,
AnalyzeDiskUsageShardRequest request,
TransportResponseHandler<AnalyzeDiskUsageShardResponse> handler
) {
void sendRandomResponse(long sizeInBytes, boolean maybeFork) {
AnalyzeDiskUsageShardResponse shardResponse = new AnalyzeDiskUsageShardResponse(
request.shardId(),
new IndexDiskUsageStats(sizeInBytes)
);
if (maybeFork && randomBoolean()) {
threadPool.generic().execute(() -> handler.handleResponse(shardResponse));
} else {
handler.handleResponse(shardResponse);
}
}
void sendRandomFailure(boolean maybeFork) {
TransportException e = new TransportException(new NodeDisconnectedException(node, "disconnected"));
if (maybeFork && randomBoolean()) {
threadPool.generic().execute(() -> handler.handleException(e));
} else {
handler.handleException(e);
}
}
}
static | TransportAnalyzeIndexDiskUsageActionTests |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/deployment/recording/JobParameter.java | {
"start": 1440,
"end": 2380
} | class ____ of the job parameter (e.g. an object), this will never be an interface
*/
public String getActualClassName() {
return actualClassName;
}
/**
* The actual job parameter
*
* @return the actual job parameter
*/
public Object getObject() {
return object;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (!(o instanceof JobParameter that))
return false;
return Objects.equals(className, that.className)
&& Objects.equals(actualClassName, that.actualClassName)
&& Objects.equals(object, that.object);
}
@Override
public int hashCode() {
return Objects.hash(className, actualClassName, object);
}
protected static boolean isNotNullNorAnEnum(Object object) {
return object != null && !(object instanceof Enum);
}
}
| name |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineFromIdConverter.java | {
"start": 936,
"end": 3365
} | enum ____ {
APPLICATION_FROMID {
@Override TimelineReaderContext decodeUID(String fromId) throws Exception {
if (fromId == null) {
return null;
}
List<String> appTupleList = TimelineReaderUtils.split(fromId);
if (appTupleList == null || appTupleList.size() != 5) {
throw new IllegalArgumentException(
"Invalid row key for application table.");
}
return new TimelineReaderContext(appTupleList.get(0), appTupleList.get(1),
appTupleList.get(2), Long.parseLong(appTupleList.get(3)),
appTupleList.get(4), null, null);
}
},
SUB_APPLICATION_ENTITY_FROMID {
@Override TimelineReaderContext decodeUID(String fromId) throws Exception {
if (fromId == null) {
return null;
}
List<String> split = TimelineReaderUtils.split(fromId);
if (split == null || split.size() != 6) {
throw new IllegalArgumentException(
"Invalid row key for sub app table.");
}
String subAppUserId = split.get(0);
String clusterId = split.get(1);
String entityType = split.get(2);
Long entityIdPrefix = Long.valueOf(split.get(3));
String entityId = split.get(4);
String userId = split.get(5);
return new TimelineReaderContext(clusterId, userId, null, null, null,
entityType, entityIdPrefix, entityId, subAppUserId);
}
},
GENERIC_ENTITY_FROMID {
@Override TimelineReaderContext decodeUID(String fromId) throws Exception {
if (fromId == null) {
return null;
}
List<String> split = TimelineReaderUtils.split(fromId);
if (split == null || split.size() != 8) {
throw new IllegalArgumentException("Invalid row key for entity table.");
}
Long flowRunId = Long.valueOf(split.get(3));
Long entityIdPrefix = Long.valueOf(split.get(6));
return new TimelineReaderContext(split.get(0), split.get(1), split.get(2),
flowRunId, split.get(4), split.get(5), entityIdPrefix, split.get(7));
}
};
/**
* Decodes FROM_ID depending on FROM_ID implementation.
*
* @param fromId FROM_ID to be decoded.
* @return a {@link TimelineReaderContext} object if FROM_ID passed can be
* decoded, null otherwise.
* @throws Exception if any problem occurs while decoding.
*/
abstract TimelineReaderContext decodeUID(String fromId) throws Exception;
}
| TimelineFromIdConverter |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/queries/QueryTest.java | {
"start": 4834,
"end": 5382
} | interface ____ {
@GET
String sendQuery(@QueryParam("foo") String query);
@Path("map")
@GET
String sendQueryMap(@RestQuery Map<String, String> query);
@Path("collection")
@GET
String sendQueryCollection(@QueryParam("foo") List<String> query);
@Path("array")
@GET
String sendQueryArray(@QueryParam("foo") String[] query);
@Path("2")
SubClient querySub(@QueryParam("foo") String query, @QueryParam("foo2") String query2);
}
public | Client |
java | google__guava | android/guava-testlib/test/com/google/common/testing/anotherpackage/SomeClassThatDoesNotUseNullable.java | {
"start": 805,
"end": 1077
} | class ____ {
void packagePrivateButDoesNotCheckNull(String s) {}
protected void protectedButDoesNotCheckNull(String s) {}
public void publicButDoesNotCheckNull(String s) {}
public static void staticButDoesNotCheckNull(String s) {}
}
| SomeClassThatDoesNotUseNullable |
java | spring-projects__spring-framework | framework-docs/src/main/java/org/springframework/docs/integration/cache/cachestoreconfigurationcaffeine/CacheConfiguration.java | {
"start": 951,
"end": 1097
} | class ____ {
// tag::snippet[]
@Bean
CacheManager cacheManager() {
return new CaffeineCacheManager();
}
// end::snippet[]
}
| CacheConfiguration |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/db2/DB2TruncateTest4.java | {
"start": 1037,
"end": 2749
} | class ____ extends DB2Test {
public void test_0() throws Exception {
String sql = "TRUNCATE TABLE INVENTORY REUSE STORAGE IGNORE DELETE TRIGGERS IMMEDIATE";
DB2StatementParser parser = new DB2StatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
DB2SchemaStatVisitor visitor = new DB2SchemaStatVisitor();
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("INVENTORY")));
// assertTrue(visitor.getColumns().contains(new Column("A", "F_0201")));
// assertTrue(visitor.getColumns().contains(new Column("mytable", "first_name")));
// assertTrue(visitor.getColumns().contains(new Column("mytable", "full_name")));
assertEquals("TRUNCATE TABLE INVENTORY REUSE STORAGE IGNORE DELETE TRIGGERS IMMEDIATE", //
SQLUtils.toSQLString(stmt, JdbcConstants.DB2));
assertEquals("truncate table INVENTORY reuse storage ignore delete triggers immediate", //
SQLUtils.toSQLString(stmt, JdbcConstants.DB2, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
}
}
| DB2TruncateTest4 |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java | {
"start": 45447,
"end": 46457
} | class ____ parameter takes values from
*/
public static <T extends Enum<T>> Parameter<T> enumParam(
String name,
boolean updateable,
Function<FieldMapper, T> initializer,
Supplier<T> defaultValue,
Class<T> enumClass
) {
Set<T> acceptedValues = EnumSet.allOf(enumClass);
return restrictedEnumParam(name, updateable, initializer, defaultValue, enumClass, acceptedValues);
}
/**
* Defines a parameter that takes one of a restricted set of values from an enumeration.
*
* @param name the parameter name
* @param updateable whether the parameter can be changed by a mapping update
* @param initializer a function that reads the parameter value from an existing mapper
* @param defaultValue the default value, to be used if the parameter is undefined in a mapping
* @param enumClass the enumeration | the |
java | quarkusio__quarkus | extensions/security/deployment/src/main/java/io/quarkus/security/deployment/SecurityAnnotationsRegistrar.java | {
"start": 485,
"end": 1217
} | class ____ implements InterceptorBindingRegistrar {
static final List<InterceptorBinding> SECURITY_BINDINGS = List.of(
// keep the contents the same as in io.quarkus.resteasy.deployment.SecurityTransformerUtils
InterceptorBinding.of(RolesAllowed.class, Collections.singleton("value")),
InterceptorBinding.of(PermissionsAllowed.class, Set.of("value", "params", "permission", "inclusive")),
InterceptorBinding.of(Authenticated.class),
InterceptorBinding.of(DenyAll.class),
InterceptorBinding.of(PermitAll.class));
@Override
public List<InterceptorBinding> getAdditionalBindings() {
return SECURITY_BINDINGS;
}
}
| SecurityAnnotationsRegistrar |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/config/ConfigDef.java | {
"start": 4051,
"end": 4208
} | class ____ be used standalone or in combination with {@link AbstractConfig} which provides some additional
* functionality for accessing configs.
*/
public | can |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/batch/BatchAndClassIdCollectionTest.java | {
"start": 3895,
"end": 4492
} | class ____ implements Serializable {
private long id;
public IdClass() {
}
public IdClass(long id) {
this.id = id;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
Parent.IdClass idClass = (Parent.IdClass) o;
return id == idClass.id;
}
@Override
public int hashCode() {
return (int) ( id ^ ( id >>> 32 ) );
}
}
}
@Entity(name = "Parent")
@Table(name = "parents")
@IdClass(Parent.IdClass.class)
public static | IdClass |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PrivateConstructorForUtilityClassTest.java | {
"start": 5166,
"end": 5337
} | class ____ {
static {
}
}
""")
.addOutputLines(
"out/Test.java",
"""
final | Test |
java | google__dagger | javatests/dagger/internal/codegen/AssistedFactoryErrorsTest.java | {
"start": 977,
"end": 1571
} | class ____ {
@Parameters(name = "{0}")
public static ImmutableCollection<Object[]> parameters() {
return CompilerMode.TEST_PARAMETERS;
}
private final CompilerMode compilerMode;
public AssistedFactoryErrorsTest(CompilerMode compilerMode) {
this.compilerMode = compilerMode;
}
@Test
public void testFactoryNotAbstract() {
Source foo =
CompilerTests.javaSource(
"test.Factory",
"package test;",
"",
"import dagger.assisted.AssistedFactory;",
"",
"@AssistedFactory | AssistedFactoryErrorsTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/impl/ManifestStoreOperationsThroughFileSystem.java | {
"start": 1553,
"end": 1742
} | class ____ subclassed in the ABFS module, which does add the resilient
* commit method.
*/
@InterfaceAudience.LimitedPrivate("mapreduce, object-stores")
@InterfaceStability.Unstable
public | is |
java | apache__camel | components/camel-jackson/src/test/java/org/apache/camel/component/jackson/SpringJacksonEnableFeatureTest.java | {
"start": 1169,
"end": 1908
} | class ____ extends CamelSpringTestSupport {
@Test
public void testMarshal() {
TestPojoView in = new TestPojoView();
Object marshalled = template.requestBody("direct:in", in);
String marshalledAsString = context.getTypeConverter().convertTo(String.class, marshalled);
// we enable the wrap root type feature so we should have TestPojoView
assertEquals("{\"TestPojoView\":{\"age\":30,\"height\":190,\"weight\":70}}", marshalledAsString);
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/jackson/SpringJacksonEnableFeatureTest.xml");
}
}
| SpringJacksonEnableFeatureTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/Transform.java | {
"start": 669,
"end": 807
} | interface ____ extends ToXContentObject {
ParseField TRANSFORM = new ParseField("transform");
String type();
abstract | Transform |
java | apache__rocketmq | test/src/test/java/org/apache/rocketmq/test/client/consumer/cluster/DynamicCrashConsumerIT.java | {
"start": 1597,
"end": 4167
} | class ____ extends BaseConf {
private static Logger logger = LoggerFactory.getLogger(NormalMsgStaticBalanceIT.class);
private RMQNormalProducer producer = null;
private String topic = null;
@Before
public void setUp() {
topic = initTopic();
logger.info(String.format("use topic: %s !", topic));
producer = getProducer(NAMESRV_ADDR, topic);
}
@After
public void tearDown() {
super.shutdown();
}
@Test
public void testAddOneConsumer() {
int msgSize = 100;
RMQNormalConsumer consumer1 = getConsumer(NAMESRV_ADDR, topic, "*", new RMQNormalListener());
RMQNormalConsumer consumer2 = getConsumer(NAMESRV_ADDR, consumer1.getConsumerGroup(), topic,
"*", new RMQNormalListener());
MQAsyncProducer asyncDefaultMQProducer = new MQAsyncProducer(producer, msgSize, 100);
asyncDefaultMQProducer.start();
TestUtils.waitForSeconds(WAIT_TIME);
consumer2.shutdown();
asyncDefaultMQProducer.waitSendAll(WAIT_TIME * 6);
MQWait.waitConsumeAll(CONSUME_TIME, producer.getAllMsgBody(), consumer1.getListener(),
consumer2.getListener());
boolean recvAll = MQWait.waitConsumeAll(CONSUME_TIME, producer.getAllMsgBody(),
consumer1.getListener(), consumer2.getListener());
assertThat(recvAll).isEqualTo(true);
}
@Test
public void testAddTwoConsumer() {
int msgSize = 100;
RMQNormalConsumer consumer1 = getConsumer(NAMESRV_ADDR, topic, "*", new RMQNormalListener());
RMQNormalConsumer consumer2 = getConsumer(NAMESRV_ADDR, consumer1.getConsumerGroup(), topic,
"*", new RMQNormalListener());
RMQNormalConsumer consumer3 = getConsumer(NAMESRV_ADDR, consumer1.getConsumerGroup(), topic,
"*", new RMQNormalListener());
MQAsyncProducer asyncDefaultMQProducer = new MQAsyncProducer(producer, msgSize, 100);
asyncDefaultMQProducer.start();
TestUtils.waitForSeconds(WAIT_TIME);
consumer2.shutdown();
consumer3.shutdown();
asyncDefaultMQProducer.waitSendAll(WAIT_TIME * 6);
MQWait.waitConsumeAll(CONSUME_TIME, producer.getAllMsgBody(), consumer1.getListener(),
consumer2.getListener(), consumer3.getListener());
boolean recvAll = MQWait.waitConsumeAll(CONSUME_TIME, producer.getAllMsgBody(),
consumer1.getListener(), consumer2.getListener(), consumer3.getListener());
assertThat(recvAll).isEqualTo(true);
}
}
| DynamicCrashConsumerIT |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertHasSameSizeAs_with_Iterable_Test.java | {
"start": 1222,
"end": 2233
} | class ____ extends ByteArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
ThrowingCallable code = () -> arrays.assertHasSameSizeAs(someInfo(), actual, list("Solo", "Leia"));
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessage(actualIsNull());
}
@Test
void should_fail_if_size_of_actual_is_not_equal_to_expected_size() {
// GIVEN
AssertionInfo info = someInfo();
List<String> other = list("Solo", "Leia");
// WHEN
ThrowingCallable code = () -> arrays.assertHasSameSizeAs(info, actual, other);
// THEN
String error = shouldHaveSameSizeAs(actual, other, actual.length, other.size()).create(null, info.representation());
assertThatAssertionErrorIsThrownBy(code).withMessage(error);
}
@Test
void should_pass_if_size_of_actual_is_equal_to_expected_size() {
arrays.assertHasSameSizeAs(someInfo(), actual, list("Solo", "Leia", "Luke"));
}
}
| ByteArrays_assertHasSameSizeAs_with_Iterable_Test |
java | apache__rocketmq | remoting/src/test/java/org/apache/rocketmq/remoting/protocol/body/CheckClientRequestBodyTest.java | {
"start": 1086,
"end": 2017
} | class ____ {
@Test
public void testFromJson() {
SubscriptionData subscriptionData = new SubscriptionData();
String expectedClientId = "defalutId";
String expectedGroup = "defaultGroup";
CheckClientRequestBody checkClientRequestBody = new CheckClientRequestBody();
checkClientRequestBody.setClientId(expectedClientId);
checkClientRequestBody.setGroup(expectedGroup);
checkClientRequestBody.setSubscriptionData(subscriptionData);
String json = RemotingSerializable.toJson(checkClientRequestBody, true);
CheckClientRequestBody fromJson = RemotingSerializable.fromJson(json, CheckClientRequestBody.class);
assertThat(fromJson.getClientId()).isEqualTo(expectedClientId);
assertThat(fromJson.getGroup()).isEqualTo(expectedGroup);
assertThat(fromJson.getSubscriptionData()).isEqualTo(subscriptionData);
}
}
| CheckClientRequestBodyTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/vectors/cluster/KMeansLocal.java | {
"start": 1253,
"end": 18491
} | class ____ {
// the minimum distance that is considered to be "far enough" to a centroid in order to compute the soar distance.
// For vectors that are closer than this distance to the centroid don't get spilled because they are well represented
// by the centroid itself. In many cases, it indicates a degenerated distribution, e.g the cluster is composed of the
// many equal vectors.
private static final float SOAR_MIN_DISTANCE = 1e-16f;
final int sampleSize;
final int maxIterations;
KMeansLocal(int sampleSize, int maxIterations) {
this.sampleSize = sampleSize;
this.maxIterations = maxIterations;
}
/**
* uses a Reservoir Sampling approach to picking the initial centroids which are subsequently expected
* to be used by a clustering algorithm
*
* @param vectors used to pick an initial set of random centroids
* @param centroidCount the total number of centroids to pick
* @return randomly selected centroids that are the min of centroidCount and sampleSize
* @throws IOException is thrown if vectors is inaccessible
*/
static float[][] pickInitialCentroids(FloatVectorValues vectors, int centroidCount) throws IOException {
Random random = new Random(42L);
int centroidsSize = Math.min(vectors.size(), centroidCount);
float[][] centroids = new float[centroidsSize][vectors.dimension()];
for (int i = 0; i < vectors.size(); i++) {
float[] vector;
if (i < centroidCount) {
vector = vectors.vectorValue(i);
System.arraycopy(vector, 0, centroids[i], 0, vector.length);
} else if (random.nextDouble() < centroidCount * (1.0 / i)) {
int c = random.nextInt(centroidCount);
vector = vectors.vectorValue(i);
System.arraycopy(vector, 0, centroids[c], 0, vector.length);
}
}
return centroids;
}
private static boolean stepLloyd(
FloatVectorValues vectors,
IntToIntFunction translateOrd,
float[][] centroids,
FixedBitSet centroidChanged,
int[] centroidCounts,
int[] assignments,
NeighborHood[] neighborhoods
) throws IOException {
boolean changed = false;
int dim = vectors.dimension();
centroidChanged.clear();
final float[] distances = new float[4];
for (int idx = 0; idx < vectors.size(); idx++) {
float[] vector = vectors.vectorValue(idx);
int vectorOrd = translateOrd.apply(idx);
final int assignment = assignments[vectorOrd];
final int bestCentroidOffset;
if (neighborhoods != null) {
bestCentroidOffset = getBestCentroidFromNeighbours(centroids, vector, assignment, neighborhoods[assignment], distances);
} else {
bestCentroidOffset = getBestCentroid(centroids, vector, distances);
}
if (assignment != bestCentroidOffset) {
if (assignment != -1) {
centroidChanged.set(assignment);
}
centroidChanged.set(bestCentroidOffset);
assignments[vectorOrd] = bestCentroidOffset;
changed = true;
}
}
if (changed) {
Arrays.fill(centroidCounts, 0);
for (int idx = 0; idx < vectors.size(); idx++) {
final int assignment = assignments[translateOrd.apply(idx)];
if (centroidChanged.get(assignment)) {
float[] centroid = centroids[assignment];
if (centroidCounts[assignment]++ == 0) {
Arrays.fill(centroid, 0.0f);
}
float[] vector = vectors.vectorValue(idx);
for (int d = 0; d < dim; d++) {
centroid[d] += vector[d];
}
}
}
for (int clusterIdx = 0; clusterIdx < centroids.length; clusterIdx++) {
if (centroidChanged.get(clusterIdx)) {
float count = (float) centroidCounts[clusterIdx];
if (count > 0) {
float[] centroid = centroids[clusterIdx];
for (int d = 0; d < dim; d++) {
centroid[d] /= count;
}
}
}
}
}
return changed;
}
private static int getBestCentroidFromNeighbours(
float[][] centroids,
float[] vector,
int centroidIdx,
NeighborHood neighborhood,
float[] distances
) {
final int limit = neighborhood.neighbors().length - 3;
int bestCentroidOffset = centroidIdx;
assert centroidIdx >= 0 && centroidIdx < centroids.length;
float minDsq = VectorUtil.squareDistance(vector, centroids[centroidIdx]);
int i = 0;
for (; i < limit; i += 4) {
if (minDsq < neighborhood.maxIntraDistance()) {
// if the distance found is smaller than the maximum intra-cluster distance
// we don't consider it for further re-assignment
return bestCentroidOffset;
}
ESVectorUtil.squareDistanceBulk(
vector,
centroids[neighborhood.neighbors()[i]],
centroids[neighborhood.neighbors()[i + 1]],
centroids[neighborhood.neighbors()[i + 2]],
centroids[neighborhood.neighbors()[i + 3]],
distances
);
for (int j = 0; j < distances.length; j++) {
float dsq = distances[j];
if (dsq < minDsq) {
minDsq = dsq;
bestCentroidOffset = neighborhood.neighbors()[i + j];
}
}
}
for (; i < neighborhood.neighbors().length; i++) {
if (minDsq < neighborhood.maxIntraDistance()) {
// if the distance found is smaller than the maximum intra-cluster distance
// we don't consider it for further re-assignment
return bestCentroidOffset;
}
int offset = neighborhood.neighbors()[i];
// float score = neighborhood.scores[i];
assert offset >= 0 && offset < centroids.length : "Invalid neighbor offset: " + offset;
// compute the distance to the centroid
float dsq = VectorUtil.squareDistance(vector, centroids[offset]);
if (dsq < minDsq) {
minDsq = dsq;
bestCentroidOffset = offset;
}
}
return bestCentroidOffset;
}
private static int getBestCentroid(float[][] centroids, float[] vector, float[] distances) {
final int limit = centroids.length - 3;
int bestCentroidOffset = 0;
float minDsq = Float.MAX_VALUE;
int i = 0;
for (; i < limit; i += 4) {
ESVectorUtil.squareDistanceBulk(vector, centroids[i], centroids[i + 1], centroids[i + 2], centroids[i + 3], distances);
for (int j = 0; j < distances.length; j++) {
float dsq = distances[j];
if (dsq < minDsq) {
minDsq = dsq;
bestCentroidOffset = i + j;
}
}
}
for (; i < centroids.length; i++) {
float dsq = VectorUtil.squareDistance(vector, centroids[i]);
if (dsq < minDsq) {
minDsq = dsq;
bestCentroidOffset = i;
}
}
return bestCentroidOffset;
}
private void assignSpilled(
FloatVectorValues vectors,
KMeansIntermediate kmeansIntermediate,
NeighborHood[] neighborhoods,
float soarLambda
) throws IOException {
// SOAR uses an adjusted distance for assigning spilled documents which is
// given by:
//
// soar(x, c) = ||x - c||^2 + lambda * ((x - c_1)^t (x - c))^2 / ||x - c_1||^2
//
// Here, x is the document, c is the nearest centroid, and c_1 is the first
// centroid the document was assigned to. The document is assigned to the
// cluster with the smallest soar(x, c).
int[] assignments = kmeansIntermediate.assignments();
assert assignments != null;
assert assignments.length == vectors.size();
int[] spilledAssignments = kmeansIntermediate.soarAssignments();
assert spilledAssignments != null;
assert spilledAssignments.length == vectors.size();
float[][] centroids = kmeansIntermediate.centroids();
float[] diffs = new float[vectors.dimension()];
final float[] distances = new float[4];
for (int i = 0; i < vectors.size(); i++) {
float[] vector = vectors.vectorValue(i);
int currAssignment = assignments[i];
float[] currentCentroid = centroids[currAssignment];
// TODO: cache these?
float vectorCentroidDist = VectorUtil.squareDistance(vector, currentCentroid);
if (vectorCentroidDist <= SOAR_MIN_DISTANCE) {
spilledAssignments[i] = NO_SOAR_ASSIGNMENT; // no SOAR assignment
continue;
}
for (int j = 0; j < vectors.dimension(); j++) {
diffs[j] = vector[j] - currentCentroid[j];
}
final int centroidCount;
final IntToIntFunction centroidOrds;
if (neighborhoods != null) {
assert neighborhoods[currAssignment] != null;
NeighborHood neighborhood = neighborhoods[currAssignment];
centroidCount = neighborhood.neighbors().length;
centroidOrds = c -> neighborhood.neighbors()[c];
} else {
centroidCount = centroids.length - 1;
centroidOrds = c -> c < currAssignment ? c : c + 1; // skip the current centroid
}
final int limit = centroidCount - 3;
int bestAssignment = -1;
float minSoar = Float.MAX_VALUE;
int j = 0;
for (; j < limit; j += 4) {
ESVectorUtil.soarDistanceBulk(
vector,
centroids[centroidOrds.apply(j)],
centroids[centroidOrds.apply(j + 1)],
centroids[centroidOrds.apply(j + 2)],
centroids[centroidOrds.apply(j + 3)],
diffs,
soarLambda,
vectorCentroidDist,
distances
);
for (int k = 0; k < distances.length; k++) {
float soar = distances[k];
if (soar < minSoar) {
minSoar = soar;
bestAssignment = centroidOrds.apply(j + k);
}
}
}
for (; j < centroidCount; j++) {
int centroidOrd = centroidOrds.apply(j);
float soar = ESVectorUtil.soarDistance(vector, centroids[centroidOrd], diffs, soarLambda, vectorCentroidDist);
if (soar < minSoar) {
minSoar = soar;
bestAssignment = centroidOrd;
}
}
assert bestAssignment != -1 : "Failed to assign soar vector to centroid";
spilledAssignments[i] = bestAssignment;
}
}
/**
* cluster using a lloyd k-means algorithm that is not neighbor aware
*
* @param vectors the vectors to cluster
* @param kMeansIntermediate the output object to populate which minimally includes centroids,
* but may include assignments and soar assignments as well; care should be taken in
* passing in a valid output object with a centroids array that is the size of centroids expected
* @throws IOException is thrown if vectors is inaccessible
*/
void cluster(FloatVectorValues vectors, KMeansIntermediate kMeansIntermediate) throws IOException {
doCluster(vectors, kMeansIntermediate, -1, -1);
}
/**
* cluster using a lloyd kmeans algorithm that also considers prior clustered neighborhoods when adjusting centroids
* this also is used to generate the neighborhood aware additional (SOAR) assignments
*
* @param vectors the vectors to cluster
* @param kMeansIntermediate the output object to populate which minimally includes centroids,
* the prior assignments of the given vectors; care should be taken in
* passing in a valid output object with a centroids array that is the size of centroids expected
* and assignments that are the same size as the vectors. The SOAR assignments are overwritten by this operation.
* @param clustersPerNeighborhood number of nearby neighboring centroids to be used to update the centroid positions.
* @param soarLambda lambda used for SOAR assignments
*
* @throws IOException is thrown if vectors is inaccessible or if the clustersPerNeighborhood is less than 2
*/
void cluster(FloatVectorValues vectors, KMeansIntermediate kMeansIntermediate, int clustersPerNeighborhood, float soarLambda)
throws IOException {
if (clustersPerNeighborhood < 2) {
throw new IllegalArgumentException("clustersPerNeighborhood must be at least 2, got [" + clustersPerNeighborhood + "]");
}
doCluster(vectors, kMeansIntermediate, clustersPerNeighborhood, soarLambda);
}
private void doCluster(FloatVectorValues vectors, KMeansIntermediate kMeansIntermediate, int clustersPerNeighborhood, float soarLambda)
throws IOException {
float[][] centroids = kMeansIntermediate.centroids();
boolean neighborAware = clustersPerNeighborhood != -1 && centroids.length > 1;
NeighborHood[] neighborhoods = null;
// if there are very few centroids, don't bother with neighborhoods or neighbor aware clustering
if (neighborAware && centroids.length > clustersPerNeighborhood) {
neighborhoods = NeighborHood.computeNeighborhoods(centroids, clustersPerNeighborhood);
}
cluster(vectors, kMeansIntermediate, neighborhoods);
if (neighborAware && soarLambda >= 0) {
assert kMeansIntermediate.soarAssignments().length == 0;
kMeansIntermediate.setSoarAssignments(new int[vectors.size()]);
assignSpilled(vectors, kMeansIntermediate, neighborhoods, soarLambda);
}
}
private void cluster(FloatVectorValues vectors, KMeansIntermediate kMeansIntermediate, NeighborHood[] neighborhoods)
throws IOException {
float[][] centroids = kMeansIntermediate.centroids();
int k = centroids.length;
int n = vectors.size();
int[] assignments = kMeansIntermediate.assignments();
if (k == 1) {
Arrays.fill(assignments, 0);
return;
}
IntToIntFunction translateOrd = i -> i;
FloatVectorValues sampledVectors = vectors;
if (sampleSize < n) {
sampledVectors = SampleReader.createSampleReader(vectors, sampleSize, 42L);
translateOrd = sampledVectors::ordToDoc;
}
assert assignments.length == n;
FixedBitSet centroidChanged = new FixedBitSet(centroids.length);
int[] centroidCounts = new int[centroids.length];
for (int i = 0; i < maxIterations; i++) {
// This is potentially sampled, so we need to translate ordinals
if (stepLloyd(sampledVectors, translateOrd, centroids, centroidChanged, centroidCounts, assignments, neighborhoods) == false) {
break;
}
}
// If we were sampled, do a once over the full set of vectors to finalize the centroids
if (sampleSize < n || maxIterations == 0) {
// No ordinal translation needed here, we are using the full set of vectors
stepLloyd(vectors, i -> i, centroids, centroidChanged, centroidCounts, assignments, neighborhoods);
}
}
/**
* helper that calls {@link KMeansLocal#cluster(FloatVectorValues, KMeansIntermediate)} given a set of initialized centroids,
* this call is not neighbor aware
*
* @param vectors the vectors to cluster
* @param centroids the initialized centroids to be shifted using k-means
* @param sampleSize the subset of vectors to use when shifting centroids
* @param maxIterations the max iterations to shift centroids
*/
public static void cluster(FloatVectorValues vectors, float[][] centroids, int sampleSize, int maxIterations) throws IOException {
KMeansIntermediate kMeansIntermediate = new KMeansIntermediate(centroids, new int[vectors.size()], vectors::ordToDoc);
KMeansLocal kMeans = new KMeansLocal(sampleSize, maxIterations);
kMeans.cluster(vectors, kMeansIntermediate);
}
}
| KMeansLocal |
java | apache__kafka | storage/src/main/java/org/apache/kafka/server/log/remote/storage/CustomMetadataSizeLimitExceededException.java | {
"start": 859,
"end": 928
} | class ____ extends Exception {
}
| CustomMetadataSizeLimitExceededException |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java | {
"start": 1124,
"end": 4182
} | class ____ {
private ActionTestUtils() { /* no construction */ }
public static <Request extends ActionRequest, Response extends ActionResponse> Response executeBlocking(
TransportAction<Request, Response> action,
Request request
) {
return ESTestCase.safeAwait(
future -> action.execute(request.createTask(1L, "direct", action.actionName, TaskId.EMPTY_TASK_ID, Map.of()), request, future)
);
}
public static <Request extends ActionRequest, Response extends ActionResponse> Response executeBlockingWithTask(
TaskManager taskManager,
Transport.Connection localConnection,
TransportAction<Request, Response> action,
Request request
) {
return ESTestCase.safeAwait(future -> taskManager.registerAndExecute("transport", action, request, localConnection, future));
}
/**
* Executes the given action.
*
* This is a shim method to make execution publicly available in tests.
*/
public static <Request extends ActionRequest, Response extends ActionResponse> void execute(
TransportAction<Request, Response> action,
Task task,
Request request,
ActionListener<Response> listener
) {
action.execute(task, request, listener);
}
public static <Request extends ActionRequest, Response extends ActionResponse> void execute(
TransportAction<Request, Response> action,
Request request,
ActionListener<Response> listener
) {
action.execute(request.createTask(1L, "direct", action.actionName, TaskId.EMPTY_TASK_ID, Map.of()), request, listener);
}
public static <T> ActionListener<T> assertNoFailureListener(CheckedConsumer<T, Exception> consumer) {
return ActionListener.wrap(consumer, ESTestCase::fail);
}
public static <T> ActionListener<T> assertNoSuccessListener(Consumer<Exception> consumer) {
return new ActionListener<>() {
@Override
public void onResponse(T result) {
fail(null, "unexpected success with result [%s] while expecting to handle failure with [%s]", result, consumer);
}
@Override
public void onFailure(Exception e) {
try {
consumer.accept(e);
} catch (Exception e2) {
if (e2 != e) {
e2.addSuppressed(e);
}
fail(e2, "unexpected failure in onFailure handler for [%s]", consumer);
}
}
};
}
public static ResponseListener wrapAsRestResponseListener(ActionListener<Response> listener) {
return new ResponseListener() {
@Override
public void onSuccess(Response response) {
listener.onResponse(response);
}
@Override
public void onFailure(Exception exception) {
listener.onFailure(exception);
}
};
}
}
| ActionTestUtils |
java | grpc__grpc-java | okhttp/src/main/java/io/grpc/okhttp/SslSocketFactoryChannelCredentials.java | {
"start": 915,
"end": 1216
} | class ____ {
private SslSocketFactoryChannelCredentials() {}
public static io.grpc.ChannelCredentials create(SSLSocketFactory factory) {
return new ChannelCredentials(factory);
}
// Hide implementation detail of how these credentials operate
static final | SslSocketFactoryChannelCredentials |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/assertion/RecursiveAssertionAssert_allFieldsSatisfy_with_ignoringFieldsOfTypes_Test.java | {
"start": 1979,
"end": 2197
} | class ____ {
String name;
String occupation;
Address address = new Address();
Person(String name, String occupation) {
this.name = name;
this.occupation = occupation;
}
}
static | Person |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/annotation/EnableJms.java | {
"start": 3443,
"end": 3966
} | class ____ {
*
* @Bean
* public MyService myService() {
* return new MyService();
* }
*
* // JMS infrastructure setup
* }</pre>
*
* <p>Alternatively, if {@code MyService} were annotated with {@code @Component}, the
* following configuration would ensure that its {@code @JmsListener} annotated
* method is invoked with a matching incoming message:
*
* <pre class="code">
* @Configuration
* @EnableJms
* @ComponentScan(basePackages="com.acme.foo")
* public | AppConfig |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobCacheService.java | {
"start": 1252,
"end": 4306
} | class ____ implements TaskExecutorBlobService {
/** Caching store for permanent BLOBs. */
private final PermanentBlobCache permanentBlobCache;
/** Store for transient BLOB files. */
private final TransientBlobCache transientBlobCache;
public BlobCacheService(
final Configuration blobClientConfig,
final File storageDir,
final BlobView blobView,
@Nullable final InetSocketAddress serverAddress)
throws IOException {
this(blobClientConfig, Reference.owned(storageDir), blobView, serverAddress);
}
/**
* Instantiates a new BLOB cache.
*
* @param blobClientConfig global configuration
* @param storageDir storage directory for the cached blobs
* @param blobView (distributed) blob store file system to retrieve files from first
* @param serverAddress address of the {@link BlobServer} to use for fetching files from or
* {@code null} if none yet
* @throws IOException thrown if the (local or distributed) file storage cannot be created or is
* not usable
*/
public BlobCacheService(
final Configuration blobClientConfig,
final Reference<File> storageDir,
final BlobView blobView,
@Nullable final InetSocketAddress serverAddress)
throws IOException {
this(
new PermanentBlobCache(blobClientConfig, storageDir, blobView, serverAddress),
new TransientBlobCache(blobClientConfig, storageDir, serverAddress));
}
/**
* Instantiates a new BLOB cache.
*
* @param permanentBlobCache BLOB cache to use for permanent BLOBs
* @param transientBlobCache BLOB cache to use for transient BLOBs
*/
public BlobCacheService(
PermanentBlobCache permanentBlobCache, TransientBlobCache transientBlobCache) {
this.permanentBlobCache = checkNotNull(permanentBlobCache);
this.transientBlobCache = checkNotNull(transientBlobCache);
}
@Override
public PermanentBlobCache getPermanentBlobService() {
return permanentBlobCache;
}
@Override
public TransientBlobCache getTransientBlobService() {
return transientBlobCache;
}
/**
* Sets the address of the {@link BlobServer}.
*
* @param blobServerAddress address of the {@link BlobServer}.
*/
public void setBlobServerAddress(InetSocketAddress blobServerAddress) {
permanentBlobCache.setBlobServerAddress(blobServerAddress);
transientBlobCache.setBlobServerAddress(blobServerAddress);
}
@Override
public void close() throws IOException {
permanentBlobCache.close();
transientBlobCache.close();
}
@Override
public int getPort() {
// NOTE: both blob stores connect to the same server!
return permanentBlobCache.getPort();
}
@Override
public InetAddress getAddress() {
return permanentBlobCache.serverAddress.getAddress();
}
}
| BlobCacheService |
java | apache__rocketmq | test/src/main/java/org/apache/rocketmq/test/factory/ConsumerFactory.java | {
"start": 1369,
"end": 3816
} | class ____ {
public static RMQNormalConsumer getRMQNormalConsumer(String nsAddr, String consumerGroup,
String topic, String subExpression,
AbstractListener listener) {
return getRMQNormalConsumer(nsAddr, consumerGroup, topic, subExpression, listener, false);
}
public static RMQNormalConsumer getRMQNormalConsumer(String nsAddr, String consumerGroup,
String topic, String subExpression,
AbstractListener listener, boolean useTLS) {
RMQNormalConsumer consumer = new RMQNormalConsumer(nsAddr, topic, subExpression,
consumerGroup, listener);
consumer.create(useTLS);
consumer.start();
return consumer;
}
public static RMQBroadCastConsumer getRMQBroadCastConsumer(String nsAddr, String consumerGroup,
String topic, String subExpression,
AbstractListener listner) {
RMQBroadCastConsumer consumer = new RMQBroadCastConsumer(nsAddr, topic, subExpression,
consumerGroup, listner);
consumer.create();
consumer.start();
return consumer;
}
public static RMQSqlConsumer getRMQSqlConsumer(String nsAddr, String consumerGroup,
String topic, MessageSelector selector,
AbstractListener listner) {
RMQSqlConsumer consumer = new RMQSqlConsumer(nsAddr, topic, selector,
consumerGroup, listner);
consumer.create();
consumer.start();
return consumer;
}
public static RMQPopConsumer getRMQPopConsumer(String nsAddr, String consumerGroup,
String topic, String subExpression, AbstractListener listener) {
RMQPopConsumer consumer = new RMQPopConsumer(nsAddr, topic, subExpression, consumerGroup, listener);
consumer.create();
consumer.start();
return consumer;
}
public static RMQPopClient getRMQPopClient() {
RMQPopClient client = new RMQPopClient();
client.create();
client.start();
return client;
}
public static DefaultMQPullConsumer getRMQPullConsumer(String nsAddr, String consumerGroup) throws Exception {
DefaultMQPullConsumer defaultMQPullConsumer = new DefaultMQPullConsumer(consumerGroup);
defaultMQPullConsumer.setInstanceName(UUID.randomUUID().toString());
defaultMQPullConsumer.setNamesrvAddr(nsAddr);
defaultMQPullConsumer.start();
return defaultMQPullConsumer;
}
}
| ConsumerFactory |
java | quarkusio__quarkus | extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientInjectionDevServicesTest.java | {
"start": 758,
"end": 2056
} | class ____ {
@RegisterExtension
final static QuarkusUnitTest app = new QuarkusUnitTest()
.withApplicationRoot(jar -> jar
.addClasses(AdminResource.class)
.addAsResource("app-dev-mode-config.properties", "application.properties"))
// intention of this forced dependency is to test backwards compatibility
// when users started Keycloak Dev Service by adding OIDC extension and configured 'server-url'
.setForcedDependencies(
List.of(Dependency.of("io.quarkus", "quarkus-oidc-deployment", Version.getVersion())));
@Test
public void testGetRoles() {
// use 'password' grant type
final Response getRolesReq = RestAssured.given().get("/api/admin/roles");
assertEquals(200, getRolesReq.statusCode());
final List<RoleRepresentation> roles = getRolesReq.jsonPath().getList(".", RoleRepresentation.class);
assertNotNull(roles);
// assert there are roles admin and user (among others)
assertTrue(roles.stream().anyMatch(rr -> "user".equals(rr.getName())));
assertTrue(roles.stream().anyMatch(rr -> "admin".equals(rr.getName())));
}
@Path("/api/admin")
public static | KeycloakAdminClientInjectionDevServicesTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java | {
"start": 6625,
"end": 8961
} | interface ____ {
int expectedValue(String str, String substr, Integer start);
}
private static TestCaseSupplier supplier(
String name,
DataType strType,
DataType substrType,
Supplier<String> strValueSupplier,
Function<String, String> substrValueSupplier,
@Nullable Supplier<Integer> startSupplier,
ExpectedValue expectedValue
) {
List<DataType> types = types(strType, substrType, startSupplier != null);
return new TestCaseSupplier(name + TestCaseSupplier.nameFromTypes(types), types, () -> {
String str = strValueSupplier.get();
String substr = substrValueSupplier.apply(str);
Integer start = startSupplier == null ? null : startSupplier.get();
return testCase(strType, substrType, str, substr, start, expectedValue.expectedValue(str, substr, start));
});
}
private static String expectedToString(boolean hasStart) {
if (hasStart) {
return "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]";
}
return "LocateNoStartEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1]]";
}
private static List<DataType> types(DataType firstType, DataType secondType, boolean hasStart) {
List<DataType> types = new ArrayList<>();
types.add(firstType);
types.add(secondType);
if (hasStart) {
types.add(DataType.INTEGER);
}
return types;
}
private static TestCaseSupplier.TestCase testCase(
DataType strType,
DataType substrType,
String str,
String substr,
Integer start,
Integer expectedValue
) {
List<TestCaseSupplier.TypedData> values = new ArrayList<>();
values.add(new TestCaseSupplier.TypedData(str == null ? null : new BytesRef(str), strType, "str"));
values.add(new TestCaseSupplier.TypedData(substr == null ? null : new BytesRef(substr), substrType, "substr"));
if (start != null) {
values.add(new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"));
}
return new TestCaseSupplier.TestCase(values, expectedToString(start != null), DataType.INTEGER, equalTo(expectedValue));
}
}
| ExpectedValue |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/MapWithParamConverterTest.java | {
"start": 988,
"end": 1749
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClasses(HelloResource.class, MapParamConverter.class, MapParamConverterProvider.class));
@Test
public void noQueryParams() {
RestAssured.get("/hello")
.then().statusCode(200).body(Matchers.equalTo(""));
}
@Test
public void jsonQueryParam() {
RestAssured
.with()
.queryParam("param", "{\"a\":\"1\",\"b\":\"2\"}")
.get("/hello")
.then().statusCode(200).body(Matchers.equalTo("a:1-b:2"));
}
@Path("hello")
public static | MapWithParamConverterTest |
java | apache__flink | flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/result/DynamicResult.java | {
"start": 998,
"end": 1103
} | interface ____ {
/** Closes the retrieval and all involved threads. */
void close();
}
| DynamicResult |
java | apache__camel | components/camel-docker/src/test/java/org/apache/camel/component/docker/headers/InfoCmdHeaderTest.java | {
"start": 1144,
"end": 1723
} | class ____ extends BaseDockerHeaderTest<InfoCmd> {
@Mock
private InfoCmd mockObject;
@Test
void infoHeaderTest() {
Map<String, Object> headers = getDefaultParameters();
template.sendBodyAndHeaders("direct:in", "", headers);
Mockito.verify(dockerClient, Mockito.times(1)).infoCmd();
}
@Override
protected void setupMocks() {
Mockito.when(dockerClient.infoCmd()).thenReturn(mockObject);
}
@Override
protected DockerOperation getOperation() {
return DockerOperation.INFO;
}
}
| InfoCmdHeaderTest |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/single/SingleUsingTest.java | {
"start": 1176,
"end": 13206
} | class ____ extends RxJavaTest {
Function<Disposable, Single<Integer>> mapper = new Function<Disposable, Single<Integer>>() {
@Override
public Single<Integer> apply(Disposable d) throws Exception {
return Single.just(1);
}
};
Function<Disposable, Single<Integer>> mapperThrows = new Function<Disposable, Single<Integer>>() {
@Override
public Single<Integer> apply(Disposable d) throws Exception {
throw new TestException("Mapper");
}
};
Consumer<Disposable> disposer = new Consumer<Disposable>() {
@Override
public void accept(Disposable d) throws Exception {
d.dispose();
}
};
Consumer<Disposable> disposerThrows = new Consumer<Disposable>() {
@Override
public void accept(Disposable d) throws Exception {
throw new TestException("Disposer");
}
};
@Test
public void resourceSupplierThrows() {
Single.using(new Supplier<Integer>() {
@Override
public Integer get() throws Exception {
throw new TestException();
}
}, Functions.justFunction(Single.just(1)), Functions.emptyConsumer())
.test()
.assertFailure(TestException.class);
}
@Test
public void normalEager() {
Single.using(Functions.justSupplier(1), Functions.justFunction(Single.just(1)), Functions.emptyConsumer())
.test()
.assertResult(1);
}
@Test
public void normalNonEager() {
Single.using(Functions.justSupplier(1), Functions.justFunction(Single.just(1)), Functions.emptyConsumer(), false)
.test()
.assertResult(1);
}
@Test
public void errorEager() {
Single.using(Functions.justSupplier(1), Functions.justFunction(Single.error(new TestException())), Functions.emptyConsumer())
.test()
.assertFailure(TestException.class);
}
@Test
public void errorNonEager() {
Single.using(Functions.justSupplier(1), Functions.justFunction(Single.error(new TestException())), Functions.emptyConsumer(), false)
.test()
.assertFailure(TestException.class);
}
@Test
public void eagerMapperThrowsDisposerThrows() {
TestObserverEx<Integer> to = Single.using(Functions.justSupplier(Disposable.empty()), mapperThrows, disposerThrows)
.to(TestHelper.<Integer>testConsumer())
.assertFailure(CompositeException.class);
List<Throwable> ce = TestHelper.compositeList(to.errors().get(0));
TestHelper.assertError(ce, 0, TestException.class, "Mapper");
TestHelper.assertError(ce, 1, TestException.class, "Disposer");
}
@Test
public void noneagerMapperThrowsDisposerThrows() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Single.using(Functions.justSupplier(Disposable.empty()), mapperThrows, disposerThrows, false)
.to(TestHelper.<Integer>testConsumer())
.assertFailureAndMessage(TestException.class, "Mapper");
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Disposer");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void resourceDisposedIfMapperCrashes() {
Disposable d = Disposable.empty();
Single.using(Functions.justSupplier(d), mapperThrows, disposer)
.test()
.assertFailure(TestException.class);
assertTrue(d.isDisposed());
}
@Test
public void resourceDisposedIfMapperCrashesNonEager() {
Disposable d = Disposable.empty();
Single.using(Functions.justSupplier(d), mapperThrows, disposer, false)
.test()
.assertFailure(TestException.class);
assertTrue(d.isDisposed());
}
@Test
public void dispose() {
Disposable d = Disposable.empty();
Single.using(Functions.justSupplier(d), mapper, disposer, false)
.test(true);
assertTrue(d.isDisposed());
}
@Test
public void disposerThrowsEager() {
Single.using(Functions.justSupplier(Disposable.empty()), mapper, disposerThrows)
.test()
.assertFailure(TestException.class);
}
@Test
public void disposerThrowsNonEager() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Single.using(Functions.justSupplier(Disposable.empty()), mapper, disposerThrows, false)
.test()
.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Disposer");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void errorAndDisposerThrowsEager() {
TestObserverEx<Integer> to = Single.using(Functions.justSupplier(Disposable.empty()),
new Function<Disposable, SingleSource<Integer>>() {
@Override
public SingleSource<Integer> apply(Disposable v) throws Exception {
return Single.<Integer>error(new TestException("Mapper-run"));
}
}, disposerThrows)
.to(TestHelper.<Integer>testConsumer())
.assertFailure(CompositeException.class);
List<Throwable> ce = TestHelper.compositeList(to.errors().get(0));
TestHelper.assertError(ce, 0, TestException.class, "Mapper-run");
TestHelper.assertError(ce, 1, TestException.class, "Disposer");
}
@Test
public void errorAndDisposerThrowsNonEager() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Single.using(Functions.justSupplier(Disposable.empty()),
new Function<Disposable, SingleSource<Integer>>() {
@Override
public SingleSource<Integer> apply(Disposable v) throws Exception {
return Single.<Integer>error(new TestException("Mapper-run"));
}
}, disposerThrows, false)
.test()
.assertFailure(TestException.class);
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Disposer");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void successDisposeRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp = PublishProcessor.create();
Disposable d = Disposable.empty();
final TestObserver<Integer> to = Single.using(Functions.justSupplier(d), new Function<Disposable, SingleSource<Integer>>() {
@Override
public SingleSource<Integer> apply(Disposable v) throws Exception {
return pp.single(-99);
}
}, disposer)
.test();
pp.onNext(1);
Runnable r1 = new Runnable() {
@Override
public void run() {
pp.onComplete();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
to.dispose();
}
};
TestHelper.race(r1, r2);
assertTrue(d.isDisposed());
}
}
@Test
public void doubleOnSubscribe() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Single.using(Functions.justSupplier(1), new Function<Integer, SingleSource<Integer>>() {
@Override
public SingleSource<Integer> apply(Integer v) throws Exception {
return new Single<Integer>() {
@Override
protected void subscribeActual(SingleObserver<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
assertFalse(((Disposable)observer).isDisposed());
Disposable d = Disposable.empty();
observer.onSubscribe(d);
assertTrue(d.isDisposed());
assertFalse(((Disposable)observer).isDisposed());
observer.onSuccess(1);
assertTrue(((Disposable)observer).isDisposed());
}
};
}
}, Functions.emptyConsumer())
.test()
.assertResult(1)
;
TestHelper.assertError(errors, 0, IllegalStateException.class, "Disposable already set!");
} finally {
RxJavaPlugins.reset();
}
}
@Test
@SuppressUndeliverable
public void errorDisposeRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp = PublishProcessor.create();
Disposable d = Disposable.empty();
final TestObserver<Integer> to = Single.using(Functions.justSupplier(d), new Function<Disposable, SingleSource<Integer>>() {
@Override
public SingleSource<Integer> apply(Disposable v) throws Exception {
return pp.single(-99);
}
}, disposer)
.test();
final TestException ex = new TestException();
Runnable r1 = new Runnable() {
@Override
public void run() {
pp.onError(ex);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
to.dispose();
}
};
TestHelper.race(r1, r2);
assertTrue(d.isDisposed());
}
}
@Test
public void eagerDisposeResourceThenDisposeUpstream() {
final StringBuilder sb = new StringBuilder();
TestObserver<Integer> to = Single.using(Functions.justSupplier(1),
new Function<Integer, Single<Integer>>() {
@Override
public Single<Integer> apply(Integer t) throws Throwable {
return Single.<Integer>never()
.doOnDispose(new Action() {
@Override
public void run() throws Throwable {
sb.append("Dispose");
}
})
;
}
}, new Consumer<Integer>() {
@Override
public void accept(Integer t) throws Throwable {
sb.append("Resource");
}
}, true)
.test()
;
to.assertEmpty();
to.dispose();
assertEquals("ResourceDispose", sb.toString());
}
@Test
public void nonEagerDisposeUpstreamThenDisposeResource() {
final StringBuilder sb = new StringBuilder();
TestObserver<Integer> to = Single.using(Functions.justSupplier(1),
new Function<Integer, Single<Integer>>() {
@Override
public Single<Integer> apply(Integer t) throws Throwable {
return Single.<Integer>never()
.doOnDispose(new Action() {
@Override
public void run() throws Throwable {
sb.append("Dispose");
}
})
;
}
}, new Consumer<Integer>() {
@Override
public void accept(Integer t) throws Throwable {
sb.append("Resource");
}
}, false)
.test()
;
to.assertEmpty();
to.dispose();
assertEquals("DisposeResource", sb.toString());
}
}
| SingleUsingTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java | {
"start": 12315,
"end": 13383
} | class ____ extends SubjectInheritingThread {
private final long ttl;
private final long ttlInterval;
public EntityDeletionThread(Configuration conf) {
ttl = conf.getLong(YarnConfiguration.TIMELINE_SERVICE_TTL_MS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_TTL_MS);
ttlInterval = conf.getLong(
YarnConfiguration.TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS);
LOG.info("Starting deletion thread with ttl " + ttl + " and cycle " +
"interval " + ttlInterval);
}
@Override
public void work() {
while (true) {
long timestamp = System.currentTimeMillis() - ttl;
try {
discardOldEntities(timestamp);
Thread.sleep(ttlInterval);
} catch (IOException e) {
LOG.error(e.toString());
} catch (InterruptedException e) {
LOG.info("Deletion thread received interrupt, exiting");
break;
}
}
}
}
private static | EntityDeletionThread |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/example/ContentBasedRouteTest.java | {
"start": 1260,
"end": 2592
} | class ____ extends SpringTestSupport {
protected MockEndpoint matchedEndpoint;
protected MockEndpoint notMatchedEndpoint;
protected Object body = "<hello>world!</hello>";
protected String header = "destination";
@Test
public void testMatchesPredicate() throws Exception {
matchedEndpoint.expectedMessageCount(1);
notMatchedEndpoint.expectedMessageCount(0);
template.sendBodyAndHeader("direct:start", body, header, "firstChoice");
assertIsSatisfied(matchedEndpoint, notMatchedEndpoint);
}
@Test
public void testDoesNotMatchPredicate() throws Exception {
matchedEndpoint.expectedMessageCount(0);
notMatchedEndpoint.expectedMessageCount(1);
template.sendBodyAndHeader("direct:start", body, header, "notMatchedValue");
assertIsSatisfied(matchedEndpoint, notMatchedEndpoint);
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
matchedEndpoint = getMockEndpoint("mock:matched");
notMatchedEndpoint = getMockEndpoint("mock:notMatched");
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/spring/example/contentBasedRoute.xml");
}
}
| ContentBasedRouteTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/BooleanUtils.java | {
"start": 4767,
"end": 13299
} | enum ____).
* @since 3.12.0
*/
public static Boolean[] booleanValues() {
return new Boolean[] {Boolean.FALSE, Boolean.TRUE};
}
/**
* Compares two {@code boolean} values. This is the same functionality as provided in Java 7.
*
* @param x the first {@code boolean} to compare
* @param y the second {@code boolean} to compare
* @return the value {@code 0} if {@code x == y};
* a value less than {@code 0} if {@code !x && y}; and
* a value greater than {@code 0} if {@code x && !y}
* @since 3.4
*/
public static int compare(final boolean x, final boolean y) {
if (x == y) {
return 0;
}
return x ? 1 : -1;
}
/**
* Performs the given action for each Boolean {@link BooleanUtils#values()}.
*
* @param action The action to be performed for each element
* @since 3.13.0
*/
public static void forEach(final Consumer<Boolean> action) {
values().forEach(action);
}
/**
* Checks if a {@link Boolean} value is {@code false},
* handling {@code null} by returning {@code false}.
*
* <pre>
* BooleanUtils.isFalse(Boolean.TRUE) = false
* BooleanUtils.isFalse(Boolean.FALSE) = true
* BooleanUtils.isFalse(null) = false
* </pre>
*
* @param bool the boolean to check, null returns {@code false}
* @return {@code true} only if the input is non-{@code null} and {@code false}
* @since 2.1
*/
public static boolean isFalse(final Boolean bool) {
return Boolean.FALSE.equals(bool);
}
/**
* Checks if a {@link Boolean} value is <em>not</em> {@code false},
* handling {@code null} by returning {@code true}.
*
* <pre>
* BooleanUtils.isNotFalse(Boolean.TRUE) = true
* BooleanUtils.isNotFalse(Boolean.FALSE) = false
* BooleanUtils.isNotFalse(null) = true
* </pre>
*
* @param bool the boolean to check, null returns {@code true}
* @return {@code true} if the input is {@code null} or {@code true}
* @since 2.3
*/
public static boolean isNotFalse(final Boolean bool) {
return !isFalse(bool);
}
/**
* Checks if a {@link Boolean} value is <em>not</em> {@code true},
* handling {@code null} by returning {@code true}.
*
* <pre>
* BooleanUtils.isNotTrue(Boolean.TRUE) = false
* BooleanUtils.isNotTrue(Boolean.FALSE) = true
* BooleanUtils.isNotTrue(null) = true
* </pre>
*
* @param bool the boolean to check, null returns {@code true}
* @return {@code true} if the input is null or false
* @since 2.3
*/
public static boolean isNotTrue(final Boolean bool) {
return !isTrue(bool);
}
/**
* Checks if a {@link Boolean} value is {@code true},
* handling {@code null} by returning {@code false}.
*
* <pre>
* BooleanUtils.isTrue(Boolean.TRUE) = true
* BooleanUtils.isTrue(Boolean.FALSE) = false
* BooleanUtils.isTrue(null) = false
* </pre>
*
* @param bool the boolean to check, {@code null} returns {@code false}
* @return {@code true} only if the input is non-null and true
* @since 2.1
*/
public static boolean isTrue(final Boolean bool) {
return Boolean.TRUE.equals(bool);
}
/**
* Negates the specified boolean.
*
* <p>If {@code null} is passed in, {@code null} will be returned.</p>
*
* <p>NOTE: This returns {@code null} and will throw a {@link NullPointerException}
* if unboxed to a boolean.</p>
*
* <pre>
* BooleanUtils.negate(Boolean.TRUE) = Boolean.FALSE;
* BooleanUtils.negate(Boolean.FALSE) = Boolean.TRUE;
* BooleanUtils.negate(null) = null;
* </pre>
*
* @param bool the Boolean to negate, may be null
* @return the negated Boolean, or {@code null} if {@code null} input
*/
public static Boolean negate(final Boolean bool) {
if (bool == null) {
return null;
}
return bool.booleanValue() ? Boolean.FALSE : Boolean.TRUE;
}
/**
* Performs a one-hot on an array of booleans.
* <p>
* This implementation returns true if one, and only one, of the supplied values is true.
* </p>
* <p>
* See also <a href="https://en.wikipedia.org/wiki/One-hot">One-hot</a>.
* </p>
* @param array an array of {@code boolean}s
* @return the result of the one-hot operations
* @throws NullPointerException if {@code array} is {@code null}
* @throws IllegalArgumentException if {@code array} is empty.
*/
public static boolean oneHot(final boolean... array) {
ObjectUtils.requireNonEmpty(array, "array");
boolean result = false;
for (final boolean element: array) {
if (element) {
if (result) {
return false;
}
result = true;
}
}
return result;
}
/**
* Performs a one-hot on an array of booleans.
* <p>
* This implementation returns true if one, and only one, of the supplied values is true.
* </p>
* <p>
* Null array elements map to false, like {@code Boolean.parseBoolean(null)} and its callers return false.
* </p>
* <p>
* See also <a href="https://en.wikipedia.org/wiki/One-hot">One-hot</a>.
* </p>
*
* @param array an array of {@code boolean}s
* @return the result of the one-hot operations
* @throws NullPointerException if {@code array} is {@code null}
* @throws IllegalArgumentException if {@code array} is empty.
*/
public static Boolean oneHot(final Boolean... array) {
return Boolean.valueOf(oneHot(ArrayUtils.toPrimitive(array)));
}
/**
* Performs an 'or' operation on a set of booleans.
*
* <pre>
* BooleanUtils.or(true, true) = true
* BooleanUtils.or(false, false) = false
* BooleanUtils.or(true, false) = true
* BooleanUtils.or(true, true, false) = true
* BooleanUtils.or(true, true, true) = true
* BooleanUtils.or(false, false, false) = false
* </pre>
*
* @param array an array of {@code boolean}s
* @return {@code true} if any of the arguments is {@code true}, and it returns {@code false} otherwise.
* @throws NullPointerException if {@code array} is {@code null}
* @throws IllegalArgumentException if {@code array} is empty.
* @since 3.0.1
*/
public static boolean or(final boolean... array) {
ObjectUtils.requireNonEmpty(array, "array");
for (final boolean element : array) {
if (element) {
return true;
}
}
return false;
}
/**
* Performs an 'or' operation on an array of Booleans.
* <pre>
* BooleanUtils.or(Boolean.TRUE, Boolean.TRUE) = Boolean.TRUE
* BooleanUtils.or(Boolean.FALSE, Boolean.FALSE) = Boolean.FALSE
* BooleanUtils.or(Boolean.TRUE, Boolean.FALSE) = Boolean.TRUE
* BooleanUtils.or(Boolean.TRUE, Boolean.TRUE, Boolean.TRUE) = Boolean.TRUE
* BooleanUtils.or(Boolean.FALSE, Boolean.FALSE, Boolean.TRUE) = Boolean.TRUE
* BooleanUtils.or(Boolean.TRUE, Boolean.FALSE, Boolean.TRUE) = Boolean.TRUE
* BooleanUtils.or(Boolean.FALSE, Boolean.FALSE, Boolean.FALSE) = Boolean.FALSE
* BooleanUtils.or(Boolean.TRUE, null) = Boolean.TRUE
* BooleanUtils.or(Boolean.FALSE, null) = Boolean.FALSE
* </pre>
* <p>
* Null array elements map to false, like {@code Boolean.parseBoolean(null)} and its callers return false.
* </p>
*
* @param array an array of {@link Boolean}s
* @return {@code true} if any of the arguments is {@code true}, and it returns {@code false} otherwise.
* @throws NullPointerException if {@code array} is {@code null}
* @throws IllegalArgumentException if {@code array} is empty.
* @since 3.0.1
*/
public static Boolean or(final Boolean... array) {
ObjectUtils.requireNonEmpty(array, "array");
return or(ArrayUtils.toPrimitive(array)) ? Boolean.TRUE : Boolean.FALSE;
}
/**
* Returns a new array of possible values (like an | would |
java | elastic__elasticsearch | modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/AbstractRepositoryS3ClientYamlTestSuiteIT.java | {
"start": 796,
"end": 1186
} | class ____ extends ESClientYamlSuiteTestCase {
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
public AbstractRepositoryS3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
}
| AbstractRepositoryS3ClientYamlTestSuiteIT |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/processor/aggregate/jdbc/JdbcAggregationRepositoryMultipleRepoTest.java | {
"start": 1240,
"end": 4327
} | class ____ extends CamelSpringTestSupport {
@Test
public void testMultipeRepo() {
JdbcAggregationRepository repo1 = applicationContext.getBean("repo1", JdbcAggregationRepository.class);
repo1.setReturnOldExchange(true);
repo1.start();
JdbcAggregationRepository repo2 = applicationContext.getBean("repo2", JdbcAggregationRepository.class);
repo2.setReturnOldExchange(true);
repo2.start();
// Can't get something we have not put in...
Exchange actual = repo1.get(context, "missing");
assertNull(actual);
actual = repo2.get(context, "missing");
assertNull(actual);
// Store it..
Exchange exchange1 = new DefaultExchange(context);
exchange1.getIn().setBody("counter:1");
actual = repo1.add(context, "foo", exchange1);
assertNull(actual);
// Get it back..
actual = repo1.get(context, "foo");
assertEquals("counter:1", actual.getIn().getBody());
assertNull(repo2.get(context, "foo"));
// Change it after reading the current exchange with version
Exchange exchange2 = new DefaultExchange(context);
exchange2 = repo1.get(context, "foo");
exchange2.getIn().setBody("counter:2");
actual = repo1.add(context, "foo", exchange2);
// the old one
assertEquals("counter:1", actual.getIn().getBody());
// add to repo2
Exchange exchange3 = new DefaultExchange(context);
exchange3.getIn().setBody("Hello World");
actual = repo2.add(context, "bar", exchange3);
assertNull(actual);
assertNull(repo1.get(context, "bar"));
// Get it back..
actual = repo1.get(context, "foo");
assertEquals("counter:2", actual.getIn().getBody());
assertNull(repo2.get(context, "foo"));
actual = repo2.get(context, "bar");
assertEquals("Hello World", actual.getIn().getBody());
assertNull(repo1.get(context, "bar"));
}
@Test
public void testMultipeRepoSameKeyDifferentContent() {
JdbcAggregationRepository repo1 = applicationContext.getBean("repo1", JdbcAggregationRepository.class);
repo1.start();
JdbcAggregationRepository repo2 = applicationContext.getBean("repo2", JdbcAggregationRepository.class);
repo2.start();
Exchange exchange1 = new DefaultExchange(context);
exchange1.getIn().setBody("Hello World");
repo1.add(context, "foo", exchange1);
Exchange exchange2 = new DefaultExchange(context);
exchange2.getIn().setBody("Bye World");
repo2.add(context, "foo", exchange2);
Exchange actual = repo1.get(context, "foo");
assertEquals("Hello World", actual.getIn().getBody());
actual = repo2.get(context, "foo");
assertEquals("Bye World", actual.getIn().getBody());
}
@Override
protected AbstractApplicationContext createApplicationContext() {
return newAppContext("JdbcSpringDataSource.xml");
}
}
| JdbcAggregationRepositoryMultipleRepoTest |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/immutable_constructor/ImmutablePOJO.java | {
"start": 747,
"end": 1266
} | class ____ implements Serializable {
private static final long serialVersionUID = -7086198701202598455L;
private final Integer immutableId;
private final String immutableDescription;
public ImmutablePOJO(Integer immutableId, String immutableDescription) {
this.immutableId = immutableId;
this.immutableDescription = immutableDescription;
}
public Integer getImmutableId() {
return immutableId;
}
public String getImmutableDescription() {
return immutableDescription;
}
}
| ImmutablePOJO |
java | apache__rocketmq | tools/src/test/java/org/apache/rocketmq/tools/command/server/ServerResponseMocker.java | {
"start": 4290,
"end": 5693
} | class ____ extends SimpleChannelInboundHandler<RemotingCommand> {
private HashMap<String, String> extMap;
public NettyServerHandler(HashMap<String, String> extMap) {
this.extMap = extMap;
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, RemotingCommand msg) throws Exception {
String remark = "mock data";
final RemotingCommand response =
RemotingCommand.createResponseCommand(RemotingSysResponseCode.SUCCESS, remark);
response.setOpaque(msg.getOpaque());
response.setBody(getBody());
if (extMap != null && extMap.size() > 0) {
response.setExtFields(extMap);
}
ctx.writeAndFlush(response);
}
}
public static ServerResponseMocker startServer(byte[] body) {
return startServer(body, null);
}
public static ServerResponseMocker startServer(byte[] body, HashMap<String, String> extMap) {
ServerResponseMocker mocker = new ServerResponseMocker() {
@Override
protected byte[] getBody() {
return body;
}
};
mocker.start(extMap);
// add jvm hook, close connection when jvm down
Runtime.getRuntime().addShutdownHook(new Thread(mocker::shutdown));
return mocker;
}
}
| NettyServerHandler |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/admin/DeleteConsumerGroupsResult.java | {
"start": 1090,
"end": 1887
} | class ____ {
private final Map<String, KafkaFuture<Void>> futures;
DeleteConsumerGroupsResult(final Map<String, KafkaFuture<Void>> futures) {
this.futures = futures;
}
/**
* Return a map from group id to futures which can be used to check the status of
* individual deletions.
*/
public Map<String, KafkaFuture<Void>> deletedGroups() {
Map<String, KafkaFuture<Void>> deletedGroups = new HashMap<>(futures.size());
deletedGroups.putAll(futures);
return deletedGroups;
}
/**
* Return a future which succeeds only if all the consumer group deletions succeed.
*/
public KafkaFuture<Void> all() {
return KafkaFuture.allOf(futures.values().toArray(new KafkaFuture<?>[0]));
}
}
| DeleteConsumerGroupsResult |
java | apache__camel | core/camel-core-reifier/src/main/java/org/apache/camel/reifier/dataformat/UniVocityFixedWidthDataFormatReifier.java | {
"start": 1126,
"end": 2216
} | class ____ extends UniVocityAbstractDataFormatReifier<UniVocityFixedDataFormat> {
public UniVocityFixedWidthDataFormatReifier(CamelContext camelContext, DataFormatDefinition definition) {
super(camelContext, definition);
}
@Override
protected void prepareDataFormatConfig(Map<String, Object> properties) {
super.prepareDataFormatConfig(properties);
properties.put("fieldLengths", getFieldLengths());
properties.put("skipTrailingCharsUntilNewline", definition.getSkipTrailingCharsUntilNewline());
properties.put("recordEndsOnNewline", definition.getRecordEndsOnNewline());
properties.put("padding", definition.getPadding());
}
private String getFieldLengths() {
if (definition.getHeaders() != null) {
StringJoiner sj = new StringJoiner(",");
for (UniVocityHeader header : definition.getHeaders()) {
sj.add(header.getLength());
}
return sj.toString();
} else {
return null;
}
}
}
| UniVocityFixedWidthDataFormatReifier |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/domain/AbstractPersistable.java | {
"start": 1022,
"end": 1523
} | class ____ entities. Allows parameterization of id type, chooses auto-generation and implements
* {@link #equals(Object)} and {@link #hashCode()} based on that id.
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Mark Paluch
* @author Greg Turnquist
* @author Ngoc Nhan
* @param <PK> the type of the identifier.
*/
@MappedSuperclass
@SuppressWarnings("NullAway") // querydsl does not work with jspecify -> 'Did not find type @org.jspecify.annotations.Nullable...'
public abstract | for |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/fuseable/FuseToFlowable.java | {
"start": 1181,
"end": 1554
} | interface ____ call the {@link #fuseToFlowable()}
* to return a Flowable which could be the Flowable-specific implementation of reduce(BiFunction).
* <p>
* This causes a slight overhead in assembly time (1 instanceof check, 1 operator allocation and 1 dropped
* operator) but does not incur the conversion overhead at runtime.
*
* @param <T> the value type
*/
public | and |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/MethodParameter.java | {
"start": 29529,
"end": 30998
} | class ____ extends MethodParameter {
private volatile Annotation @Nullable [] combinedAnnotations;
public FieldAwareConstructorParameter(Constructor<?> constructor, int parameterIndex, @Nullable String fieldName) {
super(constructor, parameterIndex);
this.parameterName = fieldName;
}
@Override
public Annotation[] getParameterAnnotations() {
String parameterName = this.parameterName;
Assert.state(parameterName != null, "Parameter name not initialized");
Annotation[] anns = this.combinedAnnotations;
if (anns == null) {
anns = super.getParameterAnnotations();
try {
Field field = getDeclaringClass().getDeclaredField(parameterName);
Annotation[] fieldAnns = field.getAnnotations();
if (fieldAnns.length > 0) {
List<Annotation> merged = new ArrayList<>(anns.length + fieldAnns.length);
merged.addAll(Arrays.asList(anns));
for (Annotation fieldAnn : fieldAnns) {
boolean existingType = false;
for (Annotation ann : anns) {
if (ann.annotationType() == fieldAnn.annotationType()) {
existingType = true;
break;
}
}
if (!existingType) {
merged.add(fieldAnn);
}
}
anns = merged.toArray(EMPTY_ANNOTATION_ARRAY);
}
}
catch (NoSuchFieldException | SecurityException ex) {
// ignore
}
this.combinedAnnotations = anns;
}
return anns;
}
}
/**
* Inner | FieldAwareConstructorParameter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/sql/partition/PostgresPartitionedTableTest.java | {
"start": 1386,
"end": 1915
} | class ____ {
@Test void test(EntityManagerFactoryScope scope) {
scope.inTransaction( session -> {
Partitioned partitioned = new Partitioned();
partitioned.id = 1L;
partitioned.pid = 500L;
session.persist( partitioned );
} );
scope.inTransaction( session -> {
Partitioned partitioned = session.find( Partitioned.class, 1L );
assertNotNull( partitioned );
partitioned.text = "updated";
} );
}
@Entity
@Table(name = "pgparts",
options = "partition by range (pid)")
static | PostgresPartitionedTableTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/ConnectionClosedException.java | {
"start": 951,
"end": 1367
} | class ____ extends ConnectionException {
private static final long serialVersionUID = 3802002501688542472L;
public ConnectionClosedException(String message) {
super(message);
}
public ConnectionClosedException(String message, Throwable cause) {
super(message, cause);
}
public ConnectionClosedException(Throwable cause) {
super(cause);
}
}
| ConnectionClosedException |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/interceptor/ReactiveTransactionInterceptorTests.java | {
"start": 963,
"end": 2045
} | class ____ extends AbstractReactiveTransactionAspectTests {
@Override
protected Object advised(Object target, ReactiveTransactionManager ptm, TransactionAttributeSource[] tas) {
TransactionInterceptor ti = new TransactionInterceptor();
ti.setTransactionManager(ptm);
ti.setTransactionAttributeSources(tas);
ProxyFactory pf = new ProxyFactory(target);
pf.addAdvice(0, ti);
return pf.getProxy();
}
/**
* Template method to create an advised object given the
* target object and transaction setup.
* Creates a TransactionInterceptor and applies it.
*/
@Override
protected Object advised(Object target, ReactiveTransactionManager ptm, TransactionAttributeSource tas) {
TransactionInterceptor ti = new TransactionInterceptor();
ti.setTransactionManager(ptm);
assertThat(ti.getTransactionManager()).isEqualTo(ptm);
ti.setTransactionAttributeSource(tas);
assertThat(ti.getTransactionAttributeSource()).isEqualTo(tas);
ProxyFactory pf = new ProxyFactory(target);
pf.addAdvice(0, ti);
return pf.getProxy();
}
}
| ReactiveTransactionInterceptorTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java | {
"start": 1259,
"end": 1889
} | class ____ implements
Comparator<RawComparable> {
private RawComparator<Object> cmp;
public BytesComparator(RawComparator<Object> cmp) {
this.cmp = cmp;
}
@Override
public int compare(RawComparable o1, RawComparable o2) {
return compare(o1.buffer(), o1.offset(), o1.size(), o2.buffer(), o2
.offset(), o2.size());
}
public int compare(byte[] a, int off1, int len1, byte[] b, int off2,
int len2) {
return cmp.compare(a, off1, len1, b, off2, len2);
}
}
/**
* Interface for all objects that has a single integer magnitude.
*/
static | BytesComparator |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/config/RouteBuilderRefTest.java | {
"start": 1110,
"end": 1663
} | class ____ extends XmlConfigTestSupport {
@Test
public void testUsingRouteBuilderRefInCamelXml() throws Exception {
AbstractApplicationContext applicationContext
= new ClassPathXmlApplicationContext("org/apache/camel/spring/config/routeBuilderRef.xml");
CamelContext context = applicationContext.getBean("camel5", CamelContext.class);
assertValidContext(context);
// we're done so let's properly close the application context
IOHelper.close(applicationContext);
}
}
| RouteBuilderRefTest |
java | google__guice | extensions/assistedinject/src/com/google/inject/assistedinject/AssistedInjectBinding.java | {
"start": 902,
"end": 1146
} | interface ____<T> {
/** Returns the {@link Key} for the factory binding. */
Key<T> getKey();
/** Returns an {@link AssistedMethod} for each method in the factory. */
Collection<AssistedMethod> getAssistedMethods();
}
| AssistedInjectBinding |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/bind/annotation/RequestPart.java | {
"start": 2972,
"end": 3548
} | interface ____ {
/**
* Alias for {@link #name}.
*/
@AliasFor("name")
String value() default "";
/**
* The name of the part in the {@code "multipart/form-data"} request to bind to.
* @since 4.2
*/
@AliasFor("value")
String name() default "";
/**
* Whether the part is required.
* <p>Defaults to {@code true}, leading to an exception being thrown
* if the part is missing in the request. Switch this to
* {@code false} if you prefer a {@code null} value if the part is
* not present in the request.
*/
boolean required() default true;
}
| RequestPart |
java | google__guava | guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java | {
"start": 36174,
"end": 36563
} | class ____ extends DefaultValueChecker {
@SuppressWarnings("unused") // called by NullPointerTester
@Keep
public void checkArray(EmptyEnum object, String s) {
calledWith(object, s);
}
void check() {
try {
runTester();
} catch (AssertionError expected) {
return;
}
fail("Should have failed because | EmptyEnumDefaultValueChecker |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucket.java | {
"start": 899,
"end": 1807
} | class ____ extends InternalExtendedStats implements ExtendedStatsBucket {
InternalExtendedStatsBucket(
String name,
long count,
double sum,
double min,
double max,
double sumOfSqrs,
double sigma,
DocValueFormat formatter,
Map<String, Object> metadata
) {
super(name, count, sum, min, max, sumOfSqrs, sigma, formatter, metadata);
}
/**
* Read from a stream.
*/
public InternalExtendedStatsBucket(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return ExtendedStatsBucketPipelineAggregationBuilder.NAME;
}
@Override
protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) {
throw new UnsupportedOperationException("Not supported");
}
}
| InternalExtendedStatsBucket |
java | google__guava | android/guava-testlib/src/com/google/common/testing/TearDownStack.java | {
"start": 1355,
"end": 2742
} | class ____ implements TearDownAccepter {
private static final Logger logger = Logger.getLogger(TearDownStack.class.getName());
@VisibleForTesting final Object lock = new Object();
@GuardedBy("lock")
final Deque<TearDown> stack = new ArrayDeque<>();
private final boolean suppressThrows;
public TearDownStack() {
this.suppressThrows = false;
}
public TearDownStack(boolean suppressThrows) {
this.suppressThrows = suppressThrows;
}
@Override
public final void addTearDown(TearDown tearDown) {
synchronized (lock) {
stack.addFirst(checkNotNull(tearDown));
}
}
/** Causes teardown to execute. */
public final void runTearDown() {
Throwable exception = null;
List<TearDown> stackCopy;
synchronized (lock) {
stackCopy = new ArrayList<>(stack);
stack.clear();
}
for (TearDown tearDown : stackCopy) {
try {
tearDown.tearDown();
} catch (Throwable t) {
if (suppressThrows) {
logger.log(Level.INFO, "exception thrown during tearDown", t);
} else {
if (exception == null) {
exception = t;
} else {
exception.addSuppressed(t);
}
}
}
}
if (exception != null) {
throwIfUnchecked(exception);
throw new RuntimeException("failure during tearDown", exception);
}
}
}
| TearDownStack |
java | apache__camel | components/camel-web3j/src/test/java/org/apache/camel/component/web3j/integration/Web3jConsumerLogTest.java | {
"start": 1256,
"end": 2098
} | class ____ extends Web3jIntegrationTestSupport {
@Test
public void consumerTest() throws Exception {
mockResult.expectedMinimumMessageCount(1);
mockError.expectedMessageCount(0);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
errorHandler(deadLetterChannel("mock:error"));
from("web3j://" + getUrl()
+ OPERATION.toLowerCase() + "=" + ETH_LOG_OBSERVABLE + "&"
+ "fromBlock=earliest&"
+ "toBlock=latest&"
+ "address=0xc8CDceCE5d006dAB638029EBCf6Dd666efF5A952")
.to("mock:result");
}
};
}
}
| Web3jConsumerLogTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/active/TestingResourceEventHandler.java | {
"start": 2749,
"end": 4310
} | class ____<WorkerType extends ResourceIDRetrievable> {
private Consumer<Collection<WorkerType>> onPreviousAttemptWorkersRecoveredConsumer =
(ignore) -> {};
private BiConsumer<ResourceID, String> onWorkerTerminatedConsumer =
(ignore1, ignore2) -> {};
private Consumer<Throwable> onErrorConsumer = (ignore) -> {};
private Builder() {}
public Builder<WorkerType> setOnPreviousAttemptWorkersRecoveredConsumer(
Consumer<Collection<WorkerType>> onPreviousAttemptWorkersRecoveredConsumer) {
this.onPreviousAttemptWorkersRecoveredConsumer =
Preconditions.checkNotNull(onPreviousAttemptWorkersRecoveredConsumer);
return this;
}
public Builder<WorkerType> setOnWorkerTerminatedConsumer(
BiConsumer<ResourceID, String> onWorkerTerminatedConsumer) {
this.onWorkerTerminatedConsumer =
Preconditions.checkNotNull(onWorkerTerminatedConsumer);
return this;
}
public Builder<WorkerType> setOnErrorConsumer(Consumer<Throwable> onErrorConsumer) {
this.onErrorConsumer = Preconditions.checkNotNull(onErrorConsumer);
return this;
}
public TestingResourceEventHandler<WorkerType> build() {
return new TestingResourceEventHandler<>(
onPreviousAttemptWorkersRecoveredConsumer,
onWorkerTerminatedConsumer,
onErrorConsumer);
}
}
}
| Builder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.