language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RReliableFanout.java | {
"start": 937,
"end": 4340
} | interface ____<V> extends RExpirable, RReliableFanoutAsync<V>, RDestroyable {
/**
* Publishes a message to all subscribed queues based on the provided arguments.
*
* @param args arguments defining the message and publishing parameters
* @return The published message, or null if the message hasn't been added to all
* subscribed queues. The message may not be added to a subscribed queue if
* the queue has size limit and is full, if message size exceeds defined queue message size limit
* or message rejected due to deduplication.
*/
Message<V> publish(FanoutPublishArgs<V> args);
/**
* Publishes multiple messages to all subscribed queues based on the provided arguments.
*
* @param args arguments defining the messages and publishing parameters
* @return A list containing only messages that were added to at least a single
* subscribed queue. Messages may not be added to a subscribed queue if
* the queue has size limit and is full, if message size exceeds defined queue message size limit
* or message rejected due to deduplication.
*/
List<Message<V>> publishMany(FanoutPublishArgs<V> args);
/**
* Removes a filter for the specified queue name .
*
* @param name the queue name
*/
void removeFilter(String name);
/**
* Sets a filter that is applied to all messages published to the queue through
* this fanout.
* <p>
* The FanoutFilter object is replicated among all ReliableFanout objects
* and applied on each of them during message publishing.
*
* @param name the queue name
* @param filter applied to messages
*/
void setFilter(String name, MessageFilter<V> filter);
/**
* Checks if a queue with the specified name is subscribed to this fanout.
*
* @param name the queue name
* @return <code>true</code> if the queue is subscribed, <code>false</code> otherwise
*/
boolean isSubscribed(String name);
/**
* Subscribes a queue with the specified name to this fanout.
*
* @param name the queue name
* @return <code>true</code> if the queue was subscribed,
* <code>false</code> if queue is already subscribed
*/
boolean subscribeQueue(String name);
/**
* Subscribes a queue with the specified name to this fanout with a filter.
*
* @param name the queue name
* @param filter the filter that is applied to all messages published through this fanout
* @return <code>true</code> if the queue was subscribed,
* <code>false</code> if queue is already subscribed
*/
boolean subscribeQueue(String name, MessageFilter<V> filter);
/**
* Unsubscribes a queue with the specified name from this fanout.
*
* @param name the queue name
* @return <code>true</code> if the queue was unsubscribed,
* <code>false</code> if the queue isn't subscribed
*/
boolean unsubscribe(String name);
/**
* Returns a list of the names of all subscribers to this fanout.
*
* @return subscriber names
*/
List<String> getSubscribers();
/**
* Returns amount of subscribers to this fanout.
*
* @return amount of subscribers
*/
int countSubscribers();
}
| RReliableFanout |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/ITestAbfsFileSystemContractAppend.java | {
"start": 1222,
"end": 2063
} | class ____ extends AbstractContractAppendTest {
private final boolean isSecure;
private final ABFSContractTestBinding binding;
public ITestAbfsFileSystemContractAppend() throws Exception {
binding = new ABFSContractTestBinding();
this.isSecure = binding.isSecureMode();
}
@BeforeEach
@Override
public void setup() throws Exception {
binding.setup();
super.setup();
}
@Override
protected Configuration createConfiguration() {
return binding.getRawConfiguration();
}
@Override
protected AbstractFSContract createContract(final Configuration conf) {
return new AbfsFileSystemContract(conf, isSecure);
}
@Override
@Test
public void testRenameFileBeingAppended() throws Throwable {
skip("Skipping as renaming an opened file is not supported");
}
}
| ITestAbfsFileSystemContractAppend |
java | google__dagger | javatests/dagger/internal/codegen/ModuleFactoryGeneratorTest.java | {
"start": 35287,
"end": 35740
} | interface ____ {}");
daggerCompiler(component, usage, concreteModule, parameterizedModule, foo, packagePrivateBar)
.compile(
subject -> {
if (compilerMode.isKotlinCodegenEnabled()) {
// TODO(b/438765237): Currently, this fails at the declaration of the factory
// (rather than the call site) because the internal Bar is exposed in the public
// factory declaration: " | Bar |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/ProactiveAuthCompletionExceptionMapperTest.java | {
"start": 889,
"end": 2420
} | class ____ {
private static final String AUTHENTICATION_COMPLETION_EX = "AuthenticationCompletionException";
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(TestIdentityProvider.class, TestIdentityController.class,
CustomAuthCompletionExceptionMapper.class)
.addAsResource(new StringAsset("quarkus.http.auth.form.enabled=true\n"), "application.properties");
}
});
@BeforeAll
public static void setup() {
TestIdentityController.resetRoles().add("a d m i n", "a d m i n", "a d m i n");
}
@Test
public void testAuthCompletionExMapper() {
RestAssured.enableLoggingOfRequestAndResponseIfValidationFails();
RestAssured
.given()
.filter(new CookieFilter())
.redirects().follow(false)
.when()
.formParam("j_username", "a d m i n")
.formParam("j_password", "a d m i n")
.cookie("quarkus-redirect-location", "https://quarkus.io/guides")
.post("/j_security_check")
.then()
.assertThat()
.statusCode(401)
.body(Matchers.equalTo(AUTHENTICATION_COMPLETION_EX));
}
public static final | ProactiveAuthCompletionExceptionMapperTest |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/onerror/ErroneousClient_NoOnError.java | {
"start": 314,
"end": 775
} | class ____ {
public static List<String> MESSAGES = new ArrayList<>();
@OnTextMessage
Uni<Void> onMessage(String message) {
synchronized (this) {
MESSAGES.add(message);
if (MESSAGES.size() == 4) {
return Uni.createFrom().failure(new RuntimeException("You asked for an error, you got the error!"));
}
return Uni.createFrom().voidItem();
}
}
}
| ErroneousClient_NoOnError |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestNNHealthCheck.java | {
"start": 2214,
"end": 6263
} | class ____ {
private MiniDFSCluster cluster;
private Configuration conf;
@BeforeEach
public void setup() {
conf = new Configuration();
}
@AfterEach
public void shutdown() {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
@Test
public void testNNHealthCheck() throws IOException {
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(0)
.nnTopology(MiniDFSNNTopology.simpleHATopology())
.build();
doNNHealthCheckTest();
}
@Test
public void testNNHealthCheckWithLifelineAddress() throws IOException {
conf.set(DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY, "0.0.0.0:0");
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(0)
.nnTopology(MiniDFSNNTopology.simpleHATopology())
.build();
doNNHealthCheckTest();
}
@Test
public void testNNHAServiceTargetWithProvidedAddr() {
conf.set(DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY, "0.0.0.1:1");
conf.set(DFS_NAMENODE_RPC_ADDRESS_KEY, "0.0.0.1:2");
// Test constructor with provided address.
NNHAServiceTarget target = new NNHAServiceTarget(conf, "ns", "nn1",
"0.0.0.0:1", "0.0.0.0:2");
assertEquals("/0.0.0.0:1", target.getAddress().toString());
assertEquals("/0.0.0.0:2", target.getHealthMonitorAddress().toString());
}
@Test
public void testNNHealthCheckWithSafemodeAsUnhealthy() throws Exception {
conf.setBoolean(DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE, true);
// now bring up just the NameNode.
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
.nnTopology(MiniDFSNNTopology.simpleHATopology()).build();
cluster.waitActive();
// manually set safemode.
cluster.getFileSystem(0)
.setSafeMode(SafeModeAction.ENTER);
NNHAServiceTarget haTarget = new NNHAServiceTarget(conf,
DFSUtil.getNamenodeNameServiceId(conf), "nn1");
final String expectedTargetString = haTarget.getAddress().toString();
assertTrue(haTarget.toString().contains(expectedTargetString),
"Expected haTarget " + haTarget + " containing " + expectedTargetString);
HAServiceProtocol rpc = haTarget.getHealthMonitorProxy(conf, 5000);
LambdaTestUtils.intercept(RemoteException.class,
"The NameNode is configured to report UNHEALTHY to ZKFC in Safemode.",
() -> rpc.monitorHealth());
}
private void doNNHealthCheckTest() throws IOException {
MockNameNodeResourceChecker mockResourceChecker =
new MockNameNodeResourceChecker(conf);
cluster.getNameNode(0).getNamesystem()
.setNNResourceChecker(mockResourceChecker);
NNHAServiceTarget haTarget = new NNHAServiceTarget(conf,
DFSUtil.getNamenodeNameServiceId(conf), "nn1");
final String expectedTargetString;
if (conf.get(DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY + "." +
DFSUtil.getNamenodeNameServiceId(conf) + ".nn1") != null) {
expectedTargetString = haTarget.getHealthMonitorAddress().toString();
} else {
expectedTargetString = haTarget.getAddress().toString();
}
assertTrue(haTarget.toString().contains(expectedTargetString),
"Expected haTarget " + haTarget + " containing " + expectedTargetString);
HAServiceProtocol rpc = haTarget.getHealthMonitorProxy(conf, conf.getInt(
HA_HM_RPC_TIMEOUT_KEY, HA_HM_RPC_TIMEOUT_DEFAULT));
// Should not throw error, which indicates healthy.
rpc.monitorHealth();
mockResourceChecker.setResourcesAvailable(false);
try {
// Should throw error - NN is unhealthy.
rpc.monitorHealth();
fail("Should not have succeeded in calling monitorHealth");
} catch (HealthCheckFailedException hcfe) {
GenericTestUtils.assertExceptionContains(
"The NameNode has no resources available", hcfe);
} catch (RemoteException re) {
GenericTestUtils.assertExceptionContains(
"The NameNode has no resources available",
re.unwrapRemoteException(HealthCheckFailedException.class));
}
}
}
| TestNNHealthCheck |
java | apache__camel | components/camel-spring-parent/camel-spring-redis/src/test/java/org/apache/camel/component/redis/processor/idempotent/RedisStringIdempotentRepositoryManualIT.java | {
"start": 1688,
"end": 5125
} | class ____ extends CamelTestSupport {
private static final JedisConnectionFactory CONNECTION_FACTORY = new JedisConnectionFactory();
protected SpringRedisStringIdempotentRepository idempotentRepository;
@Produce("direct:start")
private ProducerTemplate producer;
@EndpointInject("mock:result")
private MockEndpoint mockResult;
@Resource
private RedisTemplate<String, String> redisTemplate;
static {
CONNECTION_FACTORY.afterPropertiesSet();
}
@Override
protected void bindToRegistry(Registry registry) throws Exception {
redisTemplate = new RedisTemplate<>();
redisTemplate.setConnectionFactory(CONNECTION_FACTORY);
redisTemplate.afterPropertiesSet();
registry.bind("redisTemplate", redisTemplate);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
idempotentRepository = new SpringRedisStringIdempotentRepository(
redisTemplate,
"redis-idempotent-repository");
RouteBuilder rb = new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start").idempotentConsumer(body(), idempotentRepository).to("mock:result");
}
};
return rb;
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.setTracing(true);
return context;
}
@Test
public void blockDoubleSubmission() throws Exception {
mockResult.expectedMessageCount(3);
mockResult.setResultWaitTime(5000);
producer.sendBody("abc");
producer.sendBody("bcd");
producer.sendBody("abc");
producer.sendBody("xyz");
assertTrue(idempotentRepository.contains("abc"));
assertTrue(idempotentRepository.contains("bcd"));
assertTrue(idempotentRepository.contains("xyz"));
assertFalse(idempotentRepository.contains("mustNotContain"));
mockResult.assertIsSatisfied();
}
@Test
public void clearIdempotentRepository() {
for (int i = 0; i < 10000; i++) {
redisTemplate.opsForValue().set("key4711", "value4711");
}
assertEquals("value4711", redisTemplate.opsForValue().get("key4711"));
producer.sendBody("abc");
producer.sendBody("bcd");
redisTemplate.opsForValue().set("redis1", "1");
redisTemplate.opsForValue().set("different:xyz", "2");
assertTrue(idempotentRepository.contains("abc"));
assertTrue(idempotentRepository.contains("bcd"));
idempotentRepository.clear();
assertFalse(idempotentRepository.contains("abc"));
assertFalse(idempotentRepository.contains("bcd"));
assertFalse(idempotentRepository.contains("redis1"));
assertFalse(idempotentRepository.contains("different:xyz"));
assertEquals("1", redisTemplate.opsForValue().get("redis1"));
assertEquals("2", redisTemplate.opsForValue().get("different:xyz"));
}
@Test
public void expireIdempotent() throws Exception {
idempotentRepository.setExpiry(5L);
producer.sendBody("abc");
assertTrue(idempotentRepository.contains("abc"));
Thread.sleep(5000);
assertFalse(idempotentRepository.contains("abc"));
}
}
| RedisStringIdempotentRepositoryManualIT |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStoreUtilsTests.java | {
"start": 1251,
"end": 8211
} | class ____ extends ESTestCase {
@NotMultiProjectCapable(description = "Watcher is not available in serverless")
private final ProjectId projectId = ProjectId.DEFAULT;
public void testGetConcreteIndexForDataStream() {
String dataStreamName = randomAlphaOfLength(20);
ProjectMetadata.Builder metadataBuilder = ProjectMetadata.builder(projectId);
Map<String, Metadata.ProjectCustom> customsBuilder = new HashMap<>();
Map<String, DataStream> dataStreams = new HashMap<>();
Map<String, IndexMetadata> indexMetadataMapBuilder = new HashMap<>();
List<String> indexNames = new ArrayList<>();
for (int i = 0; i < randomIntBetween(2, 10); i++) {
String indexName = dataStreamName + "_" + i;
indexNames.add(indexName);
indexMetadataMapBuilder.put(indexName, createIndexMetaData(indexName, null));
}
metadataBuilder.indices(indexMetadataMapBuilder);
dataStreams.put(
dataStreamName,
DataStreamTestHelper.newInstance(
dataStreamName,
indexNames.stream().map(indexName -> new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE)).collect(Collectors.toList())
)
);
ImmutableOpenMap<String, DataStreamAlias> dataStreamAliases = ImmutableOpenMap.of();
DataStreamMetadata dataStreamMetadata = new DataStreamMetadata(
ImmutableOpenMap.<String, DataStream>builder().putAllFromMap(dataStreams).build(),
dataStreamAliases
);
customsBuilder.put(DataStreamMetadata.TYPE, dataStreamMetadata);
metadataBuilder.customs(customsBuilder);
IndexMetadata concreteIndex = WatchStoreUtils.getConcreteIndex(
dataStreamName,
Metadata.builder().put(metadataBuilder.build()).build()
);
assertNotNull(concreteIndex);
assertEquals(indexNames.get(indexNames.size() - 1), concreteIndex.getIndex().getName());
}
public void testGetConcreteIndexForAliasWithMultipleNonWritableIndices() {
String aliasName = randomAlphaOfLength(20);
ProjectMetadata.Builder metadataBuilder = ProjectMetadata.builder(projectId);
AliasMetadata.Builder aliasMetadataBuilder = new AliasMetadata.Builder(aliasName);
aliasMetadataBuilder.writeIndex(false);
AliasMetadata aliasMetadata = aliasMetadataBuilder.build();
Map<String, IndexMetadata> indexMetadataMapBuilder = new HashMap<>();
for (int i = 0; i < randomIntBetween(2, 10); i++) {
String indexName = aliasName + "_" + i;
indexMetadataMapBuilder.put(indexName, createIndexMetaData(indexName, aliasMetadata));
}
metadataBuilder.indices(indexMetadataMapBuilder);
expectThrows(
IllegalStateException.class,
() -> WatchStoreUtils.getConcreteIndex(aliasName, Metadata.builder().put(metadataBuilder.build()).build())
);
}
public void testGetConcreteIndexForAliasWithMultipleIndicesWithWritable() {
String aliasName = randomAlphaOfLength(20);
ProjectMetadata.Builder metadataBuilder = ProjectMetadata.builder(projectId);
AliasMetadata.Builder aliasMetadataBuilder = new AliasMetadata.Builder(aliasName);
aliasMetadataBuilder.writeIndex(false);
AliasMetadata nonWritableAliasMetadata = aliasMetadataBuilder.build();
AliasMetadata.Builder writableAliasMetadataBuilder = new AliasMetadata.Builder(aliasName);
writableAliasMetadataBuilder.writeIndex(true);
AliasMetadata writableAliasMetadata = writableAliasMetadataBuilder.build();
Map<String, IndexMetadata> indexMetadataMapBuilder = new HashMap<>();
List<String> indexNames = new ArrayList<>();
int indexCount = randomIntBetween(2, 10);
int writableIndexIndex = randomIntBetween(0, indexCount - 1);
for (int i = 0; i < indexCount; i++) {
String indexName = aliasName + "_" + i;
indexNames.add(indexName);
final AliasMetadata aliasMetadata;
if (i == writableIndexIndex) {
aliasMetadata = writableAliasMetadata;
} else {
aliasMetadata = nonWritableAliasMetadata;
}
indexMetadataMapBuilder.put(indexName, createIndexMetaData(indexName, aliasMetadata));
}
metadataBuilder.indices(indexMetadataMapBuilder);
IndexMetadata concreteIndex = WatchStoreUtils.getConcreteIndex(aliasName, Metadata.builder().put(metadataBuilder.build()).build());
assertNotNull(concreteIndex);
assertEquals(indexNames.get(writableIndexIndex), concreteIndex.getIndex().getName());
}
public void testGetConcreteIndexForAliasWithOneNonWritableIndex() {
String aliasName = randomAlphaOfLength(20);
ProjectMetadata.Builder metadataBuilder = ProjectMetadata.builder(projectId);
AliasMetadata.Builder aliasMetadataBuilder = new AliasMetadata.Builder(aliasName);
aliasMetadataBuilder.writeIndex(false);
AliasMetadata aliasMetadata = aliasMetadataBuilder.build();
Map<String, IndexMetadata> indexMetadataMapBuilder = new HashMap<>();
String indexName = aliasName + "_" + 0;
indexMetadataMapBuilder.put(indexName, createIndexMetaData(indexName, aliasMetadata));
metadataBuilder.indices(indexMetadataMapBuilder);
IndexMetadata concreteIndex = WatchStoreUtils.getConcreteIndex(aliasName, Metadata.builder().put(metadataBuilder.build()).build());
assertNotNull(concreteIndex);
assertEquals(indexName, concreteIndex.getIndex().getName());
}
public void testGetConcreteIndexForConcreteIndex() {
String indexName = randomAlphaOfLength(20);
ProjectMetadata.Builder metadataBuilder = ProjectMetadata.builder(projectId);
Map<String, IndexMetadata> indexMetadataMapBuilder = new HashMap<>();
indexMetadataMapBuilder.put(indexName, createIndexMetaData(indexName, null));
metadataBuilder.indices(indexMetadataMapBuilder);
IndexMetadata concreteIndex = WatchStoreUtils.getConcreteIndex(indexName, Metadata.builder().put(metadataBuilder.build()).build());
assertNotNull(concreteIndex);
assertEquals(indexName, concreteIndex.getIndex().getName());
}
private IndexMetadata createIndexMetaData(String indexName, AliasMetadata aliasMetadata) {
IndexMetadata.Builder indexMetadataBuilder = new IndexMetadata.Builder(indexName);
Settings settings = indexSettings(1, 1).put(IndexMetadata.SETTING_PRIORITY, 5)
.put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), IndexVersion.current())
.build();
indexMetadataBuilder.settings(settings);
if (aliasMetadata != null) {
indexMetadataBuilder.putAlias(aliasMetadata);
}
return indexMetadataBuilder.build();
}
}
| WatchStoreUtilsTests |
java | apache__flink | flink-core-api/src/main/java/org/apache/flink/api/common/state/ListStateDeclaration.java | {
"start": 1444,
"end": 2114
} | enum ____ {
/**
* The whole state is logically a concatenation of all lists. On restore/redistribution, the
* list is evenly divided into as many sub-lists as there are parallel operators. Each
* operator gets a sub-list, which can be empty, or contain one or more elements.
*/
SPLIT,
/**
* The whole state is logically a concatenation of all lists. On restore/redistribution,
* each operator gets the complete list of state elements.
*/
UNION
}
/** Get type descriptor of this list state's element. */
TypeDescriptor<T> getTypeDescriptor();
}
| RedistributionStrategy |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/progress/SequenceNumber.java | {
"start": 173,
"end": 359
} | class ____ {
private static int sequenceNumber = 1;
public static synchronized int next() {
return sequenceNumber++;
}
private SequenceNumber() {}
}
| SequenceNumber |
java | apache__camel | core/camel-util/src/main/java/org/apache/camel/util/function/ThrowingBiConsumer.java | {
"start": 1238,
"end": 1596
} | interface ____<I1, I2, T extends Throwable> {
/**
* Performs this operation on the given arguments, potentially throwing an exception.
*
* @param i1 the first function argument
* @param i2 the first function argument
* @throws T the exception that may be thrown
*/
void accept(I1 i1, I2 i2) throws T;
}
| ThrowingBiConsumer |
java | hibernate__hibernate-orm | hibernate-spatial/src/main/java/org/hibernate/spatial/dialect/oracle/OracleSQLMMFunctionDescriptors.java | {
"start": 745,
"end": 4282
} | class ____ implements KeyedSqmFunctionDescriptors {
private final Map<FunctionKey, SqmFunctionDescriptor> map = new HashMap<>();
private final BasicTypeRegistry typeRegistry;
public OracleSQLMMFunctionDescriptors(FunctionContributions functionContributions) {
typeRegistry = functionContributions.getTypeConfiguration().getBasicTypeRegistry();
registerSQLMMFunctions();
}
private void registerSQLMMFunctions() {
addSTFunction( CommonSpatialFunction.ST_ASTEXT, "GET_WKT", StandardBasicTypes.STRING );
addSTFunction( CommonSpatialFunction.ST_GEOMETRYTYPE, StandardBasicTypes.STRING );
addSTFunction( CommonSpatialFunction.ST_ASBINARY, "GET_WKB", StandardBasicTypes.BINARY );
addSTFunction( CommonSpatialFunction.ST_DIMENSION, StandardBasicTypes.INTEGER );
addSTFunction( CommonSpatialFunction.ST_ISEMPTY, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_SRID, StandardBasicTypes.INTEGER );
addSTFunction( CommonSpatialFunction.ST_ISSIMPLE, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_OVERLAPS, "ST_OVERLAP", StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_INTERSECTS, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_CONTAINS, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_DISJOINT, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_CROSSES, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_CONTAINS, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_TOUCHES, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_WITHIN, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_EQUALS, StandardBasicTypes.BOOLEAN );
addSTFunction( CommonSpatialFunction.ST_DISTANCE, StandardBasicTypes.DOUBLE );
addSTRelateFunction();
addSTFunction( CommonSpatialFunction.ST_DIFFERENCE );
addSTFunction( CommonSpatialFunction.ST_INTERSECTION );
addSTFunction( CommonSpatialFunction.ST_SYMDIFFERENCE );
addSTFunction( CommonSpatialFunction.ST_BUFFER );
addSTFunction( CommonSpatialFunction.ST_UNION );
addSTFunction( CommonSpatialFunction.ST_BOUNDARY );
addSTFunction( CommonSpatialFunction.ST_CONVEXHULL );
addSTFunction( CommonSpatialFunction.ST_ENVELOPE );
}
private <T> void addSTFunction(CommonSpatialFunction func, String stMethod, BasicTypeReference<T> tpe) {
map.put( func.getKey(), new OracleSpatialSQLMMFunction(
func.getKey().getName(),
stMethod,
func.getNumArgs(),
StandardFunctionReturnTypeResolvers.invariant(
typeRegistry.resolve( tpe ) )
) );
}
private void addSTFunction(CommonSpatialFunction func, String stMethod) {
map.put(
func.getKey(),
new OracleSpatialSQLMMFunction(
func.getKey().getName(),
stMethod,
func.getNumArgs(),
StandardFunctionReturnTypeResolvers.useFirstNonNull(),
true
)
);
}
private <T> void addSTFunction(CommonSpatialFunction func, BasicTypeReference<T> tpe) {
addSTFunction( func, func.getKey().getName().toUpperCase( Locale.ROOT ), tpe );
}
private void addSTFunction(CommonSpatialFunction func) {
addSTFunction( func, func.getKey().getName().toUpperCase( Locale.ROOT ) );
}
private void addSTRelateFunction() {
map.put( CommonSpatialFunction.ST_RELATE.getKey(), new STRelateFunction( typeRegistry ) );
}
@Override
public Map<FunctionKey, SqmFunctionDescriptor> asMap() {
return Collections.unmodifiableMap( map );
}
}
| OracleSQLMMFunctionDescriptors |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java | {
"start": 1490,
"end": 1565
} | class ____ implements ToXContentFragment, Writeable {
public | RecoveryState |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/data/MapEntry.java | {
"start": 995,
"end": 2280
} | class ____<K, V> implements Map.Entry<K, V> {
public final K key;
public final V value;
/**
* Creates a new {@link MapEntry}.
*
* @param <K> the type of the key of this entry.
* @param <V> the type of the value of this entry.
* @param key the key of the entry to create.
* @param value the value of the entry to create.
* @return the created {@code MapEntry}.
*/
public static <K, V> MapEntry<K, V> entry(K key, V value) {
return new MapEntry<>(key, value);
}
private MapEntry(K key, V value) {
this.key = key;
this.value = value;
}
@Override
public boolean equals(Object object) {
if (this == object) return true;
if (!(object instanceof Map.Entry)) return false;
Map.Entry<?, ?> that = (Map.Entry<?, ?>) object;
return Objects.equals(key, that.getKey()) && Objects.equals(value, that.getValue());
}
@Override
public int hashCode() {
return Objects.hashCode(key) ^ Objects.hashCode(value);
}
@Override
public String toString() {
return CONFIGURATION_PROVIDER.representation().toStringOf(this);
}
@Override
public K getKey() {
return key;
}
@Override
public V getValue() {
return value;
}
/**
* Always throws {@link UnsupportedOperationException} as this | MapEntry |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/LeastSquaresOnlineRegression.java | {
"start": 547,
"end": 4374
} | class ____ {
private static final double SINGLE_VALUE_DECOMPOSITION_MAX_COND = 1e+15;
private final RunningStatistics statistics;
private final Array2DRowRealMatrix Nx;
private final Array2DRowRealMatrix Ny;
private final Array2DRowRealMatrix Nz;
private final int N;
LeastSquaresOnlineRegression(int degrees) {
this.N = degrees + 1;
statistics = new RunningStatistics(3 * N);
this.Nx = new Array2DRowRealMatrix(this.N, this.N);
this.Ny = new Array2DRowRealMatrix(this.N, 1);
this.Nz = new Array2DRowRealMatrix(this.N, 1);
}
double rSquared() {
if (statistics.count <= 0.0) {
return 0.0;
}
double var = statistics.stats[3 * N - 1] - statistics.stats[2 * N - 1] * statistics.stats[2 * N - 1];
double residualVariance = var;
int n = N + 1;
boolean done = false;
while (--n > 0 && done == false) {
if (n == 1) {
return 0.0;
} else if (n == this.N) {
OptionalDouble maybeResidualVar = residualVariance(N, Nx, Ny, Nz);
if (maybeResidualVar.isPresent()) {
residualVariance = maybeResidualVar.getAsDouble();
done = true;
}
} else {
Array2DRowRealMatrix x = new Array2DRowRealMatrix(n, n);
Array2DRowRealMatrix y = new Array2DRowRealMatrix(n, 1);
Array2DRowRealMatrix z = new Array2DRowRealMatrix(n, 1);
OptionalDouble maybeResidualVar = residualVariance(n, x, y, z);
if (maybeResidualVar.isPresent()) {
residualVariance = maybeResidualVar.getAsDouble();
done = true;
}
}
}
return Math.min(Math.max(1.0 - residualVariance / var, 0.0), 1.0);
}
private double[] statisticAdj(double x, double y) {
double[] d = new double[3 * N];
double xi = 1.0;
for (int i = 0; i < N; ++i, xi *= x) {
d[i] = xi;
d[i + 2 * N - 1] = xi * y;
}
for (int i = N; i < 2 * N - 1; ++i, xi *= x) {
d[i] = xi;
}
d[3 * N - 1] = y * y;
return d;
}
void add(double x, double y, double weight) {
statistics.add(statisticAdj(x, y), weight);
}
void remove(double x, double y, double weight) {
statistics.remove(statisticAdj(x, y), weight);
}
private OptionalDouble residualVariance(int n, Array2DRowRealMatrix x, Array2DRowRealMatrix y, Array2DRowRealMatrix z) {
if (n == 1) {
return OptionalDouble.of(statistics.stats[3 * N - 1] - statistics.stats[2 * N - 1] * statistics.stats[2 * N - 1]);
}
for (int i = 0; i < n; ++i) {
x.setEntry(i, i, statistics.stats[i + i]);
y.setEntry(i, 0, statistics.stats[i + 2 * N - 1]);
z.setEntry(i, 0, statistics.stats[i]);
for (int j = i + 1; j < n; ++j) {
x.setEntry(i, j, statistics.stats[i + j]);
x.setEntry(j, i, statistics.stats[i + j]);
}
}
SingularValueDecomposition svd = new SingularValueDecomposition(x);
double[] singularValues = svd.getSingularValues();
if (singularValues[0] > SINGLE_VALUE_DECOMPOSITION_MAX_COND * singularValues[n - 1]) {
return OptionalDouble.empty();
}
RealMatrix r = svd.getSolver().solve(y);
RealMatrix yr = y.transpose().multiply(r);
RealMatrix zr = z.transpose().multiply(r);
double t = statistics.stats[2 * N - 1] - zr.getEntry(0, 0);
return OptionalDouble.of((statistics.stats[3 * N - 1] - yr.getEntry(0, 0)) - (t * t));
}
private static | LeastSquaresOnlineRegression |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java | {
"start": 1378,
"end": 6906
} | class ____ extends AggregatorTestCase {
private static final String LONG_FIELD = "long";
private static final List<Long> dataset;
static {
List<Long> d = new ArrayList<>(45);
for (int i = 0; i < 10; i++) {
for (int j = 0; j < i; j++) {
d.add((long) i);
}
}
dataset = d;
}
public void testMatchNoDocs() throws IOException {
testSearchCase(
new MatchNoDocsQuery(),
dataset,
aggregation -> aggregation.field(LONG_FIELD),
agg -> assertEquals(0, agg.getBuckets().size()),
null // without type hint
);
testSearchCase(
new MatchNoDocsQuery(),
dataset,
aggregation -> aggregation.field(LONG_FIELD),
agg -> assertEquals(0, agg.getBuckets().size()),
ValueType.NUMERIC // with type hint
);
}
public void testMatchAllDocs() throws IOException {
Query query = new MatchAllDocsQuery();
testSearchCase(query, dataset, aggregation -> aggregation.field(LONG_FIELD), agg -> {
assertEquals(9, agg.getBuckets().size());
for (int i = 0; i < 9; i++) {
LongTerms.Bucket bucket = (LongTerms.Bucket) agg.getBuckets().get(i);
assertThat(bucket.getKey(), equalTo(9L - i));
assertThat(bucket.getDocCount(), equalTo(9L - i));
}
},
null // without type hint
);
testSearchCase(query, dataset, aggregation -> aggregation.field(LONG_FIELD), agg -> {
assertEquals(9, agg.getBuckets().size());
for (int i = 0; i < 9; i++) {
LongTerms.Bucket bucket = (LongTerms.Bucket) agg.getBuckets().get(i);
assertThat(bucket.getKey(), equalTo(9L - i));
assertThat(bucket.getDocCount(), equalTo(9L - i));
}
},
ValueType.NUMERIC // with type hint
);
}
public void testBadIncludeExclude() throws IOException {
IncludeExclude includeExclude = new IncludeExclude("foo", null, null, null);
// Numerics don't support any regex include/exclude, so should fail no matter what we do
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> testSearchCase(
new MatchNoDocsQuery(),
dataset,
aggregation -> aggregation.field(LONG_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
agg -> fail("test should have failed with exception"),
null
)
);
assertThat(
e.getMessage(),
equalTo(
"Aggregation [_name] cannot support regular expression style "
+ "include/exclude settings as they can only be applied to string fields. Use an array of numeric "
+ "values for include/exclude clauses used to filter numeric fields"
)
);
e = expectThrows(
IllegalArgumentException.class,
() -> testSearchCase(
new MatchNoDocsQuery(),
dataset,
aggregation -> aggregation.field(LONG_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
agg -> fail("test should have failed with exception"),
ValueType.NUMERIC // with type hint
)
);
assertThat(
e.getMessage(),
equalTo(
"Aggregation [_name] cannot support regular expression style "
+ "include/exclude settings as they can only be applied to string fields. Use an array of numeric "
+ "values for include/exclude clauses used to filter numeric fields"
)
);
}
private void testSearchCase(
Query query,
List<Long> dataset,
Consumer<TermsAggregationBuilder> configure,
Consumer<InternalMappedTerms<?, ?>> verify,
ValueType valueType
) throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
Document document = new Document();
for (Long value : dataset) {
document.add(new SortedNumericDocValuesField(LONG_FIELD, value));
document.add(new LongPoint(LONG_FIELD, value));
indexWriter.addDocument(document);
document.clear();
}
}
try (DirectoryReader indexReader = DirectoryReader.open(directory)) {
TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name");
if (valueType != null) {
aggregationBuilder.userValueTypeHint(valueType);
}
if (configure != null) {
configure.accept(aggregationBuilder);
}
MappedFieldType longFieldType = new NumberFieldMapper.NumberFieldType(LONG_FIELD, NumberFieldMapper.NumberType.LONG);
InternalMappedTerms<?, ?> rareTerms = searchAndReduce(
indexReader,
new AggTestConfig(aggregationBuilder, longFieldType).withQuery(query)
);
verify.accept(rareTerms);
}
}
}
}
| NumericTermsAggregatorTests |
java | google__dagger | javatests/dagger/functional/componentdependency/BoxedPrimitives.java | {
"start": 784,
"end": 881
} | interface ____ {
int primitive();
}
@Component(dependencies = Dependency.class)
| Dependency |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java | {
"start": 27350,
"end": 28474
} | class ____ extends Numeric {
protected final IndexNumericFieldData indexFieldData;
public FieldData(IndexNumericFieldData indexFieldData) {
this.indexFieldData = indexFieldData;
}
@Override
public boolean isFloatingPoint() {
return indexFieldData.getNumericType().isFloatingPoint();
}
@Override
public SortedBinaryDocValues bytesValues(LeafReaderContext context) {
return indexFieldData.load(context).getBytesValues();
}
@Override
public SortedNumericLongValues longValues(LeafReaderContext context) {
return indexFieldData.load(context).getLongValues();
}
@Override
public SortedNumericDoubleValues doubleValues(LeafReaderContext context) {
return indexFieldData.load(context).getDoubleValues();
}
}
/**
* {@link ValuesSource} implementation for stand alone scripts returning a Numeric value
*/
public static | FieldData |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit3TestNotRunTest.java | {
"start": 4649,
"end": 5073
} | class ____ extends TestCase {
public void testBasic() {}
public void testMoreSpaces() {}
public void testMultiline() {}
}
""")
.doTest();
}
@Test
public void privateNamedTest() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import junit.framework.TestCase;
public | PositiveCases |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_279.java | {
"start": 826,
"end": 4123
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select l.*\n" +
"from (select\n" +
" h.name COLLATE utf8_unicode_ci '供应商名称',\n" +
" contact_name COLLATE utf8_unicode_ci '联系人',\n" +
" contact_phone COLLATE utf8_unicode_ci '电话',\n" +
" address COLLATE utf8_unicode_ci '地址',\n" +
" protocol_district COLLATE utf8_unicode_ci '区划'\n" +
" from hotel_base_protocol h\n" +
" where h.is_in_time = 1 and h.status = 6 and h.enabled = 1 and h.is_internal_dept = 0\n" +
" UNION\n" +
" select\n" +
" supplier_name COLLATE utf8_unicode_ci '供应商名称',\n" +
" i.contact_name COLLATE utf8_unicode_ci '联系人',\n" +
" i.contact_phone COLLATE utf8_unicode_ci '电话',\n" +
" i.contact_addr COLLATE utf8_unicode_ci '地址',\n" +
" district_code COLLATE utf8_unicode_ci '区划'\n" +
" from insurance_protocol_t i\n" +
" where i.status = 6 and i.in_service = 1 and i.in_time = 1\n" +
" UNION\n" +
" select\n" +
" supplier_name '供应商名称',\n" +
" coordinator '联系人',\n" +
" cellphone '电话',\n" +
" address '地址',\n" +
" region '区划'\n" +
" from vehicle_repairment_protocol) l\n" +
"order by '区划'";
SQLStatement stmt = SQLUtils
.parseSingleStatement(sql, DbType.mysql);
assertEquals("SELECT l.*\n" +
"FROM (\n" +
"\tSELECT h.name COLLATE utf8_unicode_ci AS '供应商名称', contact_name COLLATE utf8_unicode_ci AS '联系人'\n" +
"\t\t, contact_phone COLLATE utf8_unicode_ci AS '电话', address COLLATE utf8_unicode_ci AS '地址'\n" +
"\t\t, protocol_district COLLATE utf8_unicode_ci AS '区划'\n" +
"\tFROM hotel_base_protocol h\n" +
"\tWHERE h.is_in_time = 1\n" +
"\t\tAND h.status = 6\n" +
"\t\tAND h.enabled = 1\n" +
"\t\tAND h.is_internal_dept = 0\n" +
"\tUNION\n" +
"\tSELECT supplier_name COLLATE utf8_unicode_ci AS '供应商名称', i.contact_name COLLATE utf8_unicode_ci AS '联系人'\n" +
"\t\t, i.contact_phone COLLATE utf8_unicode_ci AS '电话', i.contact_addr COLLATE utf8_unicode_ci AS '地址'\n" +
"\t\t, district_code COLLATE utf8_unicode_ci AS '区划'\n" +
"\tFROM insurance_protocol_t i\n" +
"\tWHERE i.status = 6\n" +
"\t\tAND i.in_service = 1\n" +
"\t\tAND i.in_time = 1\n" +
"\tUNION\n" +
"\tSELECT supplier_name AS '供应商名称', coordinator AS '联系人', cellphone AS '电话', address AS '地址', region AS '区划'\n" +
"\tFROM vehicle_repairment_protocol\n" +
") l\n" +
"ORDER BY '区划'", stmt.toString());
}
}
| MySqlSelectTest_279 |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/function/FailableToDoubleFunction.java | {
"start": 1147,
"end": 1900
} | interface ____<T, E extends Throwable> {
/** NOP singleton */
@SuppressWarnings("rawtypes")
FailableToDoubleFunction NOP = t -> 0d;
/**
* Gets the NOP singleton.
*
* @param <T> the type of the argument to the function
* @param <E> The kind of thrown exception or error.
* @return The NOP singleton.
*/
@SuppressWarnings("unchecked")
static <T, E extends Throwable> FailableToDoubleFunction<T, E> nop() {
return NOP;
}
/**
* Applies this function to the given arguments.
*
* @param t the first function argument
* @return the function result
* @throws E Thrown when the function fails.
*/
double applyAsDouble(T t) throws E;
}
| FailableToDoubleFunction |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java | {
"start": 2136,
"end": 25610
} | class ____ extends ESTestCase {
public void testEqualsAndHashCode() {
RepositoryData repositoryData1 = generateRandomRepoData();
RepositoryData repositoryData2 = repositoryData1.copy();
assertEquals(repositoryData1, repositoryData2);
assertEquals(repositoryData1.hashCode(), repositoryData2.hashCode());
}
public void testIndicesToUpdateAfterRemovingSnapshot() {
final RepositoryData repositoryData = generateRandomRepoData();
final List<IndexId> indicesBefore = List.copyOf(repositoryData.getIndices().values());
final SnapshotId randomSnapshot = randomFrom(repositoryData.getSnapshotIds());
final IndexId[] indicesToUpdate = indicesBefore.stream().filter(index -> {
final List<SnapshotId> snapshotIds = repositoryData.getSnapshots(index);
return snapshotIds.contains(randomSnapshot) && snapshotIds.size() > 1;
}).toArray(IndexId[]::new);
assertThat(getIndicesToUpdateAfterRemovingSnapshot(repositoryData, randomSnapshot), containsInAnyOrder(indicesToUpdate));
}
public void testXContent() throws IOException {
RepositoryData repositoryData = generateRandomRepoData().withClusterUuid(UUIDs.randomBase64UUID(random()));
XContentBuilder builder = JsonXContent.contentBuilder();
repositoryData.snapshotsToXContent(builder, IndexVersion.current());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
long gen = (long) randomIntBetween(0, 500);
RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen, randomBoolean());
assertEquals(repositoryData, fromXContent);
assertEquals(gen, fromXContent.getGenId());
}
}
public void testAddSnapshots() {
RepositoryData repositoryData = generateRandomRepoData();
// test that adding the same snapshot id to the repository data throws an exception
Map<String, IndexId> indexIdMap = repositoryData.getIndices();
// test that adding a snapshot and its indices works
SnapshotId newSnapshot = new SnapshotId(randomAlphaOfLength(7), UUIDs.randomBase64UUID());
List<IndexId> indices = new ArrayList<>();
Set<IndexId> newIndices = new HashSet<>();
int numNew = randomIntBetween(1, 10);
final ShardGenerations.Builder builder = ShardGenerations.builder();
for (int i = 0; i < numNew; i++) {
IndexId indexId = new IndexId(randomAlphaOfLength(7), UUIDs.randomBase64UUID());
newIndices.add(indexId);
indices.add(indexId);
builder.put(indexId, 0, ShardGeneration.newGeneration(random()));
}
int numOld = randomIntBetween(1, indexIdMap.size());
List<String> indexNames = new ArrayList<>(indexIdMap.keySet());
for (int i = 0; i < numOld; i++) {
final IndexId indexId = indexIdMap.get(indexNames.get(i));
indices.add(indexId);
builder.put(indexId, 0, ShardGeneration.newGeneration(random()));
}
final ShardGenerations shardGenerations = builder.build();
final Map<IndexId, String> indexLookup = shardGenerations.indices()
.stream()
.collect(Collectors.toMap(Function.identity(), ind -> randomAlphaOfLength(256)));
RepositoryData newRepoData = repositoryData.addSnapshot(
newSnapshot,
new RepositoryData.SnapshotDetails(
randomFrom(SnapshotState.SUCCESS, SnapshotState.PARTIAL, SnapshotState.FAILED),
randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomAlphaOfLength(10)
),
new UpdatedShardGenerations(shardGenerations, ShardGenerations.EMPTY),
indexLookup,
indexLookup.values().stream().collect(Collectors.toMap(Function.identity(), ignored -> UUIDs.randomBase64UUID(random())))
);
// verify that the new repository data has the new snapshot and its indices
assertTrue(newRepoData.getSnapshotIds().contains(newSnapshot));
for (IndexId indexId : indices) {
List<SnapshotId> snapshotIds = newRepoData.getSnapshots(indexId);
assertTrue(snapshotIds.contains(newSnapshot));
if (newIndices.contains(indexId)) {
assertEquals(snapshotIds.size(), 1); // if it was a new index, only the new snapshot should be in its set
}
}
assertEquals(repositoryData.getGenId(), newRepoData.getGenId());
}
public void testInitIndices() {
final int numSnapshots = randomIntBetween(1, 30);
final Map<String, SnapshotId> snapshotIds = Maps.newMapWithExpectedSize(numSnapshots);
final Map<String, RepositoryData.SnapshotDetails> snapshotsDetails = Maps.newMapWithExpectedSize(numSnapshots);
for (int i = 0; i < numSnapshots; i++) {
final SnapshotId snapshotId = new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID());
snapshotIds.put(snapshotId.getUUID(), snapshotId);
snapshotsDetails.put(
snapshotId.getUUID(),
new RepositoryData.SnapshotDetails(
randomFrom(SnapshotState.values()),
randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomAlphaOfLength(10)
)
);
}
RepositoryData repositoryData = new RepositoryData(
MISSING_UUID,
EMPTY_REPO_GEN,
snapshotIds,
Collections.emptyMap(),
Collections.emptyMap(),
ShardGenerations.EMPTY,
IndexMetaDataGenerations.EMPTY,
MISSING_UUID
);
// test that initializing indices works
Map<IndexId, List<SnapshotId>> indices = randomIndices(snapshotIds);
RepositoryData newRepoData = new RepositoryData(
UUIDs.randomBase64UUID(random()),
repositoryData.getGenId(),
snapshotIds,
snapshotsDetails,
indices,
ShardGenerations.EMPTY,
IndexMetaDataGenerations.EMPTY,
UUIDs.randomBase64UUID(random())
);
List<SnapshotId> expected = new ArrayList<>(repositoryData.getSnapshotIds());
Collections.sort(expected);
List<SnapshotId> actual = new ArrayList<>(newRepoData.getSnapshotIds());
Collections.sort(actual);
assertEquals(expected, actual);
for (IndexId indexId : indices.keySet()) {
assertEquals(indices.get(indexId), newRepoData.getSnapshots(indexId));
}
}
public void testRemoveSnapshot() {
RepositoryData repositoryData = generateRandomRepoData();
List<SnapshotId> snapshotIds = new ArrayList<>(repositoryData.getSnapshotIds());
assertThat(snapshotIds.size(), greaterThan(0));
SnapshotId removedSnapshotId = snapshotIds.remove(randomIntBetween(0, snapshotIds.size() - 1));
RepositoryData newRepositoryData = repositoryData.removeSnapshots(Collections.singleton(removedSnapshotId), ShardGenerations.EMPTY);
// make sure the repository data's indices no longer contain the removed snapshot
for (final IndexId indexId : newRepositoryData.getIndices().values()) {
assertFalse(newRepositoryData.getSnapshots(indexId).contains(removedSnapshotId));
}
}
public void testResolveIndexId() {
RepositoryData repositoryData = generateRandomRepoData();
Map<String, IndexId> indices = repositoryData.getIndices();
Set<String> indexNames = indices.keySet();
assertThat(indexNames.size(), greaterThan(0));
String indexName = indexNames.iterator().next();
IndexId indexId = indices.get(indexName);
assertEquals(indexId, repositoryData.resolveIndexId(indexName));
}
public void testGetSnapshotState() {
final SnapshotId snapshotId = new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID());
final SnapshotState state = randomFrom(SnapshotState.values());
final RepositoryData repositoryData = RepositoryData.EMPTY.addSnapshot(
snapshotId,
new RepositoryData.SnapshotDetails(
state,
randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomAlphaOfLength(10)
),
UpdatedShardGenerations.EMPTY,
Collections.emptyMap(),
Collections.emptyMap()
);
assertEquals(state, repositoryData.getSnapshotState(snapshotId));
assertNull(repositoryData.getSnapshotState(new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())));
}
public void testIndexThatReferencesAnUnknownSnapshot() throws IOException {
final XContent xContent = randomFrom(XContentType.values()).xContent();
final RepositoryData repositoryData = generateRandomRepoData().withClusterUuid(UUIDs.randomBase64UUID(random()));
XContentBuilder builder = XContentBuilder.builder(xContent);
repositoryData.snapshotsToXContent(builder, IndexVersion.current());
RepositoryData parsedRepositoryData;
try (XContentParser xParser = createParser(builder)) {
parsedRepositoryData = RepositoryData.snapshotsFromXContent(xParser, repositoryData.getGenId(), randomBoolean());
}
assertEquals(repositoryData, parsedRepositoryData);
Map<String, SnapshotId> snapshotIds = new HashMap<>();
Map<String, RepositoryData.SnapshotDetails> snapshotsDetails = new HashMap<>();
for (SnapshotId snapshotId : parsedRepositoryData.getSnapshotIds()) {
snapshotIds.put(snapshotId.getUUID(), snapshotId);
snapshotsDetails.put(
snapshotId.getUUID(),
new RepositoryData.SnapshotDetails(
parsedRepositoryData.getSnapshotState(snapshotId),
parsedRepositoryData.getVersion(snapshotId),
parsedRepositoryData.getSnapshotDetails(snapshotId).getStartTimeMillis(),
parsedRepositoryData.getSnapshotDetails(snapshotId).getEndTimeMillis(),
randomAlphaOfLength(10)
)
);
}
final IndexId corruptedIndexId = randomFrom(parsedRepositoryData.getIndices().values());
Map<IndexId, List<SnapshotId>> indexSnapshots = new HashMap<>();
final ShardGenerations.Builder shardGenBuilder = ShardGenerations.builder();
for (Map.Entry<String, IndexId> snapshottedIndex : parsedRepositoryData.getIndices().entrySet()) {
IndexId indexId = snapshottedIndex.getValue();
List<SnapshotId> snapshotsIds = new ArrayList<>(parsedRepositoryData.getSnapshots(indexId));
if (corruptedIndexId.equals(indexId)) {
snapshotsIds.add(new SnapshotId("_uuid", "_does_not_exist"));
}
indexSnapshots.put(indexId, snapshotsIds);
final int shardCount = randomIntBetween(1, 10);
for (int i = 0; i < shardCount; ++i) {
shardGenBuilder.put(indexId, i, ShardGeneration.newGeneration(random()));
}
}
assertNotNull(corruptedIndexId);
RepositoryData corruptedRepositoryData = new RepositoryData(
parsedRepositoryData.getUuid(),
parsedRepositoryData.getGenId(),
snapshotIds,
snapshotsDetails,
indexSnapshots,
shardGenBuilder.build(),
IndexMetaDataGenerations.EMPTY,
UUIDs.randomBase64UUID(random())
);
final XContentBuilder corruptedBuilder = XContentBuilder.builder(xContent);
corruptedRepositoryData.snapshotsToXContent(corruptedBuilder, IndexVersion.current());
try (XContentParser xParser = createParser(corruptedBuilder)) {
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> RepositoryData.snapshotsFromXContent(xParser, corruptedRepositoryData.getGenId(), randomBoolean())
);
assertThat(
e.getMessage(),
equalTo(
"Detected a corrupted repository, index "
+ corruptedIndexId
+ " references an unknown "
+ "snapshot uuid [_does_not_exist]"
)
);
}
}
public void testIndexThatReferenceANullSnapshot() throws IOException {
final XContentBuilder builder = XContentBuilder.builder(randomFrom(XContentType.JSON).xContent());
builder.startObject();
{
builder.startArray("snapshots");
builder.value(new SnapshotId("_name", "_uuid"));
builder.endArray();
builder.startObject("indices");
{
builder.startObject("docs");
{
builder.field("id", "_id");
builder.startArray("snapshots");
{
builder.startObject();
if (randomBoolean()) {
builder.field("name", "_name");
}
builder.endObject();
}
builder.endArray();
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
try (XContentParser xParser = createParser(builder)) {
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> RepositoryData.snapshotsFromXContent(xParser, randomNonNegativeLong(), randomBoolean())
);
assertThat(
e.getMessage(),
equalTo("Detected a corrupted repository, " + "index [docs/_id] references an unknown snapshot uuid [null]")
);
}
}
// Test removing snapshot from random data where no two snapshots share any index metadata blobs
public void testIndexMetaDataToRemoveAfterRemovingSnapshotNoSharing() {
final RepositoryData repositoryData = generateRandomRepoData();
final SnapshotId snapshotId = randomFrom(repositoryData.getSnapshotIds());
final IndexMetaDataGenerations indexMetaDataGenerations = repositoryData.indexMetaDataGenerations();
final Collection<IndexId> indicesToUpdate = getIndicesToUpdateAfterRemovingSnapshot(repositoryData, snapshotId);
final Map<IndexId, Collection<String>> identifiersToRemove = indexMetaDataGenerations.lookup.get(snapshotId)
.entrySet()
.stream()
.filter(e -> indicesToUpdate.contains(e.getKey()))
.collect(
Collectors.toMap(Map.Entry::getKey, e -> Collections.singleton(indexMetaDataGenerations.getIndexMetaBlobId(e.getValue())))
);
assertEquals(repositoryData.indexMetaDataToRemoveAfterRemovingSnapshots(Collections.singleton(snapshotId)), identifiersToRemove);
}
// Test removing snapshot from random data that has some or all index metadata shared
public void testIndexMetaDataToRemoveAfterRemovingSnapshotWithSharing() {
final RepositoryData repositoryData = generateRandomRepoData();
final ShardGenerations.Builder builder = ShardGenerations.builder();
final SnapshotId otherSnapshotId = randomFrom(repositoryData.getSnapshotIds());
final Collection<IndexId> indicesInOther = repositoryData.getIndices()
.values()
.stream()
.filter(index -> repositoryData.getSnapshots(index).contains(otherSnapshotId))
.collect(Collectors.toSet());
for (IndexId indexId : indicesInOther) {
builder.put(indexId, 0, ShardGeneration.newGeneration(random()));
}
final Map<IndexId, String> newIndices = new HashMap<>();
final Map<String, String> newIdentifiers = new HashMap<>();
final Map<IndexId, Collection<String>> removeFromOther = new HashMap<>();
for (IndexId indexId : randomSubsetOf(repositoryData.getIndices().values())) {
if (indicesInOther.contains(indexId)) {
removeFromOther.put(
indexId,
Collections.singleton(repositoryData.indexMetaDataGenerations().indexMetaBlobId(otherSnapshotId, indexId))
);
}
final String identifier = randomAlphaOfLength(20);
newIndices.put(indexId, identifier);
newIdentifiers.put(identifier, UUIDs.randomBase64UUID(random()));
builder.put(indexId, 0, ShardGeneration.newGeneration(random()));
}
final ShardGenerations shardGenerations = builder.build();
final Map<IndexId, String> indexLookup = new HashMap<>(repositoryData.indexMetaDataGenerations().lookup.get(otherSnapshotId));
indexLookup.putAll(newIndices);
final SnapshotId newSnapshot = new SnapshotId(randomAlphaOfLength(7), UUIDs.randomBase64UUID(random()));
final RepositoryData.SnapshotDetails details = new RepositoryData.SnapshotDetails(
SnapshotState.SUCCESS,
IndexVersion.current(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomAlphaOfLength(10)
);
final RepositoryData newRepoData = repositoryData.addSnapshot(
newSnapshot,
details,
new UpdatedShardGenerations(shardGenerations, ShardGenerations.EMPTY),
indexLookup,
newIdentifiers
);
assertEquals(
newRepoData.indexMetaDataToRemoveAfterRemovingSnapshots(Collections.singleton(newSnapshot)),
newIndices.entrySet()
.stream()
.collect(Collectors.toMap(Map.Entry::getKey, e -> Collections.singleton(newIdentifiers.get(e.getValue()))))
);
assertEquals(newRepoData.indexMetaDataToRemoveAfterRemovingSnapshots(Collections.singleton(otherSnapshotId)), removeFromOther);
}
public void testFailsIfMinVersionNotSatisfied() throws IOException {
final String futureVersion = Version.fromId(IndexVersion.current().id() + 1_000_000).toString();
final XContentBuilder builder = XContentBuilder.builder(randomFrom(XContentType.JSON).xContent());
builder.startObject();
{
builder.field("min_version", futureVersion);
builder.field("junk", "should not get this far");
}
builder.endObject();
try (XContentParser xParser = createParser(builder)) {
IllegalStateException e = expectThrows(
IllegalStateException.class,
() -> RepositoryData.snapshotsFromXContent(xParser, randomNonNegativeLong(), randomBoolean())
);
assertThat(
e.getMessage(),
equalTo("this snapshot repository format requires Elasticsearch version [" + futureVersion + "] or later")
);
}
}
public void testToString() {
final var repositoryData = generateRandomRepoData();
assertThat(
repositoryData.toString(),
allOf(
containsString("RepositoryData"),
containsString(repositoryData.getUuid()),
containsString(Long.toString(repositoryData.getGenId())),
not(containsString("@")) // not the default Object#toString which does a very expensive hashcode computation
)
);
}
public static RepositoryData generateRandomRepoData() {
final int numIndices = randomIntBetween(1, 30);
final List<IndexId> indices = new ArrayList<>(numIndices);
for (int i = 0; i < numIndices; i++) {
indices.add(new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID()));
}
final int numSnapshots = randomIntBetween(1, 30);
RepositoryData repositoryData = RepositoryData.EMPTY;
for (int i = 0; i < numSnapshots; i++) {
final SnapshotId snapshotId = new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID());
final List<IndexId> someIndices = indices.subList(0, randomIntBetween(1, numIndices));
final ShardGenerations.Builder builder = ShardGenerations.builder();
for (IndexId someIndex : someIndices) {
final int shardCount = randomIntBetween(1, 10);
for (int j = 0; j < shardCount; ++j) {
final ShardGeneration shardGeneration = randomBoolean() ? null : ShardGeneration.newGeneration(random());
builder.put(someIndex, j, shardGeneration);
}
}
final Map<IndexId, String> indexLookup = someIndices.stream()
.collect(Collectors.toMap(Function.identity(), ind -> randomAlphaOfLength(256)));
repositoryData = repositoryData.addSnapshot(
snapshotId,
new RepositoryData.SnapshotDetails(
randomFrom(SnapshotState.values()),
randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomAlphaOfLength(10)
),
new UpdatedShardGenerations(builder.build(), ShardGenerations.EMPTY),
indexLookup,
indexLookup.values().stream().collect(Collectors.toMap(Function.identity(), ignored -> UUIDs.randomBase64UUID(random())))
);
}
return repositoryData;
}
private static Map<IndexId, List<SnapshotId>> randomIndices(final Map<String, SnapshotId> snapshotIdsMap) {
final List<SnapshotId> snapshotIds = new ArrayList<>(snapshotIdsMap.values());
final int totalSnapshots = snapshotIds.size();
final int numIndices = randomIntBetween(1, 30);
final Map<IndexId, List<SnapshotId>> indices = Maps.newMapWithExpectedSize(numIndices);
for (int i = 0; i < numIndices; i++) {
final IndexId indexId = new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID());
final Set<SnapshotId> indexSnapshots = new LinkedHashSet<>();
final int numIndicesForSnapshot = randomIntBetween(1, numIndices);
for (int j = 0; j < numIndicesForSnapshot; j++) {
indexSnapshots.add(snapshotIds.get(randomIntBetween(0, totalSnapshots - 1)));
}
indices.put(indexId, List.copyOf(indexSnapshots));
}
return indices;
}
private static Collection<IndexId> getIndicesToUpdateAfterRemovingSnapshot(RepositoryData repositoryData, SnapshotId snapshotToDelete) {
final var result = new ArrayList<IndexId>();
repositoryData.indicesToUpdateAfterRemovingSnapshot(List.of(snapshotToDelete)).forEachRemaining(result::add);
return result;
}
}
| RepositoryDataTests |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/OpenFileSupport.java | {
"start": 12701,
"end": 17295
} | class ____ {
/** File status; may be null. */
private S3AFileStatus status;
/** Active input policy. */
private S3AInputPolicy inputPolicy;
/** Change detection policy. */
private ChangeDetectionPolicy changePolicy;
/** Read ahead range. */
private long readAheadRange;
/** Buffer size. Currently ignored. */
private int bufferSize;
/**
* Where does the read start from. 0 unless known.
*/
private long splitStart;
/**
* What is the split end?
* Negative if not known.
*/
private long splitEnd = -1;
/**
* What is the file length?
* Negative if not known.
*/
private long fileLength = -1;
/**
* Threshold for stream reads to switch to
* asynchronous draining.
*/
private long asyncDrainThreshold;
/**
* Constructor.
*/
public OpenFileInformation() {
}
/**
* Build.
* @return this object
*/
public OpenFileInformation build() {
return this;
}
public S3AFileStatus getStatus() {
return status;
}
public S3AInputPolicy getInputPolicy() {
return inputPolicy;
}
public ChangeDetectionPolicy getChangePolicy() {
return changePolicy;
}
public long getReadAheadRange() {
return readAheadRange;
}
public int getBufferSize() {
return bufferSize;
}
public long getSplitStart() {
return splitStart;
}
public long getSplitEnd() {
return splitEnd;
}
@Override
public String toString() {
return "OpenFileInformation{" +
"status=" + status +
", inputPolicy=" + inputPolicy +
", changePolicy=" + changePolicy +
", readAheadRange=" + readAheadRange +
", splitStart=" + splitStart +
", splitEnd=" + splitEnd +
", bufferSize=" + bufferSize +
", drainThreshold=" + asyncDrainThreshold +
'}';
}
/**
* Get the file length.
* @return the file length; -1 if not known.
*/
public long getFileLength() {
return fileLength;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withStatus(final S3AFileStatus value) {
status = value;
return this;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withInputPolicy(final S3AInputPolicy value) {
inputPolicy = value;
return this;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withChangePolicy(final ChangeDetectionPolicy value) {
changePolicy = value;
return this;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withReadAheadRange(final long value) {
readAheadRange = value;
return this;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withBufferSize(final int value) {
bufferSize = value;
return this;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withSplitStart(final long value) {
splitStart = value;
return this;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withSplitEnd(final long value) {
splitEnd = value;
return this;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withFileLength(final long value) {
fileLength = value;
return this;
}
/**
* Set builder value.
* @param value new value
* @return the builder
*/
public OpenFileInformation withAsyncDrainThreshold(final long value) {
asyncDrainThreshold = value;
return this;
}
/**
* Propagate the options to the operation context
* being built up.
* @param roc context
* @return the context
*/
public S3AReadOpContext applyOptions(S3AReadOpContext roc) {
return roc
.withInputPolicy(inputPolicy)
.withChangeDetectionPolicy(changePolicy)
.withAsyncDrainThreshold(asyncDrainThreshold)
.withReadahead(readAheadRange);
}
}
}
| OpenFileInformation |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/CheckpointIDCounterTestBase.java | {
"start": 5990,
"end": 7006
} | class ____ implements Callable<List<Long>> {
/** Total number of {@link CheckpointIDCounter#getAndIncrement()} calls. */
private static final int NumIncrements = 128;
private final CountDownLatch startLatch;
private final CheckpointIDCounter counter;
public Incrementer(CountDownLatch startLatch, CheckpointIDCounter counter) {
this.startLatch = startLatch;
this.counter = counter;
}
@Override
public List<Long> call() throws Exception {
final Random rand = new Random();
final List<Long> counts = new ArrayList<>();
// Wait for the main thread to kick off execution
this.startLatch.await();
for (int i = 0; i < NumIncrements; i++) {
counts.add(counter.getAndIncrement());
// To get some "random" interleaving ;)
Thread.sleep(rand.nextInt(20));
}
return counts;
}
}
}
| Incrementer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java | {
"start": 1888,
"end": 18206
} | class ____ implements ToXContentFragment, Writeable {
static final NodeFeature SOURCE_MODES_FEATURE = new NodeFeature("cluster.stats.source_modes");
private static final Pattern DOC_PATTERN = Pattern.compile("doc[\\[.]");
private static final Pattern SOURCE_PATTERN = Pattern.compile("params\\._source");
/**
* Create {@link MappingStats} from the given cluster state.
*/
public static MappingStats of(Metadata metadata, Runnable ensureNotCancelled) {
Map<String, FieldStats> fieldTypes = new HashMap<>();
Set<String> concreteFieldNames = new HashSet<>();
// Account different source modes based on index.mapping.source.mode setting:
Map<String, Integer> sourceModeUsageCount = new HashMap<>();
Map<String, RuntimeFieldStats> runtimeFieldTypes = new HashMap<>();
final int mappingCount = metadata.projects().values().stream().mapToInt(p -> p.getMappingsByHash().size()).sum();
final Map<MappingMetadata, Integer> mappingCounts = new IdentityHashMap<>(mappingCount);
final AtomicLong totalFieldCount = new AtomicLong();
final AtomicLong totalDeduplicatedFieldCount = new AtomicLong();
long totalMappingSizeBytes = 0L;
for (ProjectMetadata project : metadata.projects().values()) {
for (IndexMetadata indexMetadata : project) {
if (indexMetadata.isSystem()) {
// Don't include system indices in statistics about mappings,
// we care about the user's indices.
continue;
}
AnalysisStats.countMapping(mappingCounts, indexMetadata);
var sourceMode = IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexMetadata.getSettings());
sourceModeUsageCount.merge(sourceMode.toString().toLowerCase(Locale.ENGLISH), 1, Integer::sum);
}
for (MappingMetadata mappingMetadata : project.getMappingsByHash().values()) {
totalMappingSizeBytes += mappingMetadata.source().compressed().length;
}
}
for (Map.Entry<MappingMetadata, Integer> mappingAndCount : mappingCounts.entrySet()) {
ensureNotCancelled.run();
Set<String> indexFieldTypes = new HashSet<>();
Set<String> indexRuntimeFieldTypes = new HashSet<>();
final int count = mappingAndCount.getValue();
final Map<String, Object> map = mappingAndCount.getKey().getSourceAsMap();
MappingVisitor.visitMapping(map, (field, fieldMapping) -> {
concreteFieldNames.add(field);
String type = null;
Object typeO = fieldMapping.get("type");
if (typeO != null) {
type = typeO.toString();
} else if (fieldMapping.containsKey("properties")) {
type = "object";
}
if (type != null) {
totalDeduplicatedFieldCount.incrementAndGet();
totalFieldCount.addAndGet(count);
FieldStats stats;
if (type.equals("dense_vector")) {
stats = fieldTypes.computeIfAbsent(type, DenseVectorFieldStats::new);
DenseVectorFieldStats vStats = (DenseVectorFieldStats) stats;
if (fieldMapping.containsKey("similarity")) {
Object similarity = fieldMapping.get("similarity");
vStats.vectorSimilarityTypeCount.compute(similarity.toString(), (t, c) -> c == null ? count : c + count);
}
String elementTypeStr = "float";
if (fieldMapping.containsKey("element_type")) {
Object elementType = fieldMapping.get("element_type");
elementTypeStr = elementType.toString();
}
vStats.vectorElementTypeCount.compute(elementTypeStr, (t, c) -> c == null ? count : c + count);
boolean indexed = fieldMapping.containsKey("index") && (boolean) fieldMapping.get("index");
if (indexed) {
Object indexOptions = fieldMapping.get("index_options");
// NOTE, while the default for `float` is now `int8_hnsw`, that is actually added to the mapping
// if the value is truly missing & we are indexed, we default to hnsw.
String indexTypeStr = "hnsw";
if (indexOptions instanceof Map<?, ?> indexOptionsMap) {
Object indexType = indexOptionsMap.get("type");
if (indexType != null) {
indexTypeStr = indexType.toString();
}
}
vStats.vectorIndexTypeCount.compute(indexTypeStr, (t, c) -> c == null ? count : c + count);
vStats.indexedVectorCount += count;
Object obj = fieldMapping.get("dims");
if (obj != null) {
int dims = (int) obj;
if (vStats.indexedVectorDimMin == DenseVectorFieldStats.UNSET || dims < vStats.indexedVectorDimMin) {
vStats.indexedVectorDimMin = dims;
}
if (vStats.indexedVectorDimMin == DenseVectorFieldStats.UNSET || dims > vStats.indexedVectorDimMax) {
vStats.indexedVectorDimMax = dims;
}
}
} else {
vStats.vectorIndexTypeCount.compute(DenseVectorFieldStats.NOT_INDEXED, (t, c) -> c == null ? 1 : c + 1);
}
} else {
stats = fieldTypes.computeIfAbsent(type, FieldStats::new);
}
stats.count += count;
if (indexFieldTypes.add(type)) {
stats.indexCount += count;
}
Object scriptObject = fieldMapping.get("script");
if (scriptObject instanceof Map<?, ?> script) {
Object sourceObject = script.get("source");
stats.scriptCount += count;
updateScriptParams(sourceObject, stats.fieldScriptStats, count);
Object langObject = script.get("lang");
if (langObject != null) {
stats.scriptLangs.add(langObject.toString());
}
}
}
});
MappingVisitor.visitRuntimeMapping(map, (field, fieldMapping) -> {
Object typeObject = fieldMapping.get("type");
if (typeObject == null) {
return;
}
String type = typeObject.toString();
RuntimeFieldStats stats = runtimeFieldTypes.computeIfAbsent(type, RuntimeFieldStats::new);
stats.count += count;
if (indexRuntimeFieldTypes.add(type)) {
stats.indexCount += count;
}
if (concreteFieldNames.contains(field)) {
stats.shadowedCount += count;
}
Object scriptObject = fieldMapping.get("script");
if (scriptObject == null) {
stats.scriptLessCount += count;
} else if (scriptObject instanceof Map<?, ?> script) {
Object sourceObject = script.get("source");
updateScriptParams(sourceObject, stats.fieldScriptStats, count);
Object langObject = script.get("lang");
if (langObject != null) {
stats.scriptLangs.add(langObject.toString());
}
}
});
}
return new MappingStats(
totalFieldCount.get(),
totalDeduplicatedFieldCount.get(),
totalMappingSizeBytes,
fieldTypes.values(),
runtimeFieldTypes.values(),
sourceModeUsageCount
);
}
private static void updateScriptParams(Object scriptSourceObject, FieldScriptStats scriptStats, int multiplier) {
if (scriptSourceObject != null) {
String scriptSource = scriptSourceObject.toString();
int chars = scriptSource.length();
long lines = scriptSource.lines().count();
int docUsages = countOccurrences(scriptSource, DOC_PATTERN);
int sourceUsages = countOccurrences(scriptSource, SOURCE_PATTERN);
scriptStats.update(chars, lines, sourceUsages, docUsages, multiplier);
}
}
private static int countOccurrences(String script, Pattern pattern) {
int occurrences = 0;
Matcher matcher = pattern.matcher(script);
while (matcher.find()) {
occurrences++;
}
return occurrences;
}
@Nullable // for BwC
private final Long totalFieldCount;
@Nullable // for BwC
private final Long totalDeduplicatedFieldCount;
@Nullable // for BwC
private final Long totalMappingSizeBytes;
private final List<FieldStats> fieldTypeStats;
private final List<RuntimeFieldStats> runtimeFieldStats;
private final Map<String, Integer> sourceModeUsageCount;
MappingStats(
long totalFieldCount,
long totalDeduplicatedFieldCount,
long totalMappingSizeBytes,
Collection<FieldStats> fieldTypeStats,
Collection<RuntimeFieldStats> runtimeFieldStats,
Map<String, Integer> sourceModeUsageCount
) {
this.totalFieldCount = totalFieldCount;
this.totalDeduplicatedFieldCount = totalDeduplicatedFieldCount;
this.totalMappingSizeBytes = totalMappingSizeBytes;
this.sourceModeUsageCount = sourceModeUsageCount;
List<FieldStats> stats = new ArrayList<>(fieldTypeStats);
stats.sort(Comparator.comparing(IndexFeatureStats::getName));
this.fieldTypeStats = Collections.unmodifiableList(stats);
List<RuntimeFieldStats> runtimeStats = new ArrayList<>(runtimeFieldStats);
runtimeStats.sort(Comparator.comparing(RuntimeFieldStats::type));
this.runtimeFieldStats = Collections.unmodifiableList(runtimeStats);
}
MappingStats(StreamInput in) throws IOException {
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
totalFieldCount = in.readOptionalVLong();
totalDeduplicatedFieldCount = in.readOptionalVLong();
totalMappingSizeBytes = in.readOptionalVLong();
} else {
totalFieldCount = null;
totalDeduplicatedFieldCount = null;
totalMappingSizeBytes = null;
}
fieldTypeStats = in.readCollectionAsImmutableList(FieldStats::new);
runtimeFieldStats = in.readCollectionAsImmutableList(RuntimeFieldStats::new);
var transportVersion = in.getTransportVersion();
sourceModeUsageCount = canReadOrWriteSourceModeTelemetry(transportVersion)
? in.readImmutableMap(StreamInput::readString, StreamInput::readVInt)
: Map.of();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
out.writeOptionalVLong(totalFieldCount);
out.writeOptionalVLong(totalDeduplicatedFieldCount);
out.writeOptionalVLong(totalMappingSizeBytes);
}
out.writeCollection(fieldTypeStats);
out.writeCollection(runtimeFieldStats);
var transportVersion = out.getTransportVersion();
if (canReadOrWriteSourceModeTelemetry(transportVersion)) {
out.writeMap(sourceModeUsageCount, StreamOutput::writeVInt);
}
}
private static boolean canReadOrWriteSourceModeTelemetry(TransportVersion version) {
return version.isPatchFrom(TransportVersions.V_8_17_0) || version.supports(TransportVersions.V_8_18_0);
}
private static OptionalLong ofNullable(Long l) {
return l == null ? OptionalLong.empty() : OptionalLong.of(l);
}
/**
* @return the total number of fields (in non-system indices), or {@link OptionalLong#empty()} if omitted (due to BwC)
*/
public OptionalLong getTotalFieldCount() {
return ofNullable(totalFieldCount);
}
/**
* @return the total number of fields (in non-system indices) accounting for deduplication, or {@link OptionalLong#empty()} if omitted
* (due to BwC)
*/
public OptionalLong getTotalDeduplicatedFieldCount() {
return ofNullable(totalDeduplicatedFieldCount);
}
/**
* @return the total size of all mappings (including those for system indices) accounting for deduplication and compression, or {@link
* OptionalLong#empty()} if omitted (due to BwC).
*/
public OptionalLong getTotalMappingSizeBytes() {
return ofNullable(totalMappingSizeBytes);
}
/**
* Return stats about field types.
*/
public List<FieldStats> getFieldTypeStats() {
return fieldTypeStats;
}
/**
* Return stats about runtime field types.
*/
public List<RuntimeFieldStats> getRuntimeFieldStats() {
return runtimeFieldStats;
}
public Map<String, Integer> getSourceModeUsageCount() {
return sourceModeUsageCount;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("mappings");
if (totalFieldCount != null) {
builder.field("total_field_count", totalFieldCount);
}
if (totalDeduplicatedFieldCount != null) {
builder.field("total_deduplicated_field_count", totalDeduplicatedFieldCount);
}
if (totalMappingSizeBytes != null) {
builder.humanReadableField(
"total_deduplicated_mapping_size_in_bytes",
"total_deduplicated_mapping_size",
ByteSizeValue.ofBytes(totalMappingSizeBytes)
);
}
builder.startArray("field_types");
for (IndexFeatureStats st : fieldTypeStats) {
st.toXContent(builder, params);
}
builder.endArray();
builder.startArray("runtime_field_types");
for (RuntimeFieldStats st : runtimeFieldStats) {
st.toXContent(builder, params);
}
builder.endArray();
builder.startObject("source_modes");
var entries = sourceModeUsageCount.entrySet().stream().sorted(Map.Entry.comparingByKey()).toList();
for (var entry : entries) {
builder.field(entry.getKey(), entry.getValue());
}
builder.endObject();
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MappingStats that = (MappingStats) o;
return Objects.equals(totalFieldCount, that.totalFieldCount)
&& Objects.equals(totalDeduplicatedFieldCount, that.totalDeduplicatedFieldCount)
&& Objects.equals(totalMappingSizeBytes, that.totalMappingSizeBytes)
&& fieldTypeStats.equals(that.fieldTypeStats)
&& runtimeFieldStats.equals(that.runtimeFieldStats)
&& sourceModeUsageCount.equals(that.sourceModeUsageCount);
}
@Override
public int hashCode() {
return Objects.hash(
totalFieldCount,
totalDeduplicatedFieldCount,
totalMappingSizeBytes,
fieldTypeStats,
runtimeFieldStats,
sourceModeUsageCount
);
}
}
| MappingStats |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java | {
"start": 1489,
"end": 13157
} | class ____ extends ESTestCase {
public void testFsInfo() throws IOException {
try (NodeEnvironment env = newNodeEnvironment()) {
FsProbe probe = new FsProbe(env);
FsInfo stats = probe.stats(null);
assertNotNull(stats);
assertThat(stats.getTimestamp(), greaterThan(0L));
if (Constants.LINUX) {
assertNotNull(stats.getIoStats());
assertNotNull(stats.getIoStats().devicesStats);
for (int i = 0; i < stats.getIoStats().devicesStats.length; i++) {
final FsInfo.DeviceStats deviceStats = stats.getIoStats().devicesStats[i];
assertNotNull(deviceStats);
assertThat(deviceStats.currentReadsCompleted, greaterThanOrEqualTo(0L));
assertThat(deviceStats.previousReadsCompleted, equalTo(-1L));
assertThat(deviceStats.currentSectorsRead, greaterThanOrEqualTo(0L));
assertThat(deviceStats.previousSectorsRead, equalTo(-1L));
assertThat(deviceStats.currentWritesCompleted, greaterThanOrEqualTo(0L));
assertThat(deviceStats.previousWritesCompleted, equalTo(-1L));
assertThat(deviceStats.currentSectorsWritten, greaterThanOrEqualTo(0L));
assertThat(deviceStats.previousSectorsWritten, equalTo(-1L));
assertThat(deviceStats.currentIOTime, greaterThanOrEqualTo(0L));
assertThat(deviceStats.previousIOTime, equalTo(-1L));
}
} else {
assertNull(stats.getIoStats());
}
FsInfo.Path total = stats.getTotal();
assertNotNull(total);
assertThat(total.total, greaterThan(0L));
assertThat(total.free, greaterThan(0L));
assertThat(total.available, greaterThan(0L));
for (FsInfo.Path path : stats) {
assertNotNull(path);
assertThat(path.getPath(), is(not(emptyOrNullString())));
assertThat(path.getMount(), is(not(emptyOrNullString())));
assertThat(path.getType(), is(not(emptyOrNullString())));
assertThat(path.total, greaterThan(0L));
assertThat(path.free, greaterThan(0L));
assertThat(path.available, greaterThan(0L));
}
}
}
public void testFsInfoOverflow() throws Exception {
final FsInfo.Path pathStats = new FsInfo.Path(
"/foo/bar",
null,
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()
);
addUntilOverflow(pathStats, p -> p.total, "total", () -> new FsInfo.Path("/foo/baz", null, randomNonNegativeLong(), 0, 0));
addUntilOverflow(pathStats, p -> p.free, "free", () -> new FsInfo.Path("/foo/baz", null, 0, randomNonNegativeLong(), 0));
addUntilOverflow(pathStats, p -> p.available, "available", () -> new FsInfo.Path("/foo/baz", null, 0, 0, randomNonNegativeLong()));
// even after overflowing these should not be negative
assertThat(pathStats.total, greaterThan(0L));
assertThat(pathStats.free, greaterThan(0L));
assertThat(pathStats.available, greaterThan(0L));
}
private void addUntilOverflow(
final FsInfo.Path pathStats,
final Function<FsInfo.Path, Long> getter,
final String field,
final Supplier<FsInfo.Path> supplier
) {
FsInfo.Path pathToAdd = supplier.get();
while ((getter.apply(pathStats) + getter.apply(pathToAdd)) > 0) {
// add a path to increase the total bytes until it overflows
logger.info(
"--> adding {} bytes to {}, {} will be: {}",
getter.apply(pathToAdd),
getter.apply(pathStats),
field,
getter.apply(pathStats) + getter.apply(pathToAdd)
);
pathStats.add(pathToAdd);
pathToAdd = supplier.get();
}
// this overflows
logger.info(
"--> adding {} bytes to {}, {} will be: {}",
getter.apply(pathToAdd),
getter.apply(pathStats),
field,
getter.apply(pathStats) + getter.apply(pathToAdd)
);
assertThat(getter.apply(pathStats) + getter.apply(pathToAdd), lessThan(0L));
pathStats.add(pathToAdd);
}
public void testIoStats() {
final AtomicReference<List<String>> diskStats = new AtomicReference<>();
diskStats.set(
Arrays.asList(
" 259 0 nvme0n1 336609 0 7923613 82813 10264051 0 182983933 52451441 0 2970886 52536260",
" 259 1 nvme0n1p1 602 0 9919 131 1 0 1 0 0 19 131",
" 259 2 nvme0n1p2 186 0 8626 18 24 0 60 20 0 34 38",
" 259 3 nvme0n1p3 335733 0 7901620 82658 9592875 0 182983872 50843431 0 1737726 50926087",
" 253 0 dm-0 287716 0 7184666 33457 8398869 0 118857776 18730966 0 1918440 18767169",
" 253 1 dm-1 112 0 4624 13 0 0 0 0 0 5 13",
" 253 2 dm-2 47802 0 710658 49312 1371977 0 64126096 33730596 0 1058193 33781827"
)
);
final FsProbe probe = new FsProbe(null) {
@Override
List<String> readProcDiskStats() throws IOException {
return diskStats.get();
}
};
final Set<Tuple<Integer, Integer>> devicesNumbers = new HashSet<>();
devicesNumbers.add(Tuple.tuple(253, 0));
devicesNumbers.add(Tuple.tuple(253, 2));
final FsInfo.IoStats first = probe.ioStats(devicesNumbers, null);
assertNotNull(first);
assertThat(first.devicesStats[0].majorDeviceNumber, equalTo(253));
assertThat(first.devicesStats[0].minorDeviceNumber, equalTo(0));
assertThat(first.devicesStats[0].deviceName, equalTo("dm-0"));
assertThat(first.devicesStats[0].currentReadsCompleted, equalTo(287716L));
assertThat(first.devicesStats[0].previousReadsCompleted, equalTo(-1L));
assertThat(first.devicesStats[0].currentSectorsRead, equalTo(7184666L));
assertThat(first.devicesStats[0].previousSectorsRead, equalTo(-1L));
assertThat(first.devicesStats[0].currentWritesCompleted, equalTo(8398869L));
assertThat(first.devicesStats[0].previousWritesCompleted, equalTo(-1L));
assertThat(first.devicesStats[0].currentSectorsWritten, equalTo(118857776L));
assertThat(first.devicesStats[0].previousSectorsWritten, equalTo(-1L));
assertThat(first.devicesStats[0].currentIOTime, equalTo(1918440L));
assertThat(first.devicesStats[0].previousIOTime, equalTo(-1L));
assertThat(first.devicesStats[1].majorDeviceNumber, equalTo(253));
assertThat(first.devicesStats[1].minorDeviceNumber, equalTo(2));
assertThat(first.devicesStats[1].deviceName, equalTo("dm-2"));
assertThat(first.devicesStats[1].currentReadsCompleted, equalTo(47802L));
assertThat(first.devicesStats[1].previousReadsCompleted, equalTo(-1L));
assertThat(first.devicesStats[1].currentSectorsRead, equalTo(710658L));
assertThat(first.devicesStats[1].previousSectorsRead, equalTo(-1L));
assertThat(first.devicesStats[1].currentWritesCompleted, equalTo(1371977L));
assertThat(first.devicesStats[1].previousWritesCompleted, equalTo(-1L));
assertThat(first.devicesStats[1].currentSectorsWritten, equalTo(64126096L));
assertThat(first.devicesStats[1].previousSectorsWritten, equalTo(-1L));
assertThat(first.devicesStats[1].currentIOTime, equalTo(1058193L));
assertThat(first.devicesStats[1].previousIOTime, equalTo(-1L));
diskStats.set(
Arrays.asList(
" 259 0 nvme0n1 336870 0 7928397 82876 10264393 0 182986405 52451610 0 2971042 52536492",
" 259 1 nvme0n1p1 602 0 9919 131 1 0 1 0 0 19 131",
" 259 2 nvme0n1p2 186 0 8626 18 24 0 60 20 0 34 38",
" 259 3 nvme0n1p3 335994 0 7906404 82721 9593184 0 182986344 50843529 0 1737840 50926248",
" 253 0 dm-0 287734 0 7185242 33464 8398869 0 118857776 18730966 0 1918444 18767176",
" 253 1 dm-1 112 0 4624 13 0 0 0 0 0 5 13",
" 253 2 dm-2 48045 0 714866 49369 1372291 0 64128568 33730766 0 1058347 33782056"
)
);
final FsInfo previous = new FsInfo(System.currentTimeMillis(), first, new FsInfo.Path[0]);
final FsInfo.IoStats second = probe.ioStats(devicesNumbers, previous);
assertNotNull(second);
assertThat(second.devicesStats[0].majorDeviceNumber, equalTo(253));
assertThat(second.devicesStats[0].minorDeviceNumber, equalTo(0));
assertThat(second.devicesStats[0].deviceName, equalTo("dm-0"));
assertThat(second.devicesStats[0].currentReadsCompleted, equalTo(287734L));
assertThat(second.devicesStats[0].previousReadsCompleted, equalTo(287716L));
assertThat(second.devicesStats[0].currentSectorsRead, equalTo(7185242L));
assertThat(second.devicesStats[0].previousSectorsRead, equalTo(7184666L));
assertThat(second.devicesStats[0].currentWritesCompleted, equalTo(8398869L));
assertThat(second.devicesStats[0].previousWritesCompleted, equalTo(8398869L));
assertThat(second.devicesStats[0].currentSectorsWritten, equalTo(118857776L));
assertThat(second.devicesStats[0].previousSectorsWritten, equalTo(118857776L));
assertThat(second.devicesStats[0].currentIOTime, equalTo(1918444L));
assertThat(second.devicesStats[0].previousIOTime, equalTo(1918440L));
assertThat(second.devicesStats[1].majorDeviceNumber, equalTo(253));
assertThat(second.devicesStats[1].minorDeviceNumber, equalTo(2));
assertThat(second.devicesStats[1].deviceName, equalTo("dm-2"));
assertThat(second.devicesStats[1].currentReadsCompleted, equalTo(48045L));
assertThat(second.devicesStats[1].previousReadsCompleted, equalTo(47802L));
assertThat(second.devicesStats[1].currentSectorsRead, equalTo(714866L));
assertThat(second.devicesStats[1].previousSectorsRead, equalTo(710658L));
assertThat(second.devicesStats[1].currentWritesCompleted, equalTo(1372291L));
assertThat(second.devicesStats[1].previousWritesCompleted, equalTo(1371977L));
assertThat(second.devicesStats[1].currentSectorsWritten, equalTo(64128568L));
assertThat(second.devicesStats[1].previousSectorsWritten, equalTo(64126096L));
assertThat(second.devicesStats[1].currentIOTime, equalTo(1058347L));
assertThat(second.devicesStats[1].previousIOTime, equalTo(1058193L));
assertThat(second.totalOperations, equalTo(575L));
assertThat(second.totalReadOperations, equalTo(261L));
assertThat(second.totalWriteOperations, equalTo(314L));
assertThat(second.totalReadKilobytes, equalTo(2392L));
assertThat(second.totalWriteKilobytes, equalTo(1236L));
assertThat(second.totalIOTimeInMillis, equalTo(158L));
}
public void testAdjustForHugeFilesystems() throws Exception {
DataPath np = new FakeDataPath(createTempDir());
assertThat(FsProbe.getFSInfo(np).total, greaterThanOrEqualTo(0L));
assertThat(FsProbe.getFSInfo(np).free, greaterThanOrEqualTo(0L));
assertThat(FsProbe.getFSInfo(np).available, greaterThanOrEqualTo(0L));
}
static | FsProbeTests |
java | micronaut-projects__micronaut-core | inject-java/src/main/java/io/micronaut/annotation/processing/visitor/JavaEnumConstantElement.java | {
"start": 1405,
"end": 3452
} | enum ____
* @param nativeElement The native element
* @param annotationMetadataFactory The annotation metadata factory
* @param visitorContext The visitor context
*/
JavaEnumConstantElement(JavaEnumElement declaringEnum,
JavaNativeElement.Variable nativeElement,
ElementAnnotationMetadataFactory annotationMetadataFactory,
JavaVisitorContext visitorContext) {
super(nativeElement, annotationMetadataFactory, visitorContext);
this.declaringEnum = declaringEnum;
}
@Override
public JavaNativeElement.@NonNull Variable getNativeType() {
return (JavaNativeElement.Variable) super.getNativeType();
}
@Override
protected AbstractJavaElement copyThis() {
return new JavaEnumConstantElement(declaringEnum, getNativeType(), elementAnnotationMetadataFactory, visitorContext);
}
@Override
public FieldElement withAnnotationMetadata(AnnotationMetadata annotationMetadata) {
return (FieldElement) super.withAnnotationMetadata(annotationMetadata);
}
@Override
public ClassElement getDeclaringType() {
return declaringEnum;
}
@NonNull
@Override
public ClassElement getType() {
return declaringEnum;
}
@Override
public Set<ElementModifier> getModifiers() {
return ENUM_CONSTANT_MODIFIERS;
}
@Override
public boolean isPackagePrivate() {
return false;
}
@Override
public boolean isAbstract() {
return false;
}
@Override
public boolean isStatic() {
return true;
}
@Override
public boolean isPublic() {
return true;
}
@Override
public boolean isPrivate() {
return false;
}
@Override
public boolean isFinal() {
return true;
}
@Override
public boolean isProtected() {
return false;
}
@Override
public boolean isArray() {
return false;
}
}
| element |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/ShareFetchMetricsManager.java | {
"start": 1070,
"end": 4444
} | class ____ implements AutoCloseable {
private final Metrics metrics;
private final Sensor throttleTime;
private final Sensor bytesFetched;
private final Sensor recordsFetched;
private final Sensor fetchLatency;
private final Sensor sentAcknowledgements;
private final Sensor failedAcknowledgements;
public ShareFetchMetricsManager(Metrics metrics, ShareFetchMetricsRegistry metricsRegistry) {
this.metrics = metrics;
this.bytesFetched = new SensorBuilder(metrics, "bytes-fetched")
.withAvg(metricsRegistry.fetchSizeAvg)
.withMax(metricsRegistry.fetchSizeMax)
.withMeter(metricsRegistry.bytesFetchedRate, metricsRegistry.bytesFetchedTotal)
.build();
this.recordsFetched = new SensorBuilder(metrics, "records-fetched")
.withAvg(metricsRegistry.recordsPerRequestAvg)
.withMax(metricsRegistry.recordsPerRequestMax)
.withMeter(metricsRegistry.recordsFetchedRate, metricsRegistry.recordsFetchedTotal)
.build();
this.sentAcknowledgements = new SensorBuilder(metrics, "sent-acknowledgements")
.withMeter(metricsRegistry.acknowledgementSendRate, metricsRegistry.acknowledgementSendTotal)
.build();
this.failedAcknowledgements = new SensorBuilder(metrics, "failed-acknowledgements")
.withMeter(metricsRegistry.acknowledgementErrorRate, metricsRegistry.acknowledgementErrorTotal)
.build();
this.fetchLatency = new SensorBuilder(metrics, "fetch-latency")
.withAvg(metricsRegistry.fetchLatencyAvg)
.withMax(metricsRegistry.fetchLatencyMax)
.withMeter(new WindowedCount(), metricsRegistry.fetchRequestRate, metricsRegistry.fetchRequestTotal)
.build();
this.throttleTime = new SensorBuilder(metrics, "fetch-throttle-time")
.withAvg(metricsRegistry.fetchThrottleTimeAvg)
.withMax(metricsRegistry.fetchThrottleTimeMax)
.build();
}
public Sensor throttleTimeSensor() {
return throttleTime;
}
void recordLatency(String node, long requestLatencyMs) {
fetchLatency.record(requestLatencyMs);
if (!node.isEmpty()) {
String nodeTimeName = "node-" + node + ".latency";
Sensor nodeRequestTime = metrics.getSensor(nodeTimeName);
if (nodeRequestTime != null)
nodeRequestTime.record(requestLatencyMs);
}
}
void recordBytesFetched(int bytes) {
bytesFetched.record(bytes);
}
void recordRecordsFetched(int records) {
recordsFetched.record(records);
}
void recordAcknowledgementSent(int acknowledgements) {
sentAcknowledgements.record(acknowledgements);
}
void recordFailedAcknowledgements(int acknowledgements) {
failedAcknowledgements.record(acknowledgements);
}
@Override
public void close() throws IOException {
Arrays.asList(
throttleTime.name(),
bytesFetched.name(),
recordsFetched.name(),
fetchLatency.name(),
sentAcknowledgements.name(),
failedAcknowledgements.name()
).forEach(metrics::removeSensor);
}
}
| ShareFetchMetricsManager |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/serde/LogicalTypeJsonSerdeTest.java | {
"start": 16383,
"end": 22178
} | class ____ only
StructuredType.newBuilder("NotInClassPathPojo")
.attributes(
Arrays.asList(
new StructuredAttribute("f0", new IntType(true)),
new StructuredAttribute("f1", new BigIntType(true)),
new StructuredAttribute(
"f2", new VarCharType(200), "desc")))
.build(),
// inline structured type with time indicator type
StructuredType.newBuilder(PojoClass.class.getName())
.attributes(
Arrays.asList(
new StructuredAttribute("f0", new IntType(true)),
new StructuredAttribute(
"f1",
new TimestampType(
false, TimestampKind.ROWTIME, 3))))
.build(),
// custom RawType
new RawType<>(LocalDateTime.class, LocalDateTimeSerializer.INSTANCE),
// external RawType
new RawType<>(
Row.class,
ExternalSerializer.of(
DataTypes.ROW(DataTypes.INT(), DataTypes.STRING()))),
new DescriptorType());
final List<LogicalType> mutableTypes = new ArrayList<>(types);
// RawType for MapView
addRawTypesForMapView(mutableTypes, new VarCharType(100), new VarCharType(100));
addRawTypesForMapView(mutableTypes, new VarCharType(100), new BigIntType());
addRawTypesForMapView(mutableTypes, new BigIntType(), new VarCharType(100));
addRawTypesForMapView(mutableTypes, new BigIntType(), new BigIntType());
// RawType for ListView
addRawTypesForListView(mutableTypes, new VarCharType(100));
addRawTypesForListView(mutableTypes, new BigIntType());
// RawType for custom MapView
mutableTypes.add(
DataViewUtils.adjustDataViews(
MapView.newMapViewDataType(
DataTypes.STRING().toInternal(),
DataTypes.STRING().bridgedTo(byte[].class)),
false)
.getLogicalType());
final List<LogicalType> allTypes = new ArrayList<>();
// consider nullable
for (LogicalType type : mutableTypes) {
allTypes.add(type.copy(true));
allTypes.add(type.copy(false));
}
// ignore nullable for NullType
allTypes.add(new NullType());
return allTypes;
}
private static void addRawTypesForMapView(
List<LogicalType> types, LogicalType keyType, LogicalType valueType) {
for (boolean hasStateBackedDataViews : Arrays.asList(true, false)) {
for (boolean keyNullable : Arrays.asList(true, false)) {
for (boolean isInternalKeyType : Arrays.asList(true, false)) {
for (boolean valueNullable : Arrays.asList(true, false)) {
for (boolean isInternalValueType : Arrays.asList(true, false)) {
final DataType viewDataType =
DataViewUtils.adjustDataViews(
MapView.newMapViewDataType(
convertToInternalTypeIfNeeded(
DataTypes.of(keyType.copy(keyNullable)),
isInternalKeyType),
convertToInternalTypeIfNeeded(
DataTypes.of(
valueType.copy(valueNullable)),
isInternalValueType)),
hasStateBackedDataViews);
types.add(viewDataType.getLogicalType());
}
}
}
}
}
}
private static void addRawTypesForListView(List<LogicalType> types, LogicalType elementType) {
for (boolean hasStateBackedDataViews : Arrays.asList(true, false)) {
for (boolean elementNullable : Arrays.asList(true, false)) {
for (boolean isInternalType : Arrays.asList(true, false)) {
final DataType viewDataType =
DataViewUtils.adjustDataViews(
ListView.newListViewDataType(
convertToInternalTypeIfNeeded(
DataTypes.of(elementType.copy(elementNullable)),
isInternalType)),
hasStateBackedDataViews);
types.add(viewDataType.getLogicalType());
}
}
}
}
private static DataType convertToInternalTypeIfNeeded(
DataType dataType, boolean isInternalType) {
return isInternalType ? dataType.toInternal() : dataType;
}
}
| name |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java | {
"start": 4000,
"end": 28720
} | class ____ extends ScrollDataExtractor {
private final Queue<Tuple<SearchResponse, ElasticsearchException>> responses = new LinkedList<>();
private int numScrollReset;
TestDataExtractor(long start, long end) {
this(createContext(start, end));
}
TestDataExtractor(ScrollDataExtractorContext context) {
super(client, context, timingStatsReporter);
}
@Override
protected InputStream initScroll(long startTimestamp) throws IOException {
initScrollStartTime = startTimestamp;
return super.initScroll(startTimestamp);
}
@Override
protected SearchResponse executeSearchRequest(ActionRequestBuilder<?, SearchResponse> searchRequestBuilder) {
capturedSearchRequests.add(searchRequestBuilder);
Tuple<SearchResponse, ElasticsearchException> responseOrException = responses.remove();
if (responseOrException.v2() != null) {
throw responseOrException.v2();
}
return responseOrException.v1();
}
@Override
protected SearchResponse executeSearchScrollRequest(String scrollId) {
capturedContinueScrollIds.add(scrollId);
Tuple<SearchResponse, ElasticsearchException> responseOrException = responses.remove();
if (responseOrException.v2() != null) {
throw responseOrException.v2();
}
return responseOrException.v1();
}
@Override
void markScrollAsErrored() {
++numScrollReset;
super.markScrollAsErrored();
}
int getNumScrollReset() {
return numScrollReset;
}
void setNextResponse(SearchResponse searchResponse) {
responses.add(Tuple.tuple(searchResponse, null));
}
void setNextResponseToError(ElasticsearchException ex) {
responses.add(Tuple.tuple(null, ex));
}
public long getInitScrollStartTime() {
return initScrollStartTime;
}
public Long getLastTimestamp() {
return lastTimestamp;
}
}
@Before
@SuppressWarnings("unchecked")
public void setUpTests() {
ThreadPool threadPool = mock(ThreadPool.class);
when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
client = mock(Client.class);
when(client.threadPool()).thenReturn(threadPool);
capturedSearchRequests = new ArrayList<>();
capturedContinueScrollIds = new ArrayList<>();
jobId = "test-job";
ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE);
extractedFields = new TimeBasedExtractedFields(
timeField,
Arrays.asList(timeField, new DocValueField("field_1", Collections.singleton("keyword")))
);
indices = Arrays.asList("index-1", "index-2");
query = QueryBuilders.matchAllQuery();
scriptFields = Collections.emptyList();
scrollSize = 1000;
capturedClearScrollRequests = ArgumentCaptor.forClass(ClearScrollRequest.class);
when(client.execute(same(TransportClearScrollAction.TYPE), capturedClearScrollRequests.capture())).thenReturn(
mock(ActionFuture.class)
);
timingStatsReporter = new DatafeedTimingStatsReporter(new DatafeedTimingStats(jobId), mock(DatafeedTimingStatsPersister.class));
}
public void testSinglePageExtraction() throws IOException {
TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
SearchResponse response1 = createSearchResponse(Arrays.asList(1100L, 1200L), Arrays.asList("a1", "a2"), Arrays.asList("b1", "b2"));
extractor.setNextResponse(response1);
assertThat(extractor.hasNext(), is(true));
DataExtractor.Result result = extractor.next();
assertThat(result.searchInterval(), equalTo(new SearchInterval(1000L, 2000L)));
Optional<InputStream> stream = result.data();
assertThat(stream.isPresent(), is(true));
String expectedStream = "{\"time\":1100,\"field_1\":\"a1\"} {\"time\":1200,\"field_1\":\"a2\"}";
assertThat(asString(stream.get()), equalTo(expectedStream));
SearchResponse response2 = createEmptySearchResponse();
extractor.setNextResponse(response2);
assertThat(extractor.hasNext(), is(true));
assertThat(extractor.next().data().isPresent(), is(false));
assertThat(extractor.hasNext(), is(false));
assertThat(capturedSearchRequests.size(), equalTo(1));
String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", "");
assertThat(searchRequest, containsString("\"size\":1000"));
assertThat(
searchRequest,
containsString(
"\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}},"
+ "{\"range\":{\"time\":{\"gte\":1000,\"lt\":2000,"
+ "\"format\":\"epoch_millis\",\"boost\":1.0}}}]"
)
);
assertThat(searchRequest, containsString("\"sort\":[{\"time\":{\"order\":\"asc\"}}]"));
assertThat(searchRequest, containsString("\"stored_fields\":\"_none_\""));
assertThat(capturedContinueScrollIds.size(), equalTo(1));
assertThat(capturedContinueScrollIds.get(0), equalTo(response1.getScrollId()));
List<String> capturedClearScrollIds = getCapturedClearScrollIds();
assertThat(capturedClearScrollIds.size(), equalTo(1));
assertThat(capturedClearScrollIds.get(0), equalTo(response2.getScrollId()));
}
// Extraction across two non-empty scroll pages followed by an empty page:
// both pages are streamed, one continue-scroll is issued per non-empty page,
// and only the final empty response's scroll id is cleared.
public void testMultiplePageExtraction() throws IOException {
TestDataExtractor extractor = new TestDataExtractor(1000L, 10000L);
// Page 1
SearchResponse response1 = createSearchResponse(Arrays.asList(1000L, 2000L), Arrays.asList("a1", "a2"), Arrays.asList("b1", "b2"));
extractor.setNextResponse(response1);
assertThat(extractor.hasNext(), is(true));
DataExtractor.Result result = extractor.next();
assertThat(result.searchInterval(), equalTo(new SearchInterval(1000L, 10000L)));
Optional<InputStream> stream = result.data();
assertThat(stream.isPresent(), is(true));
String expectedStream = """
{"time":1000,"field_1":"a1"} {"time":2000,"field_1":"a2"}""";
assertThat(asString(stream.get()), equalTo(expectedStream));
// Page 2 (served via continue-scroll)
SearchResponse response2 = createSearchResponse(Arrays.asList(3000L, 4000L), Arrays.asList("a3", "a4"), Arrays.asList("b3", "b4"));
extractor.setNextResponse(response2);
assertThat(extractor.hasNext(), is(true));
result = extractor.next();
assertThat(result.searchInterval(), equalTo(new SearchInterval(1000L, 10000L)));
stream = result.data();
assertThat(stream.isPresent(), is(true));
expectedStream = """
{"time":3000,"field_1":"a3"} {"time":4000,"field_1":"a4"}""";
assertThat(asString(stream.get()), equalTo(expectedStream));
// An empty page terminates the extraction
SearchResponse response3 = createEmptySearchResponse();
extractor.setNextResponse(response3);
assertThat(extractor.hasNext(), is(true));
assertThat(extractor.next().data().isPresent(), is(false));
assertThat(extractor.hasNext(), is(false));
// A single init search request covering the full [1000, 10000) interval
assertThat(capturedSearchRequests.size(), equalTo(1));
String searchRequest1 = capturedSearchRequests.get(0).toString().replaceAll("\\s", "");
assertThat(searchRequest1, containsString("\"size\":1000"));
assertThat(
searchRequest1,
containsString(
"\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}},"
+ "{\"range\":{\"time\":{\"gte\":1000,\"lt\":10000,"
+ "\"format\":\"epoch_millis\",\"boost\":1.0}}}]"
)
);
assertThat(searchRequest1, containsString("\"sort\":[{\"time\":{\"order\":\"asc\"}}]"));
// Two continue-scrolls, one per non-empty response, in order
assertThat(capturedContinueScrollIds.size(), equalTo(2));
assertThat(capturedContinueScrollIds.get(0), equalTo(response1.getScrollId()));
assertThat(capturedContinueScrollIds.get(1), equalTo(response2.getScrollId()));
// Only the final empty response's scroll is cleared
List<String> capturedClearScrollIds = getCapturedClearScrollIds();
assertThat(capturedClearScrollIds.size(), equalTo(1));
assertThat(capturedClearScrollIds.get(0), equalTo(response3.getScrollId()));
}
// Cancellation semantics: after cancel(), the extractor still emits records from
// the in-flight page, but the output stops early - here the two records at
// timestamp 2000 are kept while the 3000 record is dropped (appears to truncate
// at the first timestamp change - TODO confirm against the extractor impl).
public void testMultiplePageExtractionGivenCancel() throws IOException {
TestDataExtractor extractor = new TestDataExtractor(1000L, 10000L);
SearchResponse response1 = createSearchResponse(Arrays.asList(1000L, 2000L), Arrays.asList("a1", "a2"), Arrays.asList("b1", "b2"));
extractor.setNextResponse(response1);
assertThat(extractor.hasNext(), is(true));
Optional<InputStream> stream = extractor.next().data();
assertThat(stream.isPresent(), is(true));
String expectedStream = """
{"time":1000,"field_1":"a1"} {"time":2000,"field_1":"a2"}""";
assertThat(asString(stream.get()), equalTo(expectedStream));
// Cancel between pages; the next batch deliberately repeats timestamp 2000
extractor.cancel();
SearchResponse response2 = createSearchResponse(
Arrays.asList(2000L, 2000L, 3000L),
Arrays.asList("a3", "a4", "a5"),
Arrays.asList("b3", "b4", "b5")
);
extractor.setNextResponse(response2);
assertThat(extractor.isCancelled(), is(true));
assertThat(extractor.hasNext(), is(true));
stream = extractor.next().data();
assertThat(stream.isPresent(), is(true));
// Only the records at timestamp 2000 survive; the 3000 record is excluded
expectedStream = """
{"time":2000,"field_1":"a3"} {"time":2000,"field_1":"a4"}""";
assertThat(asString(stream.get()), equalTo(expectedStream));
assertThat(extractor.hasNext(), is(false));
// The scroll of the last consumed response is cleared exactly once
List<String> capturedClearScrollIds = getCapturedClearScrollIds();
assertThat(capturedClearScrollIds.size(), equalTo(1));
assertThat(capturedClearScrollIds.get(0), equalTo(response2.getScrollId()));
}
// When the initial search fails, the extractor retries (two errors are queued so
// that both the first attempt and the retry fail) and the second consecutive
// failure is propagated to the caller.
// NOTE(review): this test is byte-identical to
// testExtractionGivenInitSearchResponseEncounteredFailure - consider merging them
// or differentiating what each is meant to cover.
public void testExtractionGivenInitSearchResponseHasError() {
TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
assertThat(extractor.hasNext(), is(true));
expectThrows(SearchPhaseExecutionException.class, extractor::next);
}
// A failure on the continue-scroll triggers exactly one scroll reset; when the
// reset attempt also fails (second queued error) the exception is propagated
// and the reset counter records the single recovery attempt.
public void testExtractionGivenContinueScrollResponseHasError() throws IOException {
TestDataExtractor extractor = new TestDataExtractor(1000L, 10000L);
SearchResponse response1 = createSearchResponse(Arrays.asList(1000L, 2000L), Arrays.asList("a1", "a2"), Arrays.asList("b1", "b2"));
extractor.setNextResponse(response1);
assertThat(extractor.hasNext(), is(true));
Optional<InputStream> stream = extractor.next().data();
assertThat(stream.isPresent(), is(true));
// Both the continue-scroll and the subsequent reset attempt fail
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
assertThat(extractor.hasNext(), is(true));
expectThrows(SearchPhaseExecutionException.class, extractor::next);
assertThat(extractor.getNumScrollReset(), equalTo(1));
}
// Two queued errors: the first init-search failure is retried, the second
// failure is propagated.
// NOTE(review): this test is byte-identical to
// testExtractionGivenInitSearchResponseHasError - consider merging them or
// differentiating what each is meant to cover.
public void testExtractionGivenInitSearchResponseEncounteredFailure() {
TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
assertThat(extractor.hasNext(), is(true));
expectThrows(SearchPhaseExecutionException.class, extractor::next);
}
// A single shard failure is tolerated: the extractor resets the scroll once and
// the retried search succeeds. A second failure is propagated to the caller.
public void testResetScrollAfterShardFailure() throws IOException {
TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
SearchResponse goodResponse = createSearchResponse(
Arrays.asList(1100L, 1200L),
Arrays.asList("a1", "a2"),
Arrays.asList("b1", "b2")
);
extractor.setNextResponse(goodResponse);
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
// goodResponse needs to be recreated because previous response is consumed (deleted/modified) while processing the response.
goodResponse = createSearchResponse(Arrays.asList(1100L, 1200L), Arrays.asList("a1", "a2"), Arrays.asList("b1", "b2"));
extractor.setNextResponse(goodResponse);
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
// first response is good
assertThat(extractor.hasNext(), is(true));
Optional<InputStream> output = extractor.next().data();
assertThat(output.isPresent(), is(true));
// this should recover from the first shard failure and try again
assertThat(extractor.hasNext(), is(true));
output = extractor.next().data();
assertThat(output.isPresent(), is(true));
// A second failure is not tolerated
assertThat(extractor.hasNext(), is(true));
expectThrows(SearchPhaseExecutionException.class, extractor::next);
// Exactly one recovery (scroll reset) took place
assertThat(extractor.getNumScrollReset(), equalTo(1));
}
// After an error forces a scroll reset, the new init scroll must not restart at
// the configured start time (1000) but at the last extracted record's
// timestamp + 1, so already-delivered records are not re-fetched.
public void testResetScrollUsesLastResultTimestamp() throws IOException {
TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
SearchResponse goodResponse = createSearchResponse(
Arrays.asList(1100L, 1200L),
Arrays.asList("a1", "a2"),
Arrays.asList("b1", "b2")
);
extractor.setNextResponse(goodResponse);
// Non-SearchPhaseExecutionException errors for both the follow-up and the retry
extractor.setNextResponseToError(new ElasticsearchException("something not search phase exception"));
extractor.setNextResponseToError(new ElasticsearchException("something not search phase exception"));
Optional<InputStream> output = extractor.next().data();
assertThat(output.isPresent(), is(true));
// Before any error the init scroll starts at the configured start time
assertEquals(1000L, extractor.getInitScrollStartTime());
expectThrows(ElasticsearchException.class, extractor::next);
// the new start time after error is the last record timestamp +1
assertEquals(1201L, extractor.getInitScrollStartTime());
}
// A SearchPhaseExecutionException mid-extraction triggers one scroll reset; the
// retried search succeeds and the last timestamp reflects the second response.
// A further failure of the same kind is propagated.
public void testResetScrollAfterSearchPhaseExecutionException() throws IOException {
TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
SearchResponse firstResponse = createSearchResponse(
Arrays.asList(1100L, 1200L),
Arrays.asList("a1", "a2"),
Arrays.asList("b1", "b2")
);
SearchResponse secondResponse = createSearchResponse(
Arrays.asList(1300L, 1400L),
Arrays.asList("a1", "a2"),
Arrays.asList("b1", "b2")
);
// Queue: good, error (forces reset), good (reset succeeds), error (fatal)
extractor.setNextResponse(firstResponse);
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
extractor.setNextResponse(secondResponse);
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
// first response is good
assertThat(extractor.hasNext(), is(true));
Optional<InputStream> output = extractor.next().data();
assertThat(output.isPresent(), is(true));
// this should recover from the SearchPhaseExecutionException and try again
assertThat(extractor.hasNext(), is(true));
output = extractor.next().data();
assertThat(output.isPresent(), is(true));
assertEquals(Long.valueOf(1400L), extractor.getLastTimestamp());
// A second failure is not tolerated
assertThat(extractor.hasNext(), is(true));
expectThrows(SearchPhaseExecutionException.class, extractor::next);
assertThat(extractor.getNumScrollReset(), equalTo(1));
}
// A SearchPhaseExecutionException on the very first (init) scroll is retried
// once via a scroll reset; when the retry fails too, the exception propagates
// and exactly one reset is recorded.
public void testSearchPhaseExecutionExceptionOnInitScroll() {
TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY));
expectThrows(SearchPhaseExecutionException.class, extractor::next);
assertThat(extractor.getNumScrollReset(), equalTo(1));
}
// Builds a context with two script fields - a plain mock script and one calling
// domainSplit() - and verifies extraction works end to end and that the plain
// script is serialized into the search request.
public void testDomainSplitScriptField() throws IOException {
SearchSourceBuilder.ScriptField withoutSplit = new SearchSourceBuilder.ScriptField("script1", mockScript("return 1+1;"), false);
SearchSourceBuilder.ScriptField withSplit = new SearchSourceBuilder.ScriptField(
"script2",
new Script(ScriptType.INLINE, "painless", "return domainSplit('foo.com', params);", emptyMap()),
false
);
List<SearchSourceBuilder.ScriptField> sFields = Arrays.asList(withoutSplit, withSplit);
ScrollDataExtractorContext context = new ScrollDataExtractorContext(
jobId,
extractedFields,
indices,
query,
sFields,
scrollSize,
1000,
2000,
Collections.emptyMap(),
SearchRequest.DEFAULT_INDICES_OPTIONS,
Collections.emptyMap()
);
TestDataExtractor extractor = new TestDataExtractor(context);
SearchResponse response1 = createSearchResponse(Arrays.asList(1100L, 1200L), Arrays.asList("a1", "a2"), Arrays.asList("b1", "b2"));
extractor.setNextResponse(response1);
assertThat(extractor.hasNext(), is(true));
Optional<InputStream> stream = extractor.next().data();
assertThat(stream.isPresent(), is(true));
String expectedStream = """
{"time":1100,"field_1":"a1"} {"time":1200,"field_1":"a2"}""";
assertThat(asString(stream.get()), equalTo(expectedStream));
// Empty response ends the extraction
SearchResponse response2 = createEmptySearchResponse();
extractor.setNextResponse(response2);
assertThat(extractor.hasNext(), is(true));
assertThat(extractor.next().data().isPresent(), is(false));
assertThat(extractor.hasNext(), is(false));
assertThat(capturedSearchRequests.size(), equalTo(1));
String searchRequest = XContentHelper.stripWhitespace(capturedSearchRequests.get(0).toString());
assertThat(searchRequest, containsString("\"size\":1000"));
assertThat(
searchRequest,
containsString(
"\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}},"
+ "{\"range\":{\"time\":{\"gte\":1000,\"lt\":2000,"
+ "\"format\":\"epoch_millis\",\"boost\":1.0}}}]"
)
);
assertThat(searchRequest, containsString("\"sort\":[{\"time\":{\"order\":\"asc\"}}]"));
assertThat(searchRequest, containsString("\"stored_fields\":\"_none_\""));
// Check for the scripts
assertThat(searchRequest, containsString("""
{"script":{"source":"return 1+1;","lang":"mockscript"}"""));
assertThat(capturedContinueScrollIds.size(), equalTo(1));
assertThat(capturedContinueScrollIds.get(0), equalTo(response1.getScrollId()));
List<String> capturedClearScrollIds = getCapturedClearScrollIds();
assertThat(capturedClearScrollIds.size(), equalTo(1));
assertThat(capturedClearScrollIds.get(0), equalTo(response2.getScrollId()));
}
// The data summary request must be a size-0 search with min/max aggregations on
// the time field, keeping total-hit tracking on, and must carry no sort clause.
public void testGetSummary() {
ScrollDataExtractorContext context = createContext(1000L, 2300L);
TestDataExtractor extractor = new TestDataExtractor(context);
extractor.setNextResponse(createSummaryResponse(1001L, 2299L, 10L));
DataSummary summary = extractor.getSummary();
// Summary values come straight from the mocked aggregations / hit count
assertThat(summary.earliestTime(), equalTo(1001L));
assertThat(summary.latestTime(), equalTo(2299L));
assertThat(summary.totalHits(), equalTo(10L));
assertThat(capturedSearchRequests.size(), equalTo(1));
String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", "");
assertThat(searchRequest, containsString("\"size\":0"));
assertThat(
searchRequest,
containsString(
"\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}},"
+ "{\"range\":{\"time\":{\"gte\":1000,\"lt\":2300,"
+ "\"format\":\"epoch_millis\",\"boost\":1.0}}}]"
)
);
assertThat(
searchRequest,
containsString(
"\"aggregations\":{\"earliest_time\":{\"min\":{\"field\":\"time\"}}," + "\"latest_time\":{\"max\":{\"field\":\"time\"}}}}"
)
);
// track_total_hits must not be disabled and no sort should be present
assertThat(searchRequest, not(containsString("\"track_total_hits\":false")));
assertThat(searchRequest, not(containsString("\"sort\"")));
}
/**
 * Builds a {@link ScrollDataExtractorContext} for the given [start, end) time
 * interval, reusing the test fixture's job id, extracted fields, indices,
 * query, script fields and scroll size.
 */
private ScrollDataExtractorContext createContext(long start, long end) {
return new ScrollDataExtractorContext(
jobId,
extractedFields,
indices,
query,
scriptFields,
scrollSize,
start,
end,
Collections.emptyMap(),
SearchRequest.DEFAULT_INDICES_OPTIONS,
Collections.emptyMap()
);
}
/**
 * Builds a mocked search response containing no hits at all, which the
 * extractor under test interprets as the end of a scroll.
 */
private SearchResponse createEmptySearchResponse() {
    List<Long> noTimestamps = Collections.emptyList();
    List<String> noValues = Collections.emptyList();
    return createSearchResponse(noTimestamps, noValues, noValues);
}
/**
 * Builds a mocked {@link SearchResponse} whose hits are formed from the given
 * parallel lists: position i yields one hit carrying the time field plus
 * "field_1" and "field_2" document fields. A fresh random scroll id is
 * attached so callers can assert which response's scroll was continued or
 * cleared.
 */
private SearchResponse createSearchResponse(List<Long> timestamps, List<String> field1Values, List<String> field2Values) {
SearchResponse searchResponse = mock(SearchResponse.class);
when(searchResponse.status()).thenReturn(RestStatus.OK);
when(searchResponse.getScrollId()).thenReturn(randomAlphaOfLength(1000));
List<SearchHit> hits = new ArrayList<>();
for (int i = 0; i < timestamps.size(); i++) {
Map<String, DocumentField> fields = new HashMap<>();
// The time field is registered under the extractor's configured time field name
fields.put(extractedFields.timeField(), new DocumentField("time", Collections.singletonList(timestamps.get(i))));
fields.put("field_1", new DocumentField("field_1", Collections.singletonList(field1Values.get(i))));
fields.put("field_2", new DocumentField("field_2", Collections.singletonList(field2Values.get(i))));
SearchHit hit = new SearchHit(randomInt(), null);
hit.addDocumentFields(fields, Map.of());
hits.add(hit);
}
SearchHits searchHits = new SearchHits(hits.toArray(SearchHits.EMPTY), new TotalHits(hits.size(), TotalHits.Relation.EQUAL_TO), 1);
// Hand the mock an unpooled copy so it is independent of ref-counting, then
// release the pooled original
when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled());
searchHits.decRef();
when(searchResponse.getTook()).thenReturn(TimeValue.timeValueMillis(randomNonNegativeLong()));
return searchResponse;
}
/**
 * Builds a mocked summary {@link SearchResponse}: an empty hit list with the
 * given total hit count, plus "earliest_time" (min) and "latest_time" (max)
 * aggregations holding the given start/end values.
 */
private SearchResponse createSummaryResponse(long start, long end, long totalHits) {
SearchResponse searchResponse = mock(SearchResponse.class);
when(searchResponse.getHits()).thenReturn(
new SearchHits(SearchHits.EMPTY, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1)
);
when(searchResponse.getAggregations()).thenReturn(
InternalAggregations.from(List.of(new Min("earliest_time", start, null, null), new Max("latest_time", end, null, null)))
);
return searchResponse;
}
/**
 * Collects the first scroll id of every captured clear-scroll request, in the
 * order the requests were issued.
 */
private List<String> getCapturedClearScrollIds() {
    List<String> scrollIds = new ArrayList<>();
    for (var clearScrollRequest : capturedClearScrollRequests.getAllValues()) {
        scrollIds.add(clearScrollRequest.getScrollIds().get(0));
    }
    return scrollIds;
}
/**
 * Reads the given stream fully as UTF-8 and returns its contents with the
 * individual lines joined by a single newline (no trailing newline).
 */
private static String asString(InputStream inputStream) throws IOException {
    StringBuilder content = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
        String line = reader.readLine();
        boolean first = true;
        while (line != null) {
            if (first == false) {
                content.append('\n');
            }
            content.append(line);
            first = false;
            line = reader.readLine();
        }
    }
    return content.toString();
}
}
| TestDataExtractor |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/htmlunit/HelloController.java | {
"start": 924,
"end": 1048
} | class ____ {
@RequestMapping("/a")
public String header(HttpServletRequest request) {
return "hello";
}
}
| HelloController |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/disposer/illegal/DisposerInjectTest.java | {
"start": 1143,
"end": 1349
} | class ____ {
@Produces
@Dependent
String produce() {
return "";
}
@Inject
void dispose(@Disposes String ignored) {
}
}
}
| ProducerDisposer |
java | apache__logging-log4j2 | log4j-layout-template-json/src/main/java/org/apache/logging/log4j/layout/template/json/util/JsonWriter.java | {
"start": 27487,
"end": 28538
} | class ____ {
private int maxStringLength;
private String truncatedStringSuffix;
public int getMaxStringLength() {
return maxStringLength;
}
public Builder setMaxStringLength(final int maxStringLength) {
this.maxStringLength = maxStringLength;
return this;
}
public String getTruncatedStringSuffix() {
return truncatedStringSuffix;
}
public Builder setTruncatedStringSuffix(final String truncatedStringSuffix) {
this.truncatedStringSuffix = truncatedStringSuffix;
return this;
}
public JsonWriter build() {
validate();
return new JsonWriter(this);
}
private void validate() {
if (maxStringLength <= 0) {
throw new IllegalArgumentException("was expecting maxStringLength > 0: " + maxStringLength);
}
Objects.requireNonNull(truncatedStringSuffix, "truncatedStringSuffix");
}
}
}
| Builder |
java | apache__camel | components/camel-saxon/src/test/java/org/apache/camel/component/xquery/XQueryWithExtensionTest.java | {
"start": 2819,
"end": 4108
} | class ____ extends ExtensionFunctionDefinition {
private static final long serialVersionUID = 1L;
@Override
public SequenceType[] getArgumentTypes() {
return new SequenceType[] { SequenceType.SINGLE_STRING };
}
@Override
public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) {
return SequenceType.SINGLE_STRING;
}
@Override
public StructuredQName getFunctionQName() {
return new StructuredQName("efx", "http://test/saxon/ext", "simple");
}
@Override
public ExtensionFunctionCall makeCallExpression() {
return new ExtensionFunctionCall() {
private static final long serialVersionUID = 1L;
@Override
public Sequence call(XPathContext xPathContext, Sequence[] sequences) throws XPathException {
// get value of first arg passed to the function
Item arg1 = sequences[0].head();
String arg1Val = arg1.getStringValue();
// return a altered version of the first arg
return new StringValue("arg1[" + arg1Val + "]");
}
};
}
}
}
| SimpleExtension |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashTests.java | {
"start": 1336,
"end": 5787
} | class ____ extends AbstractScalarFunctionTestCase {
public HashTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
List<TestCaseSupplier> cases = new ArrayList<>();
for (String algorithm : List.of("MD5", "SHA", "SHA-224", "SHA-256", "SHA-384", "SHA-512")) {
cases.addAll(createTestCases(algorithm));
}
cases.add(new TestCaseSupplier("Invalid algorithm", List.of(DataType.KEYWORD, DataType.KEYWORD), () -> {
var input = randomAlphaOfLength(10);
return new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(new BytesRef("invalid"), DataType.KEYWORD, "algorithm"),
new TestCaseSupplier.TypedData(new BytesRef(input), DataType.KEYWORD, "input")
),
"HashEvaluator[algorithm=Attribute[channel=0], input=Attribute[channel=1]]",
DataType.KEYWORD,
is(nullValue())
).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.")
.withWarning("Line 1:1: java.security.NoSuchAlgorithmException: invalid MessageDigest not available")
.withFoldingException(
InvalidArgumentException.class,
"invalid algorithm for [source]: invalid MessageDigest not available"
);
}));
return parameterSuppliersFromTypedDataWithDefaultChecks(true, cases);
}
private static List<TestCaseSupplier> createTestCases(String algorithm) {
return List.of(
createTestCase(algorithm, false, DataType.KEYWORD, DataType.KEYWORD),
createTestCase(algorithm, false, DataType.KEYWORD, DataType.TEXT),
createTestCase(algorithm, false, DataType.TEXT, DataType.KEYWORD),
createTestCase(algorithm, false, DataType.TEXT, DataType.TEXT),
createTestCase(algorithm, true, DataType.KEYWORD, DataType.KEYWORD),
createTestCase(algorithm, true, DataType.KEYWORD, DataType.TEXT),
createTestCase(algorithm, true, DataType.TEXT, DataType.KEYWORD),
createTestCase(algorithm, true, DataType.TEXT, DataType.TEXT)
);
}
private static TestCaseSupplier createTestCase(String algorithm, boolean forceLiteral, DataType algorithmType, DataType inputType) {
return new TestCaseSupplier(algorithm, List.of(algorithmType, inputType), () -> {
var input = randomFrom(TestCaseSupplier.stringCases(inputType)).get();
return new TestCaseSupplier.TestCase(
List.of(createTypedData(algorithm, forceLiteral, algorithmType, "algorithm"), input),
forceLiteral
? "HashConstantEvaluator[algorithm=" + algorithm + ", input=Attribute[channel=0]]"
: "HashEvaluator[algorithm=Attribute[channel=0], input=Attribute[channel=1]]",
DataType.KEYWORD,
equalTo(new BytesRef(hash(algorithm, BytesRefs.toString(input.data()))))
);
});
}
static void addHashFunctionTestCases(List<TestCaseSupplier> cases, String algorithm) {
TestCaseSupplier.forUnaryStrings(
cases,
"HashConstantEvaluator[algorithm=" + algorithm + ", input=Attribute[channel=0]]",
DataType.KEYWORD,
input -> new BytesRef(HashTests.hash(algorithm, BytesRefs.toString(input))),
List.of()
);
}
private static TestCaseSupplier.TypedData createTypedData(String value, boolean forceLiteral, DataType type, String name) {
var data = new TestCaseSupplier.TypedData(new BytesRef(value), type, name);
return forceLiteral ? data.forceLiteral() : data;
}
static String hash(String algorithm, String input) {
try {
return HexFormat.of().formatHex(MessageDigest.getInstance(algorithm).digest(input.getBytes(StandardCharsets.UTF_8)));
} catch (NoSuchAlgorithmException e) {
throw new IllegalArgumentException("Unknown algorithm: " + algorithm);
}
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new Hash(source, args.get(0), args.get(1));
}
}
| HashTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/logging/DeprecationCategory.java | {
"start": 997,
"end": 1221
} | enum ____ {
AGGREGATIONS,
ANALYSIS,
API,
COMPATIBLE_API,
INDICES,
MAPPINGS,
OTHER,
PARSING,
PLUGINS,
QUERIES,
SCRIPTING,
SECURITY,
SETTINGS,
TEMPLATES
}
| DeprecationCategory |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnicodeEscapeTest.java | {
"start": 1023,
"end": 1464
} | class ____ {
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(UnicodeEscape.class, getClass());
private final BugCheckerRefactoringTestHelper refactoring =
BugCheckerRefactoringTestHelper.newInstance(UnicodeEscape.class, getClass());
@Test
public void printableAsciiCharacter_finding() {
helper
.addSourceLines(
"Test.java",
"""
| UnicodeEscapeTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/SingleTableDiscriminatorFormulaTest.java | {
"start": 2549,
"end": 3486
} | class ____ {
@Id
private Long id;
private String owner;
private BigDecimal balance;
private BigDecimal interestRate;
//Getters and setters are omitted for brevity
//end::entity-inheritance-single-table-discriminator-formula-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getOwner() {
return owner;
}
public void setOwner(String owner) {
this.owner = owner;
}
public BigDecimal getBalance() {
return balance;
}
public void setBalance(BigDecimal balance) {
this.balance = balance;
}
public BigDecimal getInterestRate() {
return interestRate;
}
public void setInterestRate(BigDecimal interestRate) {
this.interestRate = interestRate;
}
//tag::entity-inheritance-single-table-discriminator-formula-example[]
}
@Entity(name = "DebitAccount")
@DiscriminatorValue(value = "Debit")
public static | Account |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeTests.java | {
"start": 1241,
"end": 14137
} | class ____ extends ESTestCase {
public static ApplicationPrivilege createPrivilege(final String applicationName, final String privilegeName, final String... patterns) {
return createPrivilege(applicationName, Collections.singleton(privilegeName), patterns);
}
public static ApplicationPrivilege createPrivilege(
final String applicationName,
final Set<String> privilegeNames,
final String... patterns
) {
// TODO rewrite this to use `ApplicationPrivilege.get()`
return new ApplicationPrivilege(applicationName, privilegeNames, patterns);
}
public void testValidationOfApplicationName() {
final String specialCharacters = ":;$#%()+='.{}[]!@^&'";
final Supplier<Character> specialCharacter = () -> specialCharacters.charAt(randomInt(specialCharacters.length() - 1));
assertValidationFailure("a p p", "application name", () -> ApplicationPrivilege.validateApplicationName("a p p"));
assertValidationFailure("ap", "application name", () -> ApplicationPrivilege.validateApplicationName("ap"));
for (String app : Arrays.asList(
"App",// must start with lowercase
"1app", // must start with letter
"app" + specialCharacter.get() // cannot contain special characters unless preceded by a "-" or "_"
)) {
assertValidationFailure(app, "application name", () -> ApplicationPrivilege.validateApplicationName(app));
assertValidationFailure(app, "application name", () -> ApplicationPrivilege.validateApplicationNameOrWildcard(app));
}
// no wildcards
assertValidationFailure("app*", "application names", () -> ApplicationPrivilege.validateApplicationName("app*"));
// no special characters with wildcards
final String appNameWithSpecialCharAndWildcard = "app" + specialCharacter.get() + "*";
assertValidationFailure(
appNameWithSpecialCharAndWildcard,
"application name",
() -> ApplicationPrivilege.validateApplicationNameOrWildcard(appNameWithSpecialCharAndWildcard)
);
String appNameWithSpecialChars = "myapp" + randomFrom('-', '_');
for (int i = randomIntBetween(1, 12); i > 0; i--) {
appNameWithSpecialChars = appNameWithSpecialChars + specialCharacter.get();
}
// these should all be OK
for (String app : Arrays.asList("app", "app1", "myApp", "myApp-:;$#%()+='.", "myApp_:;$#%()+='.", appNameWithSpecialChars)) {
assertNoException(app, () -> ApplicationPrivilege.validateApplicationName(app));
assertNoException(app, () -> ApplicationPrivilege.validateApplicationNameOrWildcard(app));
}
// wildcards in the suffix
for (String app : Arrays.asList("app1-*", "app1-foo*", "app1-.*", "app1-.foo.*", appNameWithSpecialChars + "*")) {
assertValidationFailure(app, "application name", () -> ApplicationPrivilege.validateApplicationName(app));
assertNoException(app, () -> ApplicationPrivilege.validateApplicationNameOrWildcard(app));
}
}
public void testValidationOfPrivilegeName() {
// must start with lowercase
assertValidationFailure("Read", "privilege names", () -> ApplicationPrivilege.validatePrivilegeName("Read"));
// must start with letter
assertValidationFailure("1read", "privilege names", () -> ApplicationPrivilege.validatePrivilegeName("1read"));
// cannot contain special characters
final String specialChars = ":;$#%()+=/',";
final String withSpecialChar = "read" + specialChars.charAt(randomInt(specialChars.length() - 1));
assertValidationFailure(withSpecialChar, "privilege names", () -> ApplicationPrivilege.validatePrivilegeName(withSpecialChar));
// these should all be OK
for (String priv : Arrays.asList("read", "read1", "readData", "read-data", "read.data", "read_data")) {
assertNoException(priv, () -> ApplicationPrivilege.validatePrivilegeName(priv));
assertNoException(priv, () -> ApplicationPrivilege.validatePrivilegeOrActionName(priv));
}
for (String priv : Arrays.asList("r e a d", "read\n", "copy®")) {
assertValidationFailure(priv, "privilege names and action", () -> ApplicationPrivilege.validatePrivilegeOrActionName(priv));
}
for (String priv : Arrays.asList("read:*", "read/*", "read/a_b.c-d+e%f#(g)")) {
assertNoException(priv, () -> ApplicationPrivilege.validatePrivilegeOrActionName(priv));
}
}
public void testNonePrivilege() {
final ApplicationPrivilege none = ApplicationPrivilege.NONE.apply("super-mega-app");
CharacterRunAutomaton run = new CharacterRunAutomaton(none.getAutomaton());
for (int i = randomIntBetween(5, 10); i > 0; i--) {
final String action;
if (randomBoolean()) {
action = randomAlphaOfLengthBetween(3, 12);
} else {
action = randomAlphaOfLengthBetween(3, 6) + randomFrom(":", "/") + randomAlphaOfLengthBetween(3, 8);
}
assertFalse("NONE should not grant " + action, run.run(action));
}
}
public void testGetPrivilegeByName() {
final ApplicationPrivilegeDescriptor myRead = descriptor("my-app", "read", "data:read/*", "action:login");
final ApplicationPrivilegeDescriptor myWrite = descriptor("my-app", "write", "data:write/*", "action:login");
final ApplicationPrivilegeDescriptor myAdmin = descriptor("my-app", "admin", "data:read/*", "action:*");
final ApplicationPrivilegeDescriptor yourRead = descriptor("your-app", "read", "data:read/*", "action:login");
final Set<ApplicationPrivilegeDescriptor> stored = Sets.newHashSet(myRead, myWrite, myAdmin, yourRead);
final Set<ApplicationPrivilege> myAppRead = ApplicationPrivilege.get("my-app", Collections.singleton("read"), stored);
assertThat(myAppRead, iterableWithSize(1));
assertPrivilegeEquals(myAppRead.iterator().next(), myRead);
final Set<ApplicationPrivilege> myAppWrite = ApplicationPrivilege.get("my-app", Collections.singleton("write"), stored);
assertThat(myAppWrite, iterableWithSize(1));
assertPrivilegeEquals(myAppWrite.iterator().next(), myWrite);
final Set<ApplicationPrivilege> myReadWrite = ApplicationPrivilege.get("my-app", Sets.newHashSet("read", "write"), stored);
assertThat(myReadWrite, Matchers.hasSize(1));
final ApplicationPrivilege readWrite = myReadWrite.iterator().next();
assertThat(readWrite.getApplication(), equalTo("my-app"));
assertThat(readWrite.name(), containsInAnyOrder("read", "write"));
assertThat(readWrite.getPatterns(), arrayContainingInAnyOrder("data:read/*", "data:write/*", "action:login"));
CharacterRunAutomaton run = new CharacterRunAutomaton(readWrite.getAutomaton());
for (String action : Arrays.asList("data:read/settings", "data:write/user/kimchy", "action:login")) {
assertTrue(run.run(action));
}
for (String action : Arrays.asList("data:delete/user/kimchy", "action:shutdown")) {
assertFalse(run.run(action));
}
}
public void testGetPrivilegeByWildcard() {
final ApplicationPrivilegeDescriptor apmRead = descriptor("apm", "read", "action:read/*");
final ApplicationPrivilegeDescriptor apmWrite = descriptor("apm", "write", "action:write/*");
final ApplicationPrivilegeDescriptor kibanaRead = descriptor("kibana", "read", "data:read/*", "action:read:*");
final ApplicationPrivilegeDescriptor kibanaWrite = descriptor("kibana", "write", "data:write/*", "action:w*");
final Set<ApplicationPrivilegeDescriptor> stored = Sets.newHashSet(apmRead, apmWrite, kibanaRead, kibanaWrite);
{
final Set<ApplicationPrivilege> everyThing = ApplicationPrivilege.get("*", Set.of("*"), stored);
assertThat(everyThing, hasItem(privilegeEquals("*", "*", Set.of("*"))));
assertThat(everyThing, hasItem(privilegeEquals("apm", "*", Set.of("*"))));
assertThat(everyThing, hasItem(privilegeEquals("kibana", "*", Set.of("*"))));
assertThat(everyThing, iterableWithSize(3));
}
{
final Set<ApplicationPrivilege> allKibana = ApplicationPrivilege.get("kibana", Set.of("*"), stored);
assertThat(allKibana, hasItem(privilegeEquals("kibana", "*", Set.of("*"))));
assertThat(allKibana, iterableWithSize(1));
}
{
final Set<ApplicationPrivilege> allRead = ApplicationPrivilege.get("*", Set.of("read"), stored);
assertThat(allRead, hasItem(privilegeEquals(kibanaRead)));
assertThat(allRead, hasItem(privilegeEquals(apmRead)));
assertThat(allRead, hasItem(privilegeEquals("*", "read", Set.of())));
assertThat(allRead, iterableWithSize(3));
}
}
private void assertPrivilegeEquals(ApplicationPrivilege privilege, ApplicationPrivilegeDescriptor descriptor) {
assertThat(privilege, privilegeEquals(descriptor));
}
private Matcher<ApplicationPrivilege> privilegeEquals(ApplicationPrivilegeDescriptor descriptor) {
return privilegeEquals(descriptor.getApplication(), descriptor.getName(), descriptor.getActions());
}
private Matcher<ApplicationPrivilege> privilegeEquals(String application, String name, Set<String> actions) {
return new CustomTypeSafeMatcher<>("equals(" + application + ";" + name + ";" + actions + ")") {
@Override
protected boolean matchesSafely(ApplicationPrivilege item) {
return item.getApplication().equals(application)
&& item.name().equals(Set.of(name))
&& Set.of(item.getPatterns()).equals(actions);
}
};
}
private ApplicationPrivilegeDescriptor descriptor(String application, String name, String... actions) {
return new ApplicationPrivilegeDescriptor(application, name, Sets.newHashSet(actions), Collections.emptyMap());
}
public void testEqualsAndHashCode() {
final ApplicationPrivilege privilege = randomPrivilege();
final EqualsHashCodeTestUtils.MutateFunction<ApplicationPrivilege> mutate = randomFrom(
orig -> createPrivilege("x" + orig.getApplication(), getPrivilegeName(orig), orig.getPatterns()),
orig -> createPrivilege(orig.getApplication(), "x" + getPrivilegeName(orig), orig.getPatterns()),
orig -> createPrivilege(orig.getApplication(), getPrivilegeName(orig), "*")
);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
privilege,
original -> createPrivilege(original.getApplication(), getPrivilegeName(original), original.getPatterns()),
mutate
);
}
private String getPrivilegeName(ApplicationPrivilege privilege) {
if (privilege.name.size() == 1) {
return privilege.name.iterator().next();
} else {
throw new IllegalStateException(privilege + " has a multivariate name: " + collectionToCommaDelimitedString(privilege.name));
}
}
private void assertValidationFailure(String reason, String messageContent, ThrowingRunnable body) {
final IllegalArgumentException exception;
try {
exception = expectThrows(IllegalArgumentException.class, body);
assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString(messageContent.toLowerCase(Locale.ROOT)));
} catch (AssertionFailedError e) {
fail(reason + " - " + e.getMessage());
}
}
private void assertNoException(String reason, ThrowingRunnable body) {
try {
body.run();
// pass
} catch (Throwable e) {
Assert.fail(reason + " - Expected no exception, but got: " + e);
}
}
private ApplicationPrivilege randomPrivilege() {
final String applicationName;
if (randomBoolean()) {
applicationName = "*";
} else {
applicationName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 10);
}
final String privilegeName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 8);
final String[] patterns = new String[randomIntBetween(0, 5)];
for (int i = 0; i < patterns.length; i++) {
final String suffix = randomBoolean() ? "*" : randomAlphaOfLengthBetween(4, 9);
patterns[i] = randomAlphaOfLengthBetween(2, 5) + "/" + suffix;
}
return createPrivilege(applicationName, privilegeName, patterns);
}
}
| ApplicationPrivilegeTests |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/web/server/ServerHttpSecurity.java | {
"start": 79031,
"end": 86311
} | class ____ {
private final ServerWebExchangeMatcher xhrMatcher = (exchange) -> Mono.just(exchange.getRequest().getHeaders())
.filter((h) -> h.getOrEmpty("X-Requested-With").contains("XMLHttpRequest"))
.flatMap((h) -> ServerWebExchangeMatcher.MatchResult.match())
.switchIfEmpty(ServerWebExchangeMatcher.MatchResult.notMatch());
private ReactiveAuthenticationManager authenticationManager;
private ServerSecurityContextRepository securityContextRepository;
private ServerAuthenticationEntryPoint entryPoint;
private ServerAuthenticationFailureHandler authenticationFailureHandler;
private final List<ServerAuthenticationSuccessHandler> defaultSuccessHandlers = new ArrayList<>(
List.of(new WebFilterChainServerAuthenticationSuccessHandler()));
private List<ServerAuthenticationSuccessHandler> authenticationSuccessHandlers = new ArrayList<>();
private HttpBasicSpec() {
List<DelegateEntry> entryPoints = new ArrayList<>();
entryPoints
.add(new DelegateEntry(this.xhrMatcher, new HttpStatusServerEntryPoint(HttpStatus.UNAUTHORIZED)));
DelegatingServerAuthenticationEntryPoint defaultEntryPoint = new DelegatingServerAuthenticationEntryPoint(
entryPoints);
defaultEntryPoint.setDefaultEntryPoint(new HttpBasicServerAuthenticationEntryPoint());
this.entryPoint = defaultEntryPoint;
}
/**
* The {@link ServerAuthenticationSuccessHandler} used after authentication
* success. Defaults to {@link WebFilterChainServerAuthenticationSuccessHandler}.
* Note that this method clears previously added success handlers via
* {@link #authenticationSuccessHandler(Consumer)}
* @param authenticationSuccessHandler the success handler to use
* @return the {@link HttpBasicSpec} to continue configuring
* @since 6.3
*/
public HttpBasicSpec authenticationSuccessHandler(
ServerAuthenticationSuccessHandler authenticationSuccessHandler) {
Assert.notNull(authenticationSuccessHandler, "authenticationSuccessHandler cannot be null");
authenticationSuccessHandler((handlers) -> {
handlers.clear();
handlers.add(authenticationSuccessHandler);
});
return this;
}
/**
* Allows customizing the list of {@link ServerAuthenticationSuccessHandler}. The
* default list contains a
* {@link WebFilterChainServerAuthenticationSuccessHandler}.
* @param handlersConsumer the handlers consumer
* @return the {@link HttpBasicSpec} to continue configuring
* @since 6.3
*/
public HttpBasicSpec authenticationSuccessHandler(
Consumer<List<ServerAuthenticationSuccessHandler>> handlersConsumer) {
Assert.notNull(handlersConsumer, "handlersConsumer cannot be null");
handlersConsumer.accept(this.authenticationSuccessHandlers);
return this;
}
/**
* The {@link ReactiveAuthenticationManager} used to authenticate. Defaults to
* {@link ServerHttpSecurity#authenticationManager(ReactiveAuthenticationManager)}.
* @param authenticationManager the authentication manager to use
* @return the {@link HttpBasicSpec} to continue configuring
*/
public HttpBasicSpec authenticationManager(ReactiveAuthenticationManager authenticationManager) {
this.authenticationManager = authenticationManager;
return this;
}
/**
* The {@link ServerSecurityContextRepository} used to save the
* {@code Authentication}. Defaults to
* {@link NoOpServerSecurityContextRepository}. For the {@code SecurityContext} to
* be loaded on subsequent requests the {@link ReactorContextWebFilter} must be
* configured to be able to load the value (they are not implicitly linked).
* @param securityContextRepository the repository to use
* @return the {@link HttpBasicSpec} to continue configuring
*/
public HttpBasicSpec securityContextRepository(ServerSecurityContextRepository securityContextRepository) {
this.securityContextRepository = securityContextRepository;
return this;
}
/**
* Allows easily setting the entry point.
* @param authenticationEntryPoint the {@link ServerAuthenticationEntryPoint} to
* use
* @return {@link HttpBasicSpec} for additional customization
* @since 5.2.0
*/
public HttpBasicSpec authenticationEntryPoint(ServerAuthenticationEntryPoint authenticationEntryPoint) {
Assert.notNull(authenticationEntryPoint, "authenticationEntryPoint cannot be null");
this.entryPoint = authenticationEntryPoint;
return this;
}
public HttpBasicSpec authenticationFailureHandler(
ServerAuthenticationFailureHandler authenticationFailureHandler) {
Assert.notNull(authenticationFailureHandler, "authenticationFailureHandler cannot be null");
this.authenticationFailureHandler = authenticationFailureHandler;
return this;
}
/**
* Disables HTTP Basic authentication.
* @return the {@link ServerHttpSecurity} to continue configuring
*/
public ServerHttpSecurity disable() {
ServerHttpSecurity.this.httpBasic = null;
return ServerHttpSecurity.this;
}
protected void configure(ServerHttpSecurity http) {
MediaTypeServerWebExchangeMatcher restMatcher = new MediaTypeServerWebExchangeMatcher(
MediaType.APPLICATION_ATOM_XML, MediaType.APPLICATION_FORM_URLENCODED, MediaType.APPLICATION_JSON,
MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_XML, MediaType.MULTIPART_FORM_DATA,
MediaType.TEXT_XML);
restMatcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL));
ServerWebExchangeMatcher notHtmlMatcher = new NegatedServerWebExchangeMatcher(
new MediaTypeServerWebExchangeMatcher(MediaType.TEXT_HTML));
ServerWebExchangeMatcher restNotHtmlMatcher = new AndServerWebExchangeMatcher(
Arrays.asList(notHtmlMatcher, restMatcher));
ServerWebExchangeMatcher preferredMatcher = new OrServerWebExchangeMatcher(
Arrays.asList(this.xhrMatcher, restNotHtmlMatcher));
ServerHttpSecurity.this.defaultEntryPoints.add(new DelegateEntry(preferredMatcher, this.entryPoint));
AuthenticationWebFilter authenticationFilter = new AuthenticationWebFilter(this.authenticationManager);
authenticationFilter.setAuthenticationFailureHandler(authenticationFailureHandler());
authenticationFilter.setServerAuthenticationConverter(new ServerHttpBasicAuthenticationConverter());
authenticationFilter.setSecurityContextRepository(this.securityContextRepository);
authenticationFilter.setAuthenticationSuccessHandler(getAuthenticationSuccessHandler(http));
http.addFilterAt(authenticationFilter, SecurityWebFiltersOrder.HTTP_BASIC);
}
private ServerAuthenticationSuccessHandler getAuthenticationSuccessHandler(ServerHttpSecurity http) {
if (this.authenticationSuccessHandlers.isEmpty()) {
return new DelegatingServerAuthenticationSuccessHandler(this.defaultSuccessHandlers);
}
return new DelegatingServerAuthenticationSuccessHandler(this.authenticationSuccessHandlers);
}
private ServerAuthenticationFailureHandler authenticationFailureHandler() {
if (this.authenticationFailureHandler != null) {
return this.authenticationFailureHandler;
}
return new ServerAuthenticationEntryPointFailureHandler(this.entryPoint);
}
}
/**
* Configures password management.
*
* @author Evgeniy Cheban
* @since 5.6
* @see #passwordManagement()
*/
public final | HttpBasicSpec |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/stream/sql/NullTypeTest.java | {
"start": 4785,
"end": 4952
} | class ____ extends ScalarFunction {
public @DataTypeHint("ARRAY<NULL>") Object eval(Integer i) {
return null;
}
}
}
| NestedNullTypeFunction |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/binding/ComponentRequirement.java | {
"start": 3358,
"end": 10147
} | enum ____ {
/** Make a new instance. */
NEW,
/** Throw an exception. */
THROW,
/** Allow use of null values. */
ALLOW,
}
/**
* An override for the requirement's null policy. If set, this is used as the null policy instead
* of the default behavior in {@link #nullPolicy}.
*
* <p>Some implementations' null policy can be determined upon construction (e.g., for binding
* instances), but others' require Elements which must wait until {@link #nullPolicy} is called.
*/
abstract Optional<NullPolicy> overrideNullPolicy();
/**
* The nullability of the requirement. If set, this is used to determine the nullability of the
* requirement's type.
*/
public Nullability getNullability() {
return nullability;
}
private Nullability nullability = Nullability.NOT_NULLABLE;
/** The requirement's null policy. */
public NullPolicy nullPolicy() {
if (overrideNullPolicy().isPresent()) {
return overrideNullPolicy().get();
}
switch (kind()) {
case MODULE:
return componentCanMakeNewInstances(typeElement())
? NullPolicy.NEW
: requiresAPassedInstance() ? NullPolicy.THROW : NullPolicy.ALLOW;
case DEPENDENCY:
case BOUND_INSTANCE:
return NullPolicy.THROW;
}
throw new AssertionError();
}
/**
* Returns true if the passed {@link ComponentRequirement} requires a passed instance in order to
* be used within a component.
*/
public boolean requiresAPassedInstance() {
if (!kind().isModule()) {
// Bound instances and dependencies always require the user to provide an instance.
return true;
}
return requiresModuleInstance() && !componentCanMakeNewInstances(typeElement());
}
/**
* Returns {@code true} if an instance is needed for this (module) requirement.
*
* @see #requiresModuleInstance(XTypeElement)
*/
public boolean requiresModuleInstance() {
return requiresModuleInstance(typeElement());
}
/**
* Returns {@code true} if an instance is needed for this (module) requirement.
*
* <p>An instance is only needed if there is a binding method on the module that is neither {@code
* abstract} nor {@code static}; if all bindings are one of those, then there should be no
* possible dependency on instance state in the module's bindings.
*
* <p>Alternatively, if the module is a Kotlin Object then the binding methods are considered
* {@code static}, requiring no module instance.
*/
public static boolean requiresModuleInstance(XTypeElement typeElement) {
if (typeElement.isKotlinObject() || typeElement.isCompanionObject()) {
return false;
}
return XTypeElements.getAllNonPrivateInstanceMethods(typeElement).stream()
.filter(ComponentRequirement::isBindingMethod)
.anyMatch(method -> !method.isAbstract() && !method.isStatic());
}
private static boolean isBindingMethod(XMethodElement method) {
// TODO(cgdecker): At the very least, we should have utility methods to consolidate this stuff
// in one place; listing individual annotations all over the place is brittle.
return hasAnyAnnotation(
method,
XTypeNames.PROVIDES,
XTypeNames.PRODUCES,
// TODO(ronshapiro): it would be cool to have internal meta-annotations that could describe
// these, like @AbstractBindingMethod
XTypeNames.BINDS,
XTypeNames.MULTIBINDS,
XTypeNames.BINDS_OPTIONAL_OF);
}
/** The key for this requirement, if one is available. */
public abstract Optional<Key> key();
/** Returns the name for this requirement that could be used as a variable. */
public abstract String variableName();
/** Returns a parameter spec for this requirement. */
public XParameterSpec toParameterSpec() {
return XParameterSpecs.of(variableName(), type().asTypeName());
}
public static ComponentRequirement forDependency(ComponentDependencyBinding binding) {
return forDependency(binding.key().type().xprocessing());
}
public static ComponentRequirement forDependency(XType type) {
checkArgument(isDeclared(checkNotNull(type)));
return create(Kind.DEPENDENCY, type);
}
public static ComponentRequirement forModule(XType type) {
checkArgument(isDeclared(checkNotNull(type)));
return create(Kind.MODULE, type);
}
public static ComponentRequirement forBoundInstance(BoundInstanceBinding binding) {
checkArgument(binding.kind().equals(BindingKind.BOUND_INSTANCE));
return forBoundInstance(
binding.key(), binding.isNullable(), binding.bindingElement().get(), binding.nullability());
}
static ComponentRequirement forBoundInstance(
Key key, boolean nullable, XElement elementForVariableName, Nullability nullability) {
return create(
Kind.BOUND_INSTANCE,
key.type().xprocessing(),
nullable ? Optional.of(NullPolicy.ALLOW) : Optional.empty(),
Optional.of(key),
nullability,
getSimpleName(elementForVariableName));
}
private static ComponentRequirement create(Kind kind, XType type) {
return create(
kind,
type,
/* overrideNullPolicy= */ Optional.empty(),
/* key= */ Optional.empty(),
Nullability.NOT_NULLABLE,
simpleVariableName(type.getTypeElement().asClassName()));
}
private static ComponentRequirement create(
Kind kind,
XType type,
Optional<NullPolicy> overrideNullPolicy,
Optional<Key> key,
Nullability nullability,
String variableName) {
ComponentRequirement requirement =
new AutoValue_ComponentRequirement(
kind, XTypes.equivalence().wrap(type), overrideNullPolicy, key, variableName);
requirement.nullability = nullability;
return requirement;
}
/**
* Returns true if and only if a component can instantiate new instances (typically of a module)
* rather than requiring that they be passed.
*/
// TODO(bcorso): Should this method throw if its called knowing that an instance is not needed?
public static boolean componentCanMakeNewInstances(XTypeElement typeElement) {
// TODO(gak): still need checks for visibility
return typeElement.isClass()
&& !typeElement.isAbstract()
&& !requiresEnclosingInstance(typeElement)
&& hasVisibleDefaultConstructor(typeElement);
}
private static boolean requiresEnclosingInstance(XTypeElement typeElement) {
return isNested(typeElement) && !typeElement.isStatic();
}
private static boolean hasVisibleDefaultConstructor(XTypeElement typeElement) {
return typeElement.getConstructors().stream()
.anyMatch(constructor -> !constructor.isPrivate() && constructor.getParameters().isEmpty());
}
}
| NullPolicy |
java | grpc__grpc-java | examples/src/main/java/io/grpc/examples/experimental/CompressingHelloWorldClient.java | {
"start": 1272,
"end": 3157
} | class ____ {
private static final Logger logger =
Logger.getLogger(CompressingHelloWorldClient.class.getName());
private final ManagedChannel channel;
private final GreeterGrpc.GreeterBlockingStub blockingStub;
/** Construct client connecting to HelloWorld server at {@code host:port}. */
public CompressingHelloWorldClient(String host, int port) {
channel = Grpc.newChannelBuilderForAddress(host, port, InsecureChannelCredentials.create())
.build();
blockingStub = GreeterGrpc.newBlockingStub(channel);
}
public void shutdown() throws InterruptedException {
channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
}
/** Say hello to server. */
public void greet(String name) {
logger.info("Will try to greet " + name + " ...");
HelloRequest request = HelloRequest.newBuilder().setName(name).build();
HelloReply response;
try {
// This enables compression for requests. Independent of this setting, servers choose whether
// to compress responses.
response = blockingStub.withCompression("gzip").sayHello(request);
} catch (StatusRuntimeException e) {
logger.log(Level.WARNING, "RPC failed: {0}", e.getStatus());
return;
}
logger.info("Greeting: " + response.getMessage());
}
/**
* Greet server. If provided, the first element of {@code args} is the name to use in the
* greeting.
*/
public static void main(String[] args) throws Exception {
// Access a service running on the local machine on port 50051
CompressingHelloWorldClient client = new CompressingHelloWorldClient("localhost", 50051);
try {
String user = "world";
// Use the arg as the name to greet if provided
if (args.length > 0) {
user = args[0];
}
client.greet(user);
} finally {
client.shutdown();
}
}
}
| CompressingHelloWorldClient |
java | elastic__elasticsearch | modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java | {
"start": 1211,
"end": 1267
} | class ____ extends AbstractProcessor {
| ConvertProcessor |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/runtime/operators/windowing/WindowOperatorContractTest.java | {
"start": 147480,
"end": 149033
} | interface ____ {
void setIsEventTime(WindowAssigner<?, ?> mockAssigner);
void advanceTime(OneInputStreamOperatorTestHarness testHarness, long timestamp)
throws Exception;
void registerTimer(Trigger.TriggerContext ctx, long timestamp);
void deleteTimer(Trigger.TriggerContext ctx, long timestamp);
int numTimers(AbstractStreamOperatorTestHarness testHarness);
int numTimersOtherDomain(AbstractStreamOperatorTestHarness testHarness);
void shouldRegisterTimerOnElement(Trigger<?, TimeWindow> mockTrigger, long timestamp)
throws Exception;
void shouldDeleteTimerOnElement(Trigger<?, TimeWindow> mockTrigger, long timestamp)
throws Exception;
void shouldContinueOnTime(Trigger<?, TimeWindow> mockTrigger) throws Exception;
void shouldFireOnTime(Trigger<?, TimeWindow> mockTrigger) throws Exception;
void shouldFireAndPurgeOnTime(Trigger<?, TimeWindow> mockTrigger) throws Exception;
void shouldPurgeOnTime(Trigger<?, TimeWindow> mockTrigger) throws Exception;
void verifyTriggerCallback(
Trigger<?, TimeWindow> mockTrigger,
VerificationMode verificationMode,
Long time,
TimeWindow window)
throws Exception;
void verifyCorrectTime(
OneInputStreamOperatorTestHarness testHarness,
InternalWindowFunction.InternalWindowContext context);
}
private static | TimeDomainAdaptor |
java | grpc__grpc-java | services/src/generated/main/grpc/io/grpc/channelz/v1/ChannelzGrpc.java | {
"start": 18970,
"end": 19426
} | class ____
implements io.grpc.BindableService, AsyncService {
@java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
return ChannelzGrpc.bindService(this);
}
}
/**
* A stub to allow clients to do asynchronous rpc calls to service Channelz.
* <pre>
* Channelz is a service exposed by gRPC servers that provides detailed debug
* information.
* </pre>
*/
public static final | ChannelzImplBase |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/volume/csi/ContainerVolumePublisher.java | {
"start": 2173,
"end": 8720
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(ContainerVolumePublisher.class);
private final Container container;
private final String localMountRoot;
private final OCIContainerRuntime runtime;
public ContainerVolumePublisher(Container container, String localMountRoot,
OCIContainerRuntime runtime) {
LOG.info("Initiate container volume publisher, containerID={},"
+ " volume local mount rootDir={}",
container.getContainerId().toString(), localMountRoot);
this.container = container;
this.localMountRoot = localMountRoot;
this.runtime = runtime;
}
/**
* It first discovers the volume info from container resource;
* then negotiates with CSI driver adaptor to publish the volume on this
* node manager, on a specific directory under container's work dir;
* and then map the local mounted directory to volume target mount in
* the docker container.
*
* CSI volume publish is a two phase work, by reaching up here
* we can assume the 1st phase is done on the RM side, which means
* YARN is already called the controller service of csi-driver
* to publish the volume; here we only need to call the node service of
* csi-driver to publish the volume on this local node manager.
*
* @return a map where each key is the local mounted path on current node,
* and value is the remote mount path on the container.
* @throws YarnException
* @throws IOException
*/
public Map<String, String> publishVolumes() throws YarnException,
IOException {
LOG.info("publishing volumes");
Map<String, String> volumeMounts = new HashMap<>();
List<VolumeMetaData> volumes = getVolumes();
LOG.info("Found {} volumes to be published on this node", volumes.size());
for (VolumeMetaData volume : volumes) {
Map<String, String> bindings = publishVolume(volume);
if (bindings != null && !bindings.isEmpty()) {
volumeMounts.putAll(bindings);
}
}
return volumeMounts;
}
public void unpublishVolumes() throws YarnException, IOException {
LOG.info("Un-publishing Volumes");
List<VolumeMetaData> volumes = getVolumes();
LOG.info("Volumes to un-publish {}", volumes.size());
for (VolumeMetaData volume : volumes) {
this.unpublishVolume(volume);
}
}
private File getLocalVolumeMountPath(
String containerWorkDir, String volumeId) {
return new File(containerWorkDir, volumeId + "_mount");
}
private File getLocalVolumeStagingPath(
String containerWorkDir, String volumeId) {
return new File(containerWorkDir, volumeId + "_staging");
}
private List<VolumeMetaData> getVolumes() throws InvalidVolumeException {
List<VolumeMetaData> volumes = new ArrayList<>();
Resource containerResource = container.getResource();
if (containerResource != null) {
for (ResourceInformation resourceInformation :
containerResource.getAllResourcesListCopy()) {
if (resourceInformation.getTags()
.contains(CsiConstants.CSI_VOLUME_RESOURCE_TAG)) {
volumes.addAll(VolumeMetaData.fromResource(resourceInformation));
}
}
}
if (volumes.size() > 0) {
LOG.info("Total number of volumes require provisioning is {}",
volumes.size());
}
return volumes;
}
private Map<String, String> publishVolume(VolumeMetaData volume)
throws IOException, YarnException {
Map<String, String> bindVolumes = new HashMap<>();
// compose a local mount for CSI volume with the container ID
File localMount = getLocalVolumeMountPath(
localMountRoot, volume.getVolumeId().toString());
File localStaging = getLocalVolumeStagingPath(
localMountRoot, volume.getVolumeId().toString());
LOG.info("Volume {}, local mount path: {}, local staging path {}",
volume.getVolumeId().toString(), localMount, localStaging);
NodePublishVolumeRequest publishRequest = NodePublishVolumeRequest
.newInstance(volume.getVolumeId().getId(), // volume Id
false, // read only flag
localMount.getAbsolutePath(), // target path
localStaging.getAbsolutePath(), // staging path
new ValidateVolumeCapabilitiesRequest.VolumeCapability(
ValidateVolumeCapabilitiesRequest
.AccessMode.SINGLE_NODE_WRITER,
ValidateVolumeCapabilitiesRequest.VolumeType.FILE_SYSTEM,
ImmutableList.of()), // capability
ImmutableMap.of(), // publish context
ImmutableMap.of()); // secrets
// make sure the volume is a known type
if (runtime.getCsiClients().get(volume.getDriverName()) == null) {
throw new YarnException("No csi-adaptor is found that can talk"
+ " to csi-driver " + volume.getDriverName());
}
// publish volume to node
LOG.info("Publish volume on NM, request {}",
publishRequest.toString());
runtime.getCsiClients().get(volume.getDriverName())
.nodePublishVolume(publishRequest);
// once succeed, bind the container to this mount
String containerMountPath = volume.getMountPoint();
bindVolumes.put(localMount.getAbsolutePath(), containerMountPath);
return bindVolumes;
}
private void unpublishVolume(VolumeMetaData volume)
throws YarnException, IOException {
CsiAdaptorProtocol csiClient =
runtime.getCsiClients().get(volume.getDriverName());
if (csiClient == null) {
throw new YarnException(
"No csi-adaptor is found that can talk"
+ " to csi-driver " + volume.getDriverName());
}
// When container is launched, the container work dir is memorized,
// and that is also the dir we mount the volume to.
File localMount = getLocalVolumeMountPath(container.getCsiVolumesRootDir(),
volume.getVolumeId().toString());
if (!localMount.exists()) {
LOG.info("Local mount {} no longer exist, skipping cleaning"
+ " up the volume", localMount.getAbsolutePath());
return;
}
NodeUnpublishVolumeRequest unpublishRequest =
NodeUnpublishVolumeRequest.newInstance(
volume.getVolumeId().getId(), // volume id
localMount.getAbsolutePath()); // target path
// un-publish volume from node
LOG.info("Un-publish volume {}, request {}",
volume.getVolumeId().toString(), unpublishRequest.toString());
csiClient.nodeUnpublishVolume(unpublishRequest);
}
}
| ContainerVolumePublisher |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/rolling/AbstractRolloverStrategy.java | {
"start": 1735,
"end": 6859
} | class ____ implements RolloverStrategy {
/**
* Allow subclasses access to the status logger without creating another instance.
*/
protected static final Logger LOGGER = StatusLogger.getLogger();
public static final Pattern PATTERN_COUNTER = Pattern.compile(".*%(?<ZEROPAD>0)?(?<PADDING>\\d+)?i.*");
protected final StrSubstitutor strSubstitutor;
protected AbstractRolloverStrategy(final StrSubstitutor strSubstitutor) {
this.strSubstitutor = strSubstitutor;
}
public StrSubstitutor getStrSubstitutor() {
return strSubstitutor;
}
protected Action merge(final Action compressAction, final List<Action> custom, final boolean stopOnError) {
if (custom.isEmpty()) {
return compressAction;
}
if (compressAction == null) {
return new CompositeAction(custom, stopOnError);
}
final List<Action> all = new ArrayList<>();
all.add(compressAction);
all.addAll(custom);
return new CompositeAction(all, stopOnError);
}
protected int suffixLength(final String lowFilename) {
for (final FileExtension extension : FileExtension.values()) {
if (extension.isExtensionFor(lowFilename)) {
return extension.length();
}
}
return 0;
}
protected SortedMap<Integer, Path> getEligibleFiles(final RollingFileManager manager) {
return getEligibleFiles(manager, true);
}
protected SortedMap<Integer, Path> getEligibleFiles(final RollingFileManager manager, final boolean isAscending) {
final StringBuilder buf = new StringBuilder();
final String pattern = manager.getPatternProcessor().getPattern();
manager.getPatternProcessor().formatFileName(strSubstitutor, buf, NotANumber.NAN);
final String fileName = manager.isDirectWrite() ? "" : manager.getFileName();
return getEligibleFiles(fileName, buf.toString(), pattern, isAscending);
}
protected SortedMap<Integer, Path> getEligibleFiles(final String path, final String pattern) {
return getEligibleFiles("", path, pattern, true);
}
@Deprecated
protected SortedMap<Integer, Path> getEligibleFiles(
final String path, final String logfilePattern, final boolean isAscending) {
return getEligibleFiles("", path, logfilePattern, isAscending);
}
@SuppressFBWarnings(
value = "PATH_TRAVERSAL_IN",
justification = "The file path should be specified in the configuration file.")
protected SortedMap<Integer, Path> getEligibleFiles(
final String currentFile, final String path, final String logfilePattern, final boolean isAscending) {
final TreeMap<Integer, Path> eligibleFiles = new TreeMap<>();
final File file = new File(path);
File parent = file.getParentFile();
if (parent == null) {
parent = new File(".");
} else {
parent.mkdirs();
}
if (!PATTERN_COUNTER.matcher(logfilePattern).find()) {
return eligibleFiles;
}
final Path dir = parent.toPath();
String fileName = file.getName();
final int suffixLength = suffixLength(fileName);
// use Pattern.quote to treat all initial parts of the fileName as literal
// this fixes issues with filenames containing 'magic' regex characters
if (suffixLength > 0) {
fileName = Pattern.quote(fileName.substring(0, fileName.length() - suffixLength)) + ".*";
} else {
fileName = Pattern.quote(fileName);
}
// since we insert a pattern inside a regex escaped string,
// surround it with quote characters so that (\d) is treated as a pattern and not a literal
final String filePattern = fileName.replaceFirst("0*\\u0000", "\\\\E(0?\\\\d+)\\\\Q");
final Pattern pattern = Pattern.compile(filePattern);
final Path current = currentFile.length() > 0 ? new File(currentFile).toPath() : null;
LOGGER.debug("Current file: {}", currentFile);
try (final DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
for (final Path entry : stream) {
final Matcher matcher = pattern.matcher(entry.toFile().getName());
if (matcher.matches() && !entry.equals(current)) {
try {
final Integer index = Integers.parseInt(matcher.group(1));
eligibleFiles.put(index, entry);
} catch (NumberFormatException ex) {
LOGGER.debug(
"Ignoring file {} which matches pattern but the index is invalid.",
entry.toFile().getName());
}
}
}
} catch (final IOException ioe) {
throw new LoggingException("Error reading folder " + dir + " " + ioe.getMessage(), ioe);
}
return isAscending ? eligibleFiles : eligibleFiles.descendingMap();
}
}
| AbstractRolloverStrategy |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/util/WatchManager.java | {
"start": 2098,
"end": 2874
} | class ____ {
private final Watcher watcher;
// Only used for logging
private volatile long lastModifiedMillis;
public ConfigurationMonitor(final long lastModifiedMillis, final Watcher watcher) {
this.watcher = watcher;
this.lastModifiedMillis = lastModifiedMillis;
}
public Watcher getWatcher() {
return watcher;
}
private void setLastModifiedMillis(final long lastModifiedMillis) {
this.lastModifiedMillis = lastModifiedMillis;
}
@Override
public String toString() {
return "ConfigurationMonitor [watcher=" + watcher + ", lastModifiedMillis=" + lastModifiedMillis + "]";
}
}
private static | ConfigurationMonitor |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusTestExtension.java | {
"start": 35371,
"end": 43040
} | class ____ "
+ requiredTestClass,
e);
}
initTestState(extensionContext, state);
return result;
}
private void initTestState(ExtensionContext extensionContext, QuarkusTestExtensionState state) {
try {
actualTestClass = extensionContext.getRequiredTestClass();
if (extensionContext.getRequiredTestClass().isAnnotationPresent(Nested.class)) {
Class<?> outerClass = actualTestClass.getEnclosingClass();
Constructor<?> declaredConstructor = actualTestClass.getDeclaredConstructor(outerClass);
declaredConstructor.setAccessible(true);
if (outerClass.isInstance(actualTestInstance)) {
outerInstances.add(actualTestInstance);
actualTestInstance = declaredConstructor.newInstance(actualTestInstance);
} else {
Object outerInstance = createActualTestInstance(outerClass, state);
invokeAfterConstructCallbacks(Object.class, outerInstance);
actualTestInstance = declaredConstructor.newInstance(outerInstance);
outerInstances.add(outerInstance);
}
} else {
outerInstances.clear();
actualTestInstance = createActualTestInstance(actualTestClass, state);
}
invokeAfterConstructCallbacks(Object.class, actualTestInstance);
} catch (Exception e) {
throw new TestInstantiationException("Failed to create test instance",
e instanceof InvocationTargetException ? e.getCause() : e);
}
}
private Object createActualTestInstance(Class<?> testClass, QuarkusTestExtensionState state)
throws ClassNotFoundException, IllegalAccessException, InvocationTargetException, NoSuchMethodException {
Object testInstance = runningQuarkusApplication.instance(testClass);
Class<?> resM = Thread.currentThread().getContextClassLoader().loadClass(TestHTTPResourceManager.class.getName());
resM.getDeclaredMethod("inject", Object.class, List.class).invoke(null, testInstance,
testHttpEndpointProviders);
state.testResourceManager.getClass().getMethod("inject", Object.class).invoke(state.testResourceManager,
testInstance);
return testInstance;
}
@Override
public void interceptBeforeEachMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext,
ExtensionContext extensionContext) throws Throwable {
if (isNativeOrIntegrationTest(extensionContext.getRequiredTestClass())) {
invocation.proceed();
return;
}
runExtensionMethod(invocationContext, extensionContext, true);
invocation.skip();
}
@Override
public void interceptTestMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext,
ExtensionContext extensionContext) throws Throwable {
if (isNativeOrIntegrationTest(extensionContext.getRequiredTestClass())) {
invocation.proceed();
return;
}
//as a convenience to the user we attach any exceptions from the server itself
//as suppressed exceptions from the failure
//this makes it easy to see why your request has failed in the test output itself
//instead of needed to look in the log output
List<Throwable> serverExceptions = new CopyOnWriteArrayList<>();
ExceptionReporting.setListener(serverExceptions::add);
try {
runExtensionMethod(invocationContext, extensionContext, true);
invocation.skip();
} catch (Throwable t) {
for (var serverException : serverExceptions) {
if (t == serverException) {
// do not add a suppressed exception to itself
continue;
}
t.addSuppressed(serverException);
}
throw t;
} finally {
ExceptionReporting.setListener(null);
}
}
@Override
public void interceptDynamicTest(Invocation<Void> invocation, ExtensionContext extensionContext) throws Throwable {
// TODO check if this is needed; the earlier interceptor may already have done it
if (runningQuarkusApplication == null) {
invocation.proceed();
return;
}
var old = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(runningQuarkusApplication.getClassLoader());
invocation.proceed();
} finally {
Thread.currentThread().setContextClassLoader(old);
}
}
@Override
public void interceptTestTemplateMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext,
ExtensionContext extensionContext) throws Throwable {
if (isNativeOrIntegrationTest(extensionContext.getRequiredTestClass())) {
invocation.proceed();
return;
}
runExtensionMethod(invocationContext, extensionContext);
invocation.skip();
}
@SuppressWarnings("unchecked")
@Override
public <T> T interceptTestFactoryMethod(Invocation<T> invocation,
ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable {
if (isNativeOrIntegrationTest(extensionContext.getRequiredTestClass())) {
return invocation.proceed();
}
T result = (T) runExtensionMethod(invocationContext, extensionContext);
invocation.skip();
return result;
}
// TODO can this and the other interceptions go away?
@Override
public void interceptAfterEachMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext,
ExtensionContext extensionContext) throws Throwable {
if (isNativeOrIntegrationTest(extensionContext.getRequiredTestClass())) {
invocation.proceed();
return;
}
runExtensionMethod(invocationContext, extensionContext, true);
invocation.skip();
}
@Override
public void interceptAfterAllMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext,
ExtensionContext extensionContext) throws Throwable {
if (runningQuarkusApplication == null || isNativeOrIntegrationTest(extensionContext.getRequiredTestClass())) {
invocation.proceed();
return;
}
runExtensionMethod(invocationContext, extensionContext);
invocation.skip();
}
private Object runExtensionMethod(ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext)
throws Throwable {
return runExtensionMethod(invocationContext, extensionContext, false);
}
private Object runExtensionMethod(ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext,
boolean testMethodInvokersAllowed)
throws Throwable {
resetHangTimeout();
try {
Class<?> testClassFromTCCL = extensionContext.getRequiredTestClass();
Map<Class<?>, Object> allTestsClasses = new HashMap<>();
// static loading
allTestsClasses.put(testClassFromTCCL, actualTestInstance);
// this is needed to support before*** and after*** methods that are part of | is |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/transformer/AnnotationTransformerLoader.java | {
"start": 5770,
"end": 7724
} | class ____ not be found: {}",
type.getCanonicalName(), e, e);
}
}
protected boolean acceptClass(Class<?> type) {
return Transformer.class.isAssignableFrom(type) && type.isAnnotationPresent(DataTypeTransformer.class);
}
protected void findPackages(Set<String> packages, ClassLoader classLoader) throws IOException {
Enumeration<URL> resources = classLoader.getResources(META_INF_SERVICES);
while (resources.hasMoreElements()) {
URL url = resources.nextElement();
String path = url.getPath();
if (!visitedURIs.contains(path)) {
// remember we have visited this uri so we wont read it twice
visitedURIs.add(path);
LOG.debug("Loading file {} to retrieve list of packages, from url: {}", META_INF_SERVICES, url);
try (BufferedReader reader
= IOHelper.buffered(new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
while (true) {
String line = reader.readLine();
if (line == null) {
break;
}
line = line.trim();
if (line.startsWith("#") || line.isEmpty()) {
continue;
}
packages.add(line);
}
}
}
}
}
@Override
public void transform(Message message, DataType from, DataType to) throws Exception {
// noop
}
public void setPackageName(String packageName) {
this.packageName = packageName;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
}
| could |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/internal/ClassJsonAdapterTest.java | {
"start": 8232,
"end": 8727
} | class ____ {
NoArgConstructorThrowsUncheckedException() throws Exception {
throw new UnsupportedOperationException("foo");
}
}
@Test
public void noArgConstructorThrowsUncheckedException() throws Exception {
try {
fromJson(NoArgConstructorThrowsUncheckedException.class, "{}");
fail();
} catch (UnsupportedOperationException expected) {
assertThat(expected).hasMessageThat().isEqualTo("foo");
}
}
static | NoArgConstructorThrowsUncheckedException |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unused/UnusedExclusionTest.java | {
"start": 3463,
"end": 5023
} | class ____ {
public void test(BeanContainer beanContainer) {
// This should trigger the warning - Gama was removed
Gama gama = beanContainer.beanInstanceFactory(Gama.class).create().get();
// Test that fallback was used - no injection was performed
Assertions.assertNull(gama.beanManager);
}
}
@Test
public void testBeans() {
ArcContainer container = Arc.container();
String expectedBeanResponse = "ok";
InstanceHandle<Alpha> alphaInstance = container.instance(Alpha.class);
Assertions.assertTrue(alphaInstance.isAvailable());
Assertions.assertEquals(expectedBeanResponse, alphaInstance.get().ping());
InstanceHandle<Beta> betaInstance = container.instance(Beta.class);
Assertions.assertTrue(betaInstance.isAvailable());
Assertions.assertEquals(expectedBeanResponse, betaInstance.get().ping());
InstanceHandle<Charlie> charlieInstance = container.instance(Charlie.class);
Assertions.assertTrue(charlieInstance.isAvailable());
Assertions.assertEquals(expectedBeanResponse, charlieInstance.get().ping());
InstanceHandle<Delta> deltaInstance = container.instance(Delta.class);
Assertions.assertTrue(deltaInstance.isAvailable());
Assertions.assertEquals(expectedBeanResponse, deltaInstance.get().ping());
Assertions.assertFalse(container.instance(Gama.class).isAvailable());
}
// unused bean, won't be removed
@ApplicationScoped
static | TestRecorder |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RateLimiterConfig.java | {
"start": 686,
"end": 2034
} | class ____ {
private RateType rateType;
private Long rateInterval;
private Long rate;
public RateLimiterConfig(RateType rateType, Long rateInterval, Long rate) {
super();
this.rateType = rateType;
this.rateInterval = rateInterval;
this.rate = rate;
}
/**
* Returns current rate type set through {@link RRateLimiter#trySetRate(RateType, long, long, RateIntervalUnit)}
* or {@link RRateLimiter#trySetRateAsync(RateType, long, long, RateIntervalUnit)} method.
*
* @return rate type
*/
public RateType getRateType() {
return rateType;
}
/**
* Returns current rate time interval value set through {@link RRateLimiter#trySetRate(RateType, long, long, RateIntervalUnit)}
* or {@link RRateLimiter#trySetRateAsync(RateType, long, long, RateIntervalUnit)} method.
*
* @return rate time interval in milliseconds
*/
public Long getRateInterval() {
return rateInterval;
}
/**
* Returns current rate value set through {@link RRateLimiter#trySetRate(RateType, long, long, RateIntervalUnit)}
* or {@link RRateLimiter#trySetRateAsync(RateType, long, long, RateIntervalUnit)} method.
*
* @return rate
*/
public Long getRate() {
return rate;
}
}
| RateLimiterConfig |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RBlockingQueueRx.java | {
"start": 962,
"end": 1091
} | interface ____ BlockingQueue
*
* @author Nikita Koksharov
* @param <V> the type of elements held in this collection
*/
public | for |
java | apache__flink | flink-filesystems/flink-oss-fs-hadoop/src/main/java/org/apache/flink/fs/osshadoop/writer/OSSRecoverableMultipartUpload.java | {
"start": 6921,
"end": 8243
} | class ____ implements Runnable {
private final OSSAccessor ossAccessor;
private final String objectName;
private final String uploadId;
private final int partNumber;
private final RefCountedFSOutputStream file;
private final CompletableFuture<PartETag> future;
UploadTask(
OSSAccessor ossAccessor,
String objectName,
String uploadId,
int partNumber,
RefCountedFSOutputStream file,
CompletableFuture<PartETag> future) {
this.ossAccessor = ossAccessor;
this.objectName = objectName;
this.uploadId = uploadId;
checkArgument(partNumber >= 1 && partNumber <= 10_000);
this.partNumber = partNumber;
this.file = file;
this.future = future;
}
@Override
public void run() {
try {
PartETag partETag =
ossAccessor.uploadPart(
file.getInputFile(), objectName, uploadId, partNumber);
future.complete(partETag);
file.release();
} catch (Throwable t) {
future.completeExceptionally(t);
}
}
}
}
| UploadTask |
java | quarkusio__quarkus | integration-tests/reactive-messaging-context-propagation/src/test/java/io/quarkus/it/kafka/KafkaContextPropagationIT.java | {
"start": 175,
"end": 404
} | class ____ extends KafkaContextPropagationTest {
@Override
protected Matcher<String> assertBodyRequestScopedContextWasNotActive() {
return Matchers.not(Matchers.blankOrNullString());
}
}
| KafkaContextPropagationIT |
java | apache__flink | flink-end-to-end-tests/flink-stream-sql-test/src/main/java/org/apache/flink/sql/tests/StreamSQLTestProgram.java | {
"start": 11964,
"end": 13644
} | class ____
implements MapFunction<Row, Row>, CheckpointedFunction, ResultTypeQueryable {
// counts all processed records of all previous execution attempts
private int saveRecordCnt = 0;
// counts all processed records of this execution attempt
private int lostRecordCnt = 0;
private ListState<Integer> state = null;
@Override
public Row map(Row value) {
// the both counts are the same only in the first execution attempt
if (saveRecordCnt == 1 && lostRecordCnt == 1) {
throw new RuntimeException("Kill this Job!");
}
// update checkpointed counter
saveRecordCnt++;
// update non-checkpointed counter
lostRecordCnt++;
// forward record
return value;
}
@Override
public TypeInformation getProducedType() {
return Types.ROW(Types.INT, Types.SQL_TIMESTAMP);
}
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
state =
context.getOperatorStateStore()
.getListState(
new ListStateDescriptor<Integer>(
"state", IntSerializer.INSTANCE));
for (Integer i : state.get()) {
saveRecordCnt += i;
}
}
@Override
public void snapshotState(FunctionSnapshotContext context) throws Exception {
state.update(Collections.singletonList(saveRecordCnt));
}
}
}
| KillMapper |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/test/java/org/springframework/boot/devtools/autoconfigure/AbstractDevToolsDataSourceAutoConfigurationTests.java | {
"start": 1905,
"end": 5121
} | class ____ {
@Test
void singleManuallyConfiguredDataSourceIsNotClosed() throws Exception {
try (ConfigurableApplicationContext context = getContext(
() -> createContext(SingleDataSourceConfiguration.class))) {
DataSource dataSource = context.getBean(DataSource.class);
Statement statement = configureDataSourceBehavior(dataSource);
then(statement).should(never()).execute("SHUTDOWN");
}
}
@Test
void multipleDataSourcesAreIgnored() throws Exception {
try (ConfigurableApplicationContext context = getContext(
() -> createContext(MultipleDataSourcesConfiguration.class))) {
Collection<DataSource> dataSources = context.getBeansOfType(DataSource.class).values();
for (DataSource dataSource : dataSources) {
Statement statement = configureDataSourceBehavior(dataSource);
then(statement).should(never()).execute("SHUTDOWN");
}
}
}
@Test
void emptyFactoryMethodMetadataIgnored() {
try (AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext()) {
DataSource dataSource = mock(DataSource.class);
AnnotatedGenericBeanDefinition beanDefinition = new AnnotatedGenericBeanDefinition(dataSource.getClass());
context.registerBeanDefinition("dataSource", beanDefinition);
context.register(DevToolsDataSourceAutoConfiguration.class);
context.refresh();
}
}
protected final Statement configureDataSourceBehavior(DataSource dataSource) throws SQLException {
Connection connection = mock(Connection.class);
Statement statement = mock(Statement.class);
willReturn(connection).given(dataSource).getConnection();
given(connection.createStatement()).willReturn(statement);
return statement;
}
protected ConfigurableApplicationContext getContext(Supplier<ConfigurableApplicationContext> supplier)
throws Exception {
AtomicReference<ConfigurableApplicationContext> atomicReference = new AtomicReference<>();
Thread thread = new Thread(() -> {
ConfigurableApplicationContext context = supplier.get();
atomicReference.getAndSet(context);
});
thread.start();
thread.join();
ConfigurableApplicationContext context = atomicReference.get();
assertThat(context).isNotNull();
return context;
}
protected final ConfigurableApplicationContext createContext(Class<?>... classes) {
return createContext(null, classes);
}
protected final ConfigurableApplicationContext createContext(@Nullable String driverClassName,
Class<?>... classes) {
return createContext(driverClassName, null, classes);
}
protected final ConfigurableApplicationContext createContext(@Nullable String driverClassName, @Nullable String url,
Class<?>... classes) {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(classes);
context.register(DevToolsDataSourceAutoConfiguration.class);
if (driverClassName != null) {
TestPropertyValues.of("spring.datasource.driver-class-name:" + driverClassName).applyTo(context);
}
if (url != null) {
TestPropertyValues.of("spring.datasource.url:" + url).applyTo(context);
}
context.refresh();
return context;
}
@Configuration(proxyBeanMethods = false)
static | AbstractDevToolsDataSourceAutoConfigurationTests |
java | apache__camel | components/camel-netty/src/main/java/org/apache/camel/component/netty/SingleUDPNettyServerBootstrapFactory.java | {
"start": 8055,
"end": 9251
} | interface ____ for '" + networkInterface + "'.");
LOG.info("ConnectionlessBootstrap joining {}:{} using network interface: {}", configuration.getHost(),
configuration.getPort(), multicastNetworkInterface.getName());
datagramChannel.joinGroup(hostAddress, multicastNetworkInterface).syncUninterruptibly();
allChannels.add(datagramChannel);
} else {
LOG.info("ConnectionlessBootstrap binding to {}:{}", configuration.getHost(), configuration.getPort());
ChannelFuture channelFuture = bootstrap.bind(hostAddress).sync();
channel = channelFuture.channel();
allChannels.add(channel);
}
}
protected void stopServerBootstrap() {
// close all channels
LOG.info("ConnectionlessBootstrap disconnecting from {}:{}", configuration.getHost(), configuration.getPort());
LOG.trace("Closing {} channels", allChannels.size());
allChannels.close().awaitUninterruptibly();
// and then shutdown the thread pools
if (workerGroup != null) {
workerGroup.shutdownGracefully();
workerGroup = null;
}
}
}
| found |
java | apache__camel | components/camel-test/camel-test-spring-junit5/src/main/java/org/apache/camel/test/spring/junit5/CamelSpringTestContextLoader.java | {
"start": 2961,
"end": 4625
} | class ____ executes. Thus the listeners, which only run if the application context is
* successfully initialized are insufficient to provide the behavior described above.
*/
@Override
public ApplicationContext loadContext(MergedContextConfiguration mergedConfig) throws Exception {
Class<?> testClass = getTestClass();
if (LOG.isDebugEnabled()) {
LOG.debug("Loading ApplicationContext for merged context configuration [{}].", mergedConfig);
}
try {
GenericApplicationContext context = createContext(testClass, mergedConfig);
prepareContext(context, mergedConfig);
loadBeanDefinitions(context, mergedConfig);
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
customizeContext(context, mergedConfig);
return loadContext(context, testClass);
} finally {
CamelAnnotationsHandler.cleanup();
}
}
/**
* Modeled after the Spring implementation in {@link AbstractGenericContextLoader}, this method creates and
* refreshes the application context while providing for processing of additional Camel specific post-refresh
* actions. We do not provide the pre-post hooks for customization seen in {@link AbstractGenericContextLoader}
* because they probably are unnecessary for 90+% of users.
* <p/>
* For some functionality, we cannot use {@link org.springframework.test.context.TestExecutionListener} because we
* need to both produce the desired outcome during application context loading, and also cleanup after ourselves
* even if the test | never |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/tree/expression/XmlAttributes.java | {
"start": 284,
"end": 703
} | class ____ implements SqlAstNode {
private final Map<String, Expression> attributes;
public XmlAttributes(Map<String, Expression> attributes) {
this.attributes = attributes;
}
public Map<String, Expression> getAttributes() {
return attributes;
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
throw new UnsupportedOperationException("XmlAttributes doesn't support walking");
}
}
| XmlAttributes |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/operators/AbstractUdfOperator.java | {
"start": 1323,
"end": 2478
} | class ____ the user function. */
protected final UserCodeWrapper<FT> userFunction;
/** The extra inputs which parameterize the user function. */
protected final Map<String, Operator<?>> broadcastInputs = new HashMap<>();
// --------------------------------------------------------------------------------------------
/**
* Creates a new abstract operator with the given name wrapping the given user function.
*
* @param function The wrapper object containing the user function.
* @param name The given name for the operator, used in plans, logs and progress messages.
*/
protected AbstractUdfOperator(
UserCodeWrapper<FT> function, OperatorInformation<OUT> operatorInfo, String name) {
super(operatorInfo, name);
this.userFunction = function;
}
// --------------------------------------------------------------------------------------------
/**
* Gets the function that is held by this operator. The function is the actual implementation of
* the user code.
*
* <p>This throws an exception if the pact does not contain an object but a | containing |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/impl/SpringProduceConsumeImplicitPropertyTest.java | {
"start": 1095,
"end": 1624
} | class ____ extends SpringTestSupport {
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/spring/impl/SpringProduceConsumeImplicitPropertyTest.xml");
}
@Test
public void testImplicit() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("seda:input", "Hello World");
assertMockEndpointsSatisfied();
}
}
| SpringProduceConsumeImplicitPropertyTest |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/settings/AbstractSettings.java | {
"start": 1051,
"end": 2595
} | class ____ implements Serializable {
@Serial
private static final long serialVersionUID = 7434920549178503670L;
private final Map<String, Object> settings;
protected AbstractSettings(Map<String, Object> settings) {
Assert.notEmpty(settings, "settings cannot be empty");
this.settings = Collections.unmodifiableMap(new HashMap<>(settings));
}
/**
* Returns a configuration setting.
* @param name the name of the setting
* @param <T> the type of the setting
* @return the value of the setting, or {@code null} if not available
*/
@SuppressWarnings("unchecked")
public <T> T getSetting(String name) {
Assert.hasText(name, "name cannot be empty");
return (T) getSettings().get(name);
}
/**
* Returns a {@code Map} of the configuration settings.
* @return a {@code Map} of the configuration settings
*/
public Map<String, Object> getSettings() {
return this.settings;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
AbstractSettings that = (AbstractSettings) obj;
return this.settings.equals(that.settings);
}
@Override
public int hashCode() {
return Objects.hash(this.settings);
}
@Override
public String toString() {
return "AbstractSettings {" + "settings=" + this.settings + '}';
}
/**
* A builder for subclasses of {@link AbstractSettings}.
*
* @param <T> the type of object
* @param <B> the type of the builder
*/
protected abstract static | AbstractSettings |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 74237,
"end": 74572
} | class ____<@ImmutableTypeParameter X> {}
""")
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.ImmutableTypeParameter;
import com.google.errorprone.annotations.Immutable;
@Immutable
// BUG: Diagnostic contains: 'S' required instantiation of 'X' with type parameters, but was raw
| S |
java | spring-projects__spring-boot | module/spring-boot-graphql/src/test/java/org/springframework/boot/graphql/autoconfigure/GraphQlAutoConfigurationTests.java | {
"start": 12978,
"end": 13218
} | class ____ {
@Bean
DataFetcherExceptionResolver customDataFetcherExceptionResolver() {
return mock(DataFetcherExceptionResolver.class);
}
}
@Configuration(proxyBeanMethods = false)
static | DataFetcherExceptionResolverConfiguration |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDataFrameAnalyticsAction.java | {
"start": 1751,
"end": 3688
} | class ____ extends LegacyActionRequest {
public static final ParseField CONFIG = new ParseField("config");
private final DataFrameAnalyticsConfig config;
static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("preview_data_frame_analytics_response", Builder::new);
static {
PARSER.declareObject(Builder::setConfig, DataFrameAnalyticsConfig.STRICT_PARSER::apply, CONFIG);
}
public static Builder fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
public Request(DataFrameAnalyticsConfig config) {
this.config = ExceptionsHelper.requireNonNull(config, CONFIG);
}
public Request(StreamInput in) throws IOException {
super(in);
this.config = new DataFrameAnalyticsConfig(in);
}
public DataFrameAnalyticsConfig getConfig() {
return config;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
config.writeTo(out);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(config, request.config);
}
@Override
public int hashCode() {
return Objects.hash(config);
}
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
return new CancellableTask(id, type, action, format("preview_data_frame_analytics[%s]", config.getId()), parentTaskId, headers);
}
public static | Request |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/plugins/PluginIntrospectorTests.java | {
"start": 11896,
"end": 12452
} | class ____ extends BazIngestPlugin {}
assertThat(pluginIntrospector.overriddenMethods(BazIngestPlugin.class), contains("getProcessors"));
assertThat(pluginIntrospector.overriddenMethods(SubBazIngestPlugin.class), contains("getProcessors"));
assertThat(pluginIntrospector.interfaces(BazIngestPlugin.class), contains("IngestPlugin"));
assertThat(pluginIntrospector.interfaces(SubBazIngestPlugin.class), contains("IngestPlugin"));
}
public void testOverriddenMethodsCaseSensitivity() {
abstract | SubBazIngestPlugin |
java | grpc__grpc-java | alts/src/test/java/io/grpc/alts/internal/AltsProtocolNegotiatorTest.java | {
"start": 17441,
"end": 17836
} | class ____ implements TsiHandshakerFactory {
private TsiHandshakerFactory delegate;
DelegatingTsiHandshakerFactory(TsiHandshakerFactory delegate) {
this.delegate = delegate;
}
@Override
public TsiHandshaker newHandshaker(String authority, ChannelLogger logger) {
return delegate.newHandshaker(authority, logger);
}
}
private | DelegatingTsiHandshakerFactory |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/specialmappings/EarlyMetadataValidationTest.java | {
"start": 518,
"end": 977
} | class ____ {
@RegisterExtension
final static QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(DataIdentity.class, IdVersionPK.class, NormalPointEntity.class, PointEntity.class)
.addAsResource("application.properties"));
@Test
public void testSuccessfulBoot() {
// Should be able to boot with these entities
}
}
| EarlyMetadataValidationTest |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/LogManager.java | {
"start": 25552,
"end": 25838
} | class ____, and an appropriate one cannot be found.");
}
return candidate;
}
/**
* Returns a Logger with the name of the calling class.
*
* @return The Logger for the calling class.
* @throws UnsupportedOperationException if the calling | provided |
java | apache__kafka | metadata/src/test/java/org/apache/kafka/image/FakeSnapshotWriter.java | {
"start": 1057,
"end": 2491
} | class ____ implements SnapshotWriter<ApiMessageAndVersion> {
private final OffsetAndEpoch snapshotId;
private final List<List<ApiMessageAndVersion>> batches = new ArrayList<>();
private boolean frozen = false;
private boolean closed = false;
public List<List<ApiMessageAndVersion>> batches() {
return batches.stream().map(List::copyOf).toList();
}
public FakeSnapshotWriter() {
this(new OffsetAndEpoch(100L, 10));
}
public FakeSnapshotWriter(OffsetAndEpoch snapshotId) {
this.snapshotId = snapshotId;
}
@Override
public OffsetAndEpoch snapshotId() {
return snapshotId;
}
@Override
public long lastContainedLogOffset() {
return snapshotId().offset() - 1;
}
@Override
public int lastContainedLogEpoch() {
return snapshotId().epoch();
}
@Override
public boolean isFrozen() {
return frozen;
}
@Override
public void append(List<ApiMessageAndVersion> batch) {
if (frozen) {
throw new IllegalStateException("Append not supported. Snapshot is already frozen.");
}
batches.add(batch);
}
@Override
public long freeze() {
frozen = true;
return batches.size() * 100L;
}
@Override
public void close() {
closed = true;
}
public boolean isClosed() {
return closed;
}
} | FakeSnapshotWriter |
java | apache__avro | lang/java/java17-test/src/test/java/org/apache/avro/reflect/TestJavaRecords.java | {
"start": 1953,
"end": 3067
} | class ____ extends CustomEncoding<R1> {
{
schema = Schema.createRecord("R1", null, null, false,
Arrays.asList(new Schema.Field("value", Schema.create(Schema.Type.STRING), null, null)));
}
@Override
protected void write(Object datum, Encoder out) throws IOException {
if (datum instanceof R1 r1) {
out.writeString(r1.value());
} else {
throw new AvroTypeException("Expected R1, got " + datum.getClass());
}
}
@Override
protected R1 read(Object reuse, Decoder in) throws IOException {
return new R1(in.readString() + " used this");
}
}
<T> T readWrite(T object) throws IOException {
var schema = ReflectData.get().getSchema(object.getClass());
ReflectDatumWriter<T> writer = new ReflectDatumWriter<>(schema);
ByteArrayOutputStream out = new ByteArrayOutputStream();
writer.write(object, factory.directBinaryEncoder(out, null));
ReflectDatumReader<T> reader = new ReflectDatumReader<>(schema);
return reader.read(null, DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
}
}
| R1Encoding |
java | elastic__elasticsearch | build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistributionType.java | {
"start": 511,
"end": 980
} | interface ____ {
String getName();
default boolean isDocker() {
return false;
}
default boolean shouldExtract() {
return false;
};
default String getExtension(ElasticsearchDistribution.Platform platform) {
return getName();
}
default String getClassifier(ElasticsearchDistribution.Platform platform, Version version) {
return ":" + Architecture.current().classifier;
}
}
| ElasticsearchDistributionType |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/ModelVersionUtilsTest.java | {
"start": 6714,
"end": 9443
} | class ____ {
@ParameterizedTest(name = "from {0} to {1}")
@MethodSource("provideValidPathUpgradeVersions")
@DisplayName("should validate upgrade path")
void shouldValidateUpgradePath(String from, String to) {
assertTrue(ModelVersionUtils.canUpgrade(from, to));
}
private static Stream<Arguments> provideValidPathUpgradeVersions() {
return Stream.of(
Arguments.of("4.0.0", "4.1.0"), Arguments.of("4.1.0", "4.2.0"), Arguments.of("4.0.0", "4.2.0"));
}
@ParameterizedTest(name = "from {0} to {1}")
@MethodSource("provideInvalidPathUpgradeVersions")
@DisplayName("should reject downgrade")
void shouldRejectDowngrade(String from, String to) {
assertFalse(ModelVersionUtils.canUpgrade(from, to));
}
private static Stream<Arguments> provideInvalidPathUpgradeVersions() {
return Stream.of(
Arguments.of("4.1.0", "4.0.0"), Arguments.of("4.2.0", "4.1.0"), Arguments.of("4.2.0", "4.0.0"));
}
@ParameterizedTest(name = "from {0} to {0}")
@ValueSource(strings = {"4.0.0", "4.1.0", "4.2.0"})
@DisplayName("should reject upgrade to same version")
void shouldRejectUpgradeToSameVersion(String version) {
assertFalse(ModelVersionUtils.canUpgrade(version, version));
}
@ParameterizedTest(name = "from {0}")
@ValueSource(strings = {"3.0.0", "5.0.0"})
@DisplayName("should reject upgrade from unsupported version")
void shouldRejectUpgradeFromUnsupportedVersion(String unsupportedVersion) {
assertFalse(ModelVersionUtils.canUpgrade(unsupportedVersion, "4.1.0"));
}
@ParameterizedTest(name = "to {0}")
@ValueSource(strings = {"3.0.0", "5.0.0"})
@DisplayName("should reject upgrade to unsupported version")
void shouldRejectUpgradeToUnsupportedVersion(String unsupportedVersion) {
assertFalse(ModelVersionUtils.canUpgrade("4.0.0", unsupportedVersion));
}
@ParameterizedTest(name = "from {0} to {1}")
@MethodSource("provideNullVersionsInUpgradePairs")
@DisplayName("should handle null versions in upgrade validation")
void shouldHandleNullVersionsInUpgradeValidation(String from, String to) {
assertFalse(ModelVersionUtils.canUpgrade(from, to));
}
private static Stream<Arguments> provideNullVersionsInUpgradePairs() {
return Stream.of(Arguments.of(null, "4.1.0"), Arguments.of("4.0.0", null), Arguments.of(null, null));
}
}
@Nested
@DisplayName("Version Comparison")
| UpgradePathValidationTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/ReadOnlyList.java | {
"start": 1530,
"end": 6666
} | class ____ {
/** @return an empty list. */
public static <E> ReadOnlyList<E> emptyList() {
return ReadOnlyList.Util.asReadOnlyList(Collections.<E>emptyList());
}
/**
* The same as {@link Collections#binarySearch(List, Object)}
* except that the list is a {@link ReadOnlyList}.
*
* @return the insertion point defined
* in {@link Collections#binarySearch(List, Object)}.
*/
public static <K, E extends Comparable<K>> int binarySearch(
final ReadOnlyList<E> list, final K key) {
int lower = 0;
for(int upper = list.size() - 1; lower <= upper; ) {
final int mid = (upper + lower) >>> 1;
final int d = list.get(mid).compareTo(key);
if (d == 0) {
return mid;
} else if (d > 0) {
upper = mid - 1;
} else {
lower = mid + 1;
}
}
return -(lower + 1);
}
/**
* @return a {@link ReadOnlyList} view of the given list.
*/
public static <E> ReadOnlyList<E> asReadOnlyList(final List<E> list) {
return new ReadOnlyList<E>() {
@Override
public Iterator<E> iterator() {
return list.iterator();
}
@Override
public boolean isEmpty() {
return list.isEmpty();
}
@Override
public int size() {
return list.size();
}
@Override
public E get(int i) {
return list.get(i);
}
@Override
public String toString() {
return list.toString();
}
};
}
/**
* @return a {@link List} view of the given list.
*/
public static <E> List<E> asList(final ReadOnlyList<E> list) {
return new List<E>() {
@Override
public Iterator<E> iterator() {
return list.iterator();
}
@Override
public boolean isEmpty() {
return list.isEmpty();
}
@Override
public int size() {
return list.size();
}
@Override
public E get(int i) {
return list.get(i);
}
@Override
public Object[] toArray() {
final Object[] a = new Object[size()];
for(int i = 0; i < a.length; i++) {
a[i] = get(i);
}
return a;
}
//All methods below are not supported.
@Override
public boolean add(E e) {
throw new UnsupportedOperationException();
}
@Override
public void add(int index, E element) {
throw new UnsupportedOperationException();
}
@Override
public boolean addAll(Collection<? extends E> c) {
throw new UnsupportedOperationException();
}
@Override
public boolean addAll(int index, Collection<? extends E> c) {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
throw new UnsupportedOperationException();
}
@Override
public boolean contains(Object o) {
throw new UnsupportedOperationException();
}
@Override
public boolean containsAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public int indexOf(Object o) {
throw new UnsupportedOperationException();
}
@Override
public int lastIndexOf(Object o) {
throw new UnsupportedOperationException();
}
@Override
public ListIterator<E> listIterator() {
throw new UnsupportedOperationException();
}
@Override
public ListIterator<E> listIterator(int index) {
throw new UnsupportedOperationException();
}
@Override
public boolean remove(Object o) {
throw new UnsupportedOperationException();
}
@Override
public E remove(int index) {
throw new UnsupportedOperationException();
}
@Override
public boolean removeAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public boolean retainAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public E set(int index, E element) {
throw new UnsupportedOperationException();
}
@Override
public List<E> subList(int fromIndex, int toIndex) {
throw new UnsupportedOperationException();
}
@Override
public <T> T[] toArray(T[] a) {
throw new UnsupportedOperationException();
}
@Override
public String toString() {
if (list.isEmpty()) {
return "[]";
}
final Iterator<E> i = list.iterator();
final StringBuilder b = new StringBuilder("[").append(i.next());
for(; i.hasNext();) {
b.append(", ").append(i.next());
}
return b + "]";
}
};
}
}
}
| Util |
java | quarkusio__quarkus | test-framework/junit5-component/src/test/java/io/quarkus/test/component/declarative/ListAllMockTest.java | {
"start": 617,
"end": 1104
} | class ____ {
@Inject
ListAllComponent component;
@InjectMock
Delta delta;
@InjectMock
@SimpleQualifier
Bravo bravo;
@Test
public void testMock() {
Mockito.when(delta.ping()).thenReturn(false);
Mockito.when(bravo.ping()).thenReturn("ok");
assertFalse(component.ping());
assertEquals(1, component.bravos.size());
assertEquals("ok", component.bravos.get(0).ping());
}
@Singleton
static | ListAllMockTest |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fixed/BindyMarshallUnmarshallWithDefaultValueTest.java | {
"start": 5514,
"end": 9415
} | class ____ {
@DataField(pos = 1, length = 2)
private int orderNr;
@DataField(pos = 3, length = 2)
private String clientNr;
@DataField(pos = 5, length = 9, defaultValue = "John", trim = true)
private String firstName;
@DataField(pos = 14, length = 5, align = "L", defaultValue = "Doe", trim = true)
private String lastName;
@DataField(pos = 19, length = 4)
private String instrumentCode;
@DataField(pos = 23, length = 10)
private String instrumentNumber;
@DataField(pos = 33, length = 3)
private String orderType;
@DataField(pos = 36, length = 5)
private String instrumentType;
@DataField(pos = 41, precision = 2, length = 12, paddingChar = '0')
private BigDecimal amount;
@DataField(pos = 53, length = 3)
private String currency;
@DataField(pos = 56, length = 10, pattern = "dd-MM-yyyy")
private Date orderDate;
@DataField(pos = 66, length = 10)
private String comment;
public int getOrderNr() {
return orderNr;
}
public void setOrderNr(int orderNr) {
this.orderNr = orderNr;
}
public String getClientNr() {
return clientNr;
}
public void setClientNr(String clientNr) {
this.clientNr = clientNr;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getInstrumentCode() {
return instrumentCode;
}
public void setInstrumentCode(String instrumentCode) {
this.instrumentCode = instrumentCode;
}
public String getInstrumentNumber() {
return instrumentNumber;
}
public void setInstrumentNumber(String instrumentNumber) {
this.instrumentNumber = instrumentNumber;
}
public String getOrderType() {
return orderType;
}
public void setOrderType(String orderType) {
this.orderType = orderType;
}
public String getInstrumentType() {
return instrumentType;
}
public void setInstrumentType(String instrumentType) {
this.instrumentType = instrumentType;
}
public BigDecimal getAmount() {
return amount;
}
public void setAmount(BigDecimal amount) {
this.amount = amount;
}
public String getCurrency() {
return currency;
}
public void setCurrency(String currency) {
this.currency = currency;
}
public Date getOrderDate() {
return orderDate;
}
public void setOrderDate(Date orderDate) {
this.orderDate = orderDate;
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment;
}
@Override
public String toString() {
return "Model : " + Order.class.getName() + " : " + this.orderNr + ", " + this.orderType + ", "
+ String.valueOf(this.amount) + ", " + this.instrumentCode + ", "
+ this.instrumentNumber + ", " + this.instrumentType + ", " + this.currency + ", " + this.clientNr + ", "
+ this.firstName + ", " + this.lastName + ", "
+ String.valueOf(this.orderDate);
}
}
@FixedLengthRecord(length = 75, eol = "QWERTY")
public static | Order |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIndexedSort.java | {
"start": 7813,
"end": 8830
} | class ____ implements IndexedSortable {
private int comparisions;
private int swaps;
private final int maxcmp;
private final int maxswp;
private IndexedSortable s;
public MeasuredSortable(IndexedSortable s) {
this(s, Integer.MAX_VALUE);
}
public MeasuredSortable(IndexedSortable s, int maxcmp) {
this(s, maxcmp, Integer.MAX_VALUE);
}
public MeasuredSortable(IndexedSortable s, int maxcmp, int maxswp) {
this.s = s;
this.maxcmp = maxcmp;
this.maxswp = maxswp;
}
public int getCmp() { return comparisions; }
public int getSwp() { return swaps; }
@Override
public int compare(int i, int j) {
assertTrue(++comparisions < maxcmp,
"Expected fewer than " + maxcmp + " comparisons");
return s.compare(i, j);
}
@Override
public void swap(int i, int j) {
assertTrue(++swaps < maxswp, "Expected fewer than " + maxswp + " swaps");
s.swap(i, j);
}
}
private static | MeasuredSortable |
java | redisson__redisson | redisson/src/main/java/org/redisson/client/protocol/decoder/SearchResultDecoderV2.java | {
"start": 868,
"end": 1830
} | class ____ implements MultiDecoder<Object> {
@Override
public Object decode(List<Object> parts, State state) {
if (parts.isEmpty()) {
return new SearchResult(0, Collections.emptyList());
}
Map<String, Object> m = new HashMap<>();
for (int i = 0; i < parts.size(); i++) {
if (i % 2 != 0) {
m.put(parts.get(i-1).toString(), parts.get(i));
}
}
List<Document> docs = new ArrayList<>();
List<Map<String, Object>> results = (List<Map<String, Object>>) m.get("results");
for (Map<String, Object> result : results) {
String id = (String) result.get("id");
Map<String, Object> attrs = (Map<String, Object>) result.get("extra_attributes");
docs.add(new Document(id, attrs));
}
Long total = (Long) m.get("total_results");
return new SearchResult(total, docs);
}
}
| SearchResultDecoderV2 |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/context/EagerConfig.java | {
"start": 746,
"end": 1052
} | class ____ {
static boolean created = false;
private String something;
public EagerConfig() {
created = true;
}
public String getSomething() {
return something;
}
public void setSomething(String something) {
this.something = something;
}
}
| EagerConfig |
java | google__gson | gson/src/test/java/com/google/gson/internal/bind/DefaultDateTypeAdapterTest.java | {
"start": 1391,
"end": 10057
} | class ____ {
@Test
public void testFormattingInEnUs() {
assertFormattingAlwaysEmitsUsLocale(Locale.US);
}
@Test
public void testFormattingInFr() {
assertFormattingAlwaysEmitsUsLocale(Locale.FRANCE);
}
private static void assertFormattingAlwaysEmitsUsLocale(Locale locale) {
TimeZone defaultTimeZone = TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
Locale defaultLocale = Locale.getDefault();
Locale.setDefault(locale);
try {
// The patterns here attempt to accommodate minor date-time formatting differences between JDK
// versions. Ideally Gson would serialize in a way that is independent of the JDK version.
// Note: \h means "horizontal space", because some JDK versions use Narrow No Break Space
// (U+202F) before the AM or PM indication.
String utcFull = "(Coordinated Universal Time|UTC)";
assertFormatted("Jan 1, 1970,? 12:00:00\\hAM", DefaultDateTypeAdapter.DEFAULT_STYLE_FACTORY);
assertFormatted(
"1/1/70,? 12:00\\hAM",
DateType.DATE.createAdapterFactory(DateFormat.SHORT, DateFormat.SHORT));
assertFormatted(
"Jan 1, 1970,? 12:00:00\\hAM",
DateType.DATE.createAdapterFactory(DateFormat.MEDIUM, DateFormat.MEDIUM));
assertFormatted(
"January 1, 1970(,| at)? 12:00:00\\hAM UTC",
DateType.DATE.createAdapterFactory(DateFormat.LONG, DateFormat.LONG));
assertFormatted(
"Thursday, January 1, 1970(,| at)? 12:00:00\\hAM " + utcFull,
DateType.DATE.createAdapterFactory(DateFormat.FULL, DateFormat.FULL));
} finally {
TimeZone.setDefault(defaultTimeZone);
Locale.setDefault(defaultLocale);
}
}
@Test
public void testParsingDatesFormattedWithSystemLocale() throws Exception {
TimeZone defaultTimeZone = TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
Locale defaultLocale = Locale.getDefault();
Locale.setDefault(Locale.FRANCE);
try {
Date date = new Date(0);
assertParsed(
DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM).format(date),
DefaultDateTypeAdapter.DEFAULT_STYLE_FACTORY);
assertParsed(
DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT).format(date),
DateType.DATE.createAdapterFactory(DateFormat.SHORT, DateFormat.SHORT));
assertParsed(
DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM).format(date),
DateType.DATE.createAdapterFactory(DateFormat.MEDIUM, DateFormat.MEDIUM));
assertParsed(
DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG).format(date),
DateType.DATE.createAdapterFactory(DateFormat.LONG, DateFormat.LONG));
assertParsed(
DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL).format(date),
DateType.DATE.createAdapterFactory(DateFormat.FULL, DateFormat.FULL));
} finally {
TimeZone.setDefault(defaultTimeZone);
Locale.setDefault(defaultLocale);
}
}
@Test
public void testParsingDatesFormattedWithUsLocale() throws Exception {
TimeZone defaultTimeZone = TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
Locale defaultLocale = Locale.getDefault();
Locale.setDefault(Locale.US);
try {
assertParsed("Jan 1, 1970 0:00:00 AM", DefaultDateTypeAdapter.DEFAULT_STYLE_FACTORY);
assertParsed(
"1/1/70 0:00 AM", DateType.DATE.createAdapterFactory(DateFormat.SHORT, DateFormat.SHORT));
assertParsed(
"Jan 1, 1970 0:00:00 AM",
DateType.DATE.createAdapterFactory(DateFormat.MEDIUM, DateFormat.MEDIUM));
assertParsed(
"January 1, 1970 0:00:00 AM UTC",
DateType.DATE.createAdapterFactory(DateFormat.LONG, DateFormat.LONG));
assertParsed(
"Thursday, January 1, 1970 0:00:00 AM UTC",
DateType.DATE.createAdapterFactory(DateFormat.FULL, DateFormat.FULL));
} finally {
TimeZone.setDefault(defaultTimeZone);
Locale.setDefault(defaultLocale);
}
}
@Test
public void testFormatUsesDefaultTimezone() throws Exception {
TimeZone defaultTimeZone = TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
Locale defaultLocale = Locale.getDefault();
Locale.setDefault(Locale.US);
try {
assertFormatted("Dec 31, 1969,? 4:00:00\\hPM", DefaultDateTypeAdapter.DEFAULT_STYLE_FACTORY);
assertParsed("Dec 31, 1969 4:00:00 PM", DefaultDateTypeAdapter.DEFAULT_STYLE_FACTORY);
} finally {
TimeZone.setDefault(defaultTimeZone);
Locale.setDefault(defaultLocale);
}
}
@Test
public void testDateDeserializationISO8601() throws Exception {
TypeAdapterFactory adapterFactory = DefaultDateTypeAdapter.DEFAULT_STYLE_FACTORY;
assertParsed("1970-01-01T00:00:00.000Z", adapterFactory);
assertParsed("1970-01-01T00:00Z", adapterFactory);
assertParsed("1970-01-01T00:00:00+00:00", adapterFactory);
assertParsed("1970-01-01T01:00:00+01:00", adapterFactory);
assertParsed("1970-01-01T01:00:00+01", adapterFactory);
}
@Test
public void testDatePattern() {
String pattern = "yyyy-MM-dd";
TypeAdapter<Date> dateTypeAdapter = dateAdapter(DateType.DATE.createAdapterFactory(pattern));
DateFormat formatter = new SimpleDateFormat(pattern);
Date currentDate = new Date();
String dateString = dateTypeAdapter.toJson(currentDate);
assertThat(dateString).isEqualTo(toLiteral(formatter.format(currentDate)));
}
@Test
public void testInvalidDatePattern() {
assertThrows(
IllegalArgumentException.class,
() -> DateType.DATE.createAdapterFactory("I am a bad Date pattern...."));
}
@Test
public void testNullValue() throws Exception {
TypeAdapter<Date> adapter = dateAdapter(DefaultDateTypeAdapter.DEFAULT_STYLE_FACTORY);
assertThat(adapter.fromJson("null")).isNull();
assertThat(adapter.toJson(null)).isEqualTo("null");
}
@Test
public void testUnexpectedToken() throws Exception {
TypeAdapter<Date> adapter = dateAdapter(DefaultDateTypeAdapter.DEFAULT_STYLE_FACTORY);
IllegalStateException e =
assertThrows(IllegalStateException.class, () -> adapter.fromJson("{}"));
assertThat(e).hasMessageThat().startsWith("Expected a string but was BEGIN_OBJECT");
}
@Test
public void testGsonDateFormat() {
TimeZone originalTimeZone = TimeZone.getDefault();
// Set the default timezone to UTC
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
try {
Gson gson = new GsonBuilder().setDateFormat("yyyy-MM-dd HH:mm z").create();
Date originalDate = new Date(0);
// Serialize the date object
String json = gson.toJson(originalDate);
assertThat(json).isEqualTo("\"1970-01-01 00:00 UTC\"");
// Deserialize a date string with the PST timezone
Date deserializedDate = gson.fromJson("\"1970-01-01 00:00 PST\"", Date.class);
// Assert that the deserialized date's time is correct
assertThat(deserializedDate.getTime()).isEqualTo(new Date(28800000).getTime());
// Serialize the deserialized date object again
String jsonAfterDeserialization = gson.toJson(deserializedDate);
// The expectation is that the date, after deserialization, when serialized again should still
// be in the UTC timezone
assertThat(jsonAfterDeserialization).isEqualTo("\"1970-01-01 08:00 UTC\"");
} finally {
TimeZone.setDefault(originalTimeZone);
}
}
private static TypeAdapter<Date> dateAdapter(TypeAdapterFactory adapterFactory) {
TypeAdapter<Date> adapter = adapterFactory.create(new Gson(), TypeToken.get(Date.class));
assertThat(adapter).isNotNull();
return adapter;
}
private static void assertFormatted(String formattedPattern, TypeAdapterFactory adapterFactory) {
TypeAdapter<Date> adapter = dateAdapter(adapterFactory);
String json = adapter.toJson(new Date(0));
assertThat(json).matches(toLiteral(formattedPattern));
}
@SuppressWarnings("UndefinedEquals")
private static void assertParsed(String date, TypeAdapterFactory adapterFactory)
throws IOException {
TypeAdapter<Date> adapter = dateAdapter(adapterFactory);
assertWithMessage(date).that(adapter.fromJson(toLiteral(date))).isEqualTo(new Date(0));
assertWithMessage("ISO 8601")
.that(adapter.fromJson(toLiteral("1970-01-01T00:00:00Z")))
.isEqualTo(new Date(0));
}
private static String toLiteral(String s) {
return '"' + s + '"';
}
}
| DefaultDateTypeAdapterTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/filter/ProblemHandlerLocation1440Test.java | {
"start": 3348,
"end": 5356
} | class ____ {
public String id;
public String type;
public String status;
public String context;
@JsonCreator
public ActivityEntity(@JsonProperty("id") final String id, @JsonProperty("type") final String type, @JsonProperty("status") final String status, @JsonProperty("context") final String context) {
this.id = id;
this.type = type;
this.status = status;
this.context = context;
}
}
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
@Test
public void testIncorrectContext() throws Exception
{
// need invalid to trigger problem:
final String invalidInput = a2q(
"{'actor': {'id': 'actor_id','type': 'actor_type',"
+"'status': 'actor_status','context':'actor_context','invalid_1': 'actor_invalid_1'},"
+"'verb': 'verb','object': {'id': 'object_id','type': 'object_type',"
+"'invalid_2': 'object_invalid_2','status': 'object_status','context': 'object_context'},"
+"'target': {'id': 'target_id','type': 'target_type','invalid_3': 'target_invalid_3',"
+"'invalid_4': 'target_invalid_4','status': 'target_status','context': 'target_context'}}"
);
final DeserializationProblemLogger logger = new DeserializationProblemLogger();
ObjectMapper mapper = jsonMapperBuilder()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.addHandler(logger)
.build();
mapper.readValue(invalidInput, Activity.class);
List<String> probs = logger.problems();
assertEquals(4, probs.size());
assertEquals("actor.invalid_1#invalid_1", probs.get(0));
assertEquals("object.invalid_2#invalid_2", probs.get(1));
assertEquals("target.invalid_3#invalid_3", probs.get(2));
assertEquals("target.invalid_4#invalid_4", probs.get(3));
}
}
| ActivityEntity |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/accesstype/Hominidae.java | {
"start": 312,
"end": 572
} | class ____ extends Mammals {
private int intelligence;
public int getIntelligence() {
return intelligence;
}
public void setIntelligence(int intelligence) {
this.intelligence = intelligence;
}
public int getNonPersistent() {
return 0;
}
}
| Hominidae |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InconsistentCapitalizationTest.java | {
"start": 6334,
"end": 6664
} | class ____ {
Object aa;
Test(Object aA) {
this.aa = aA;
if (aA == this.aa) {
for (Object i = aA; ; ) {}
}
}
}
""")
.addOutputLines(
"out/Test.java",
"""
| Test |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/operators/windowing/functions/InternalSingleValueWindowFunction.java | {
"start": 1448,
"end": 2480
} | class ____<IN, OUT, KEY, W extends Window>
extends WrappingFunction<WindowFunction<IN, OUT, KEY, W>>
implements InternalWindowFunction<IN, OUT, KEY, W> {
private static final long serialVersionUID = 1L;
public InternalSingleValueWindowFunction(WindowFunction<IN, OUT, KEY, W> wrappedFunction) {
super(wrappedFunction);
}
@Override
public void process(
KEY key, W window, InternalWindowContext context, IN input, Collector<OUT> out)
throws Exception {
wrappedFunction.apply(key, window, Collections.singletonList(input), out);
}
@Override
public void clear(W window, InternalWindowContext context) throws Exception {}
@Override
public RuntimeContext getRuntimeContext() {
throw new RuntimeException("This should never be called.");
}
@Override
public IterationRuntimeContext getIterationRuntimeContext() {
throw new RuntimeException("This should never be called.");
}
}
| InternalSingleValueWindowFunction |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/deployment/ConsumedSubpartitionContext.java | {
"start": 1517,
"end": 1637
} | class ____ to track and manage the relationships between shuffle descriptors and their
* associated subpartitions.
*/
| used |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/resource/ResourceHttpRequestHandlerIntegrationTests.java | {
"start": 9165,
"end": 9241
} | class ____ extends ResponseEntityExceptionHandler {
}
}
| GlobalExceptionHandler |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/enhanced/OrderedSequenceGenerator.java | {
"start": 779,
"end": 1428
} | class ____ extends SequenceStyleGenerator {
@Override
protected DatabaseStructure buildSequenceStructure(
Type type,
Properties params,
JdbcEnvironment jdbcEnvironment,
QualifiedName sequenceName,
int initialValue,
int incrementSize) {
final Object noCacheValue = params.get( "nocache" );
final boolean noCache = Boolean.TRUE.equals( noCacheValue )
|| noCacheValue instanceof String && Boolean.parseBoolean( noCacheValue.toString() );
return new OrderedSequenceStructure(
jdbcEnvironment,
sequenceName,
initialValue,
incrementSize,
noCache,
type.getReturnedClass()
);
}
}
| OrderedSequenceGenerator |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/ClassFieldTest.java | {
"start": 144,
"end": 383
} | class ____ extends TestCase {
public void test_writer_1() throws Exception {
VO vo = JSON.parseObject("{\"value\":\"int\"}", VO.class);
Assert.assertEquals(int.class, vo.getValue());
}
public static | ClassFieldTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/NaturalIdCache.java | {
"start": 920,
"end": 1107
} | interface ____ {
/**
* Specifies an explicit cache region name.
* <p>
* By default, the region name is {@code EntityName##NaturalId}.
*/
String region() default "";
}
| NaturalIdCache |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/ContextResolverTest.java | {
"start": 2389,
"end": 2468
} | enum ____ {
RED,
BLACK
}
@Provider
public static | Color |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/reactive/server/samples/bind/ControllerTests.java | {
"start": 1097,
"end": 1497
} | class ____ {
private WebTestClient client;
@BeforeEach
public void setUp() throws Exception {
this.client = WebTestClient.bindToController(new TestController()).build();
}
@Test
public void test() throws Exception {
this.client.get().uri("/test")
.exchange()
.expectStatus().isOk()
.expectBody(String.class).isEqualTo("It works!");
}
@RestController
static | ControllerTests |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng5783PluginDependencyFiltering.java | {
"start": 988,
"end": 2086
} | class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testSLF4j() throws Exception {
File testDir = extractResources("/mng-5783-plugin-dependency-filtering");
Verifier verifier = newVerifier(new File(testDir, "plugin").getAbsolutePath());
verifier.addCliArgument("install");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier = newVerifier(new File(testDir, "slf4j").getAbsolutePath());
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
// Note that plugin dependencies always include plugin itself and plexus-utils
List<String> dependencies = verifier.loadLines("target/dependencies.txt");
assertEquals(2, dependencies.size());
assertEquals(
"mng-5783-plugin-dependency-filtering:mng-5783-plugin-dependency-filtering-plugin:maven-plugin:0.1",
dependencies.get(0));
assertEquals("org.slf4j:slf4j-api:jar:1.7.5", dependencies.get(1));
}
}
| MavenITmng5783PluginDependencyFiltering |
java | quarkusio__quarkus | extensions/elytron-security-jdbc/runtime/src/main/java/io/quarkus/elytron/security/jdbc/PrincipalQueryConfig.java | {
"start": 320,
"end": 1061
} | interface ____ {
/**
* The sql query to find the password
*/
Optional<String> sql();
/**
* The data source to use
*/
Optional<String> datasource();
/**
* The definitions of the mapping between the database columns and the identity's attributes
*/
Map<String, AttributeMappingConfig> attributeMappings();
/**
* The "clear-password-mapper" configuration
*/
@WithName("clear-password-mapper")
ClearPasswordMapperConfig clearPasswordMapperConfig();
/**
* The "bcrypt-password-mapper" configuration
*/
@WithName("bcrypt-password-mapper")
BcryptPasswordKeyMapperConfig bcryptPasswordKeyMapperConfig();
String toString();
}
| PrincipalQueryConfig |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/MockRestartingContext.java | {
"start": 1742,
"end": 4663
} | class ____ extends MockStateWithExecutionGraphContext
implements Restarting.Context {
private final StateValidator<ExecutingTest.CancellingArguments> cancellingStateValidator =
new StateValidator<>("Cancelling");
private final StateValidator<ExecutionGraph> waitingForResourcesStateValidator =
new StateValidator<>("WaitingForResources");
private final StateValidator<ExecutionGraph> creatingExecutionGraphStateValidator =
new StateValidator<>("CreatingExecutionGraph");
@Nullable private VertexParallelism availableVertexParallelism;
public void setExpectCancelling(Consumer<ExecutingTest.CancellingArguments> asserter) {
cancellingStateValidator.expectInput(asserter);
}
public void setExpectWaitingForResources() {
waitingForResourcesStateValidator.expectInput(assertNonNull());
}
public void setExpectCreatingExecutionGraph() {
creatingExecutionGraphStateValidator.expectInput(assertNonNull());
}
public void setAvailableVertexParallelism(
@Nullable VertexParallelism availableVertexParallelism) {
this.availableVertexParallelism = availableVertexParallelism;
}
@Override
public void goToCanceling(
ExecutionGraph executionGraph,
ExecutionGraphHandler executionGraphHandler,
OperatorCoordinatorHandler operatorCoordinatorHandler,
List<ExceptionHistoryEntry> failureCollection) {
cancellingStateValidator.validateInput(
new ExecutingTest.CancellingArguments(
executionGraph, executionGraphHandler, operatorCoordinatorHandler));
hadStateTransition = true;
}
@Override
public void archiveFailure(RootExceptionHistoryEntry failure) {}
@Override
public void goToWaitingForResources(@Nullable ExecutionGraph previousExecutionGraph) {
waitingForResourcesStateValidator.validateInput(previousExecutionGraph);
hadStateTransition = true;
}
@Override
public void goToCreatingExecutionGraph(@Nullable ExecutionGraph previousExecutionGraph) {
creatingExecutionGraphStateValidator.validateInput(previousExecutionGraph);
hadStateTransition = true;
}
@Override
public ScheduledFuture<?> runIfState(State expectedState, Runnable action, Duration delay) {
if (!hadStateTransition) {
action.run();
}
return CompletedScheduledFuture.create(null);
}
@Override
public Optional<VertexParallelism> getAvailableVertexParallelism() {
return Optional.ofNullable(availableVertexParallelism);
}
@Override
public void close() throws Exception {
super.close();
cancellingStateValidator.close();
waitingForResourcesStateValidator.close();
creatingExecutionGraphStateValidator.close();
}
}
| MockRestartingContext |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/SpringJUnitJupiterAutowiredConstructorInjectionTests.java | {
"start": 2117,
"end": 3143
} | class ____ {
final ApplicationContext context;
final Person dilbert;
final Dog dog;
final Integer enigma;
BaseClass(ApplicationContext context, Person dilbert, Dog dog, Integer enigma) {
this.context = context;
this.dilbert = dilbert;
this.dog = dog;
this.enigma = enigma;
}
@Test
void applicationContextInjected() {
assertThat(context).as("ApplicationContext should have been injected").isNotNull();
assertThat(context.getBean("dilbert", Person.class)).isEqualTo(this.dilbert);
}
@Test
void beansInjected() {
assertThat(this.dilbert).as("Dilbert should have been injected").isNotNull();
assertThat(this.dilbert.getName()).as("Person's name").isEqualTo("Dilbert");
assertThat(this.dog).as("Dogbert should have been injected").isNotNull();
assertThat(this.dog.getName()).as("Dog's name").isEqualTo("Dogbert");
}
@Test
void propertyPlaceholderInjected() {
assertThat(this.enigma).as("Enigma should have been injected via @Value").isEqualTo(42);
}
}
}
| BaseClass |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/event/cluster/AdaptiveRefreshTriggeredEvent.java | {
"start": 3590,
"end": 4250
} | class ____ extends AdaptiveRefreshTriggeredEvent {
private final int slot;
public UncoveredSlotAdaptiveRefreshTriggeredEvent(Supplier<Partitions> partitionsSupplier,
Runnable topologyRefreshScheduler, int slot) {
super(partitionsSupplier, topologyRefreshScheduler, ClusterTopologyRefreshOptions.RefreshTrigger.UNCOVERED_SLOT);
this.slot = slot;
}
/**
* Return the slot that is not covered.
*
* @return the slot that is not covered.
*/
public int getSlot() {
return slot;
}
}
}
| UncoveredSlotAdaptiveRefreshTriggeredEvent |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/ListIndexJavaTypeAnnotation.java | {
"start": 475,
"end": 1651
} | class ____ implements ListIndexJavaType {
private java.lang.Class<? extends org.hibernate.type.descriptor.java.BasicJavaType<?>> value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public ListIndexJavaTypeAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public ListIndexJavaTypeAnnotation(ListIndexJavaType annotation, ModelsContext modelContext) {
this.value = annotation.value();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public ListIndexJavaTypeAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (Class<? extends org.hibernate.type.descriptor.java.BasicJavaType<?>>) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return ListIndexJavaType.class;
}
@Override
public java.lang.Class<? extends org.hibernate.type.descriptor.java.BasicJavaType<?>> value() {
return value;
}
public void value(java.lang.Class<? extends org.hibernate.type.descriptor.java.BasicJavaType<?>> value) {
this.value = value;
}
}
| ListIndexJavaTypeAnnotation |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.