language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | alibaba__nacos | console/src/main/java/com/alibaba/nacos/console/handler/naming/ServiceHandler.java | {
"start": 1278,
"end": 5217
} | interface ____ {
/**
* Create a new service.
*
* @param serviceForm the service form containing the service details
* @param serviceMetadata the service metadata created from serviceForm
* @throws Exception if an error occurs during service creation
*/
void createService(ServiceForm serviceForm, ServiceMetadata serviceMetadata) throws Exception;
/**
* Delete an existing service.
*
* @param namespaceId the namespace ID
* @param serviceName the service name
* @param groupName the group name
* @throws Exception if an error occurs during service deletion
*/
void deleteService(String namespaceId, String serviceName, String groupName) throws Exception;
/**
* Update an existing service.
*
* @param serviceForm the service form containing the service details
* @param serviceMetadata the service metadata created from serviceForm
* @throws Exception if an error occurs during service update
*/
void updateService(ServiceForm serviceForm, ServiceMetadata serviceMetadata) throws Exception;
/**
* Get all selector types.
*
* @return a list of selector types
* @throws NacosException if an error occurs during get selector types
*/
List<String> getSelectorTypeList() throws NacosException;
/**
* Get the list of subscribers for a service.
*
* @param pageNo the page number
* @param pageSize the size of the page
* @param namespaceId the namespace ID
* @param serviceName the service name
* @param groupName the group name
* @param aggregation whether to aggregate the results
* @return a JSON node containing the list of subscribers
* @throws Exception if an error occurs during fetching subscribers
*/
Page<SubscriberInfo> getSubscribers(int pageNo, int pageSize, String namespaceId, String serviceName,
String groupName, boolean aggregation) throws Exception;
/**
* List service detail information.
*
* @param withInstances whether to include instances
* @param namespaceId the namespace ID
* @param pageNo the page number
* @param pageSize the size of the page
* @param serviceName the service name
* @param groupName the group name
* @param ignoreEmptyService whether to filter services with empty instances
* @return if withInstances is {@code true}, return Page of {@link ServiceDetailInfo}, otherwise return Page of {@link ServiceView}
* @throws NacosException if an error occurs during fetching service details
*/
Object getServiceList(boolean withInstances, String namespaceId, int pageNo, int pageSize, String serviceName,
String groupName, boolean ignoreEmptyService) throws NacosException;
/**
* Get the detail of a specific service.
*
* @param namespaceId the namespace ID
* @param serviceName the service name without group
* @param groupName the group name
* @return service detail information
* @throws NacosException if an error occurs during fetching service details
*/
ServiceDetailInfo getServiceDetail(String namespaceId, String serviceName, String groupName) throws NacosException;
/**
* Update the metadata of a cluster.
*
* @param namespaceId the namespace ID
* @param groupName the group name
* @param serviceName the service name
* @param clusterName the cluster name
* @param clusterMetadata the metadata for the cluster
* @throws Exception if the update operation fails
*/
void updateClusterMetadata(String namespaceId, String groupName, String serviceName, String clusterName,
ClusterMetadata clusterMetadata) throws Exception;
}
| ServiceHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MemoizeConstantVisitorStateLookupsTest.java | {
"start": 7185,
"end": 7418
} | class ____ {
void innerMethod(VisitorState state) {
Name className = state.getName("java.lang.Class");
}
}
}
""")
.doTest();
}
}
| InnerClass |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/processor/src/main/java/org/jboss/resteasy/reactive/common/processor/TargetJavaVersion.java | {
"start": 277,
"end": 531
} | class ____ implements TargetJavaVersion {
public static final Unknown INSTANCE = new Unknown();
Unknown() {
}
@Override
public Status isJava19OrHigher() {
return Status.UNKNOWN;
}
}
}
| Unknown |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/benchmarks/HostAndPortBenchmark.java | {
"start": 827,
"end": 1489
} | class ____ {
@Param("192.168.0.1:8080")
private String host;
@Setup
public void setup() {
}
@Benchmark
public int parseIPv4Address() {
String host = this.host;
return HostAndPortImpl.parseIPv4Address(host, 0, host.length());
}
@Benchmark
public int parseHost() {
String host = this.host;
return HostAndPortImpl.parseHost(host, 0, host.length());
}
@Benchmark
public HostAndPortImpl parseAuthority() {
return HostAndPortImpl.parseAuthority(host, -1);
}
@Benchmark
public boolean isValidAuthority() {
return HostAndPortImpl.isValidAuthority(host);
}
}
| HostAndPortBenchmark |
java | netty__netty | codec-http3/src/test/java/io/netty/handler/codec/http3/QpackDecoderTest.java | {
"start": 1381,
"end": 6868
} | class ____ {
private static final String FOO = "foo";
private static final String BAR = "bar";
private QpackDecoderDynamicTable table;
private EmbeddedQuicStreamChannel decoderStream;
private QpackDecoder decoder;
private int inserted;
private int maxEntries;
private QpackAttributes attributes;
public static Collection<Object[]> data() {
int capacity = 128; // maxEntries = 128/32 = 4, maxIndex = 2*4 = 8
return asList(
new Object[]{capacity, 0},
new Object[]{capacity, 1},
new Object[]{capacity, 5},
new Object[]{capacity, 8},
new Object[]{capacity, 16},
new Object[]{capacity, 25},
new Object[]{capacity, 64},
new Object[]{capacity, 89}
);
}
@ParameterizedTest(name = "capacity: {0}, inserts: {1}")
@MethodSource("data")
public void requiredInsertCountAsInserted(int capacity, int insertionCount) throws Exception {
setup(capacity);
insertLiterals(insertionCount);
encodeDecodeVerifyRequiredInsertCount(inserted);
}
@ParameterizedTest(name = "capacity: {0}, inserts: {1}")
@MethodSource("data")
public void requiredInsertCountLessThanInserted(int capacity, int insertionCount) throws Exception {
setup(capacity);
assumeTrue(insertionCount > 0);
insertLiterals(insertionCount);
encodeDecodeVerifyRequiredInsertCount(insertionCount - 1);
}
@ParameterizedTest(name = "capacity: {0}, inserts: {1}")
@MethodSource("data")
public void requiredInsertCountBehindMax(int capacity, int insertionCount) throws Exception {
setup(capacity);
assumeTrue(insertionCount > maxEntries);
insertLiterals(insertionCount);
encodeDecodeVerifyRequiredInsertCount(insertionCount - maxEntries + 1);
}
@ParameterizedTest(name = "capacity: {0}, inserts: {1}")
@MethodSource("data")
public void getWithRelativeIndex(int capacity, int insertionCount) throws Exception {
setup(capacity);
assumeTrue(insertionCount > 3);
insertLiterals(insertionCount);
int requiredInsertCount = encodeDecodeRequiredInsertCount(insertionCount);
int base = encodeDecodeDeltaBase(requiredInsertCount, false, 1);
int relativeIndex = 1;
final QpackHeaderField entry = table.getEntryRelativeEncodedField(base - relativeIndex);
verifyField(entry, insertionCount - 2);
}
@ParameterizedTest(name = "capacity: {0}, inserts: {1}")
@MethodSource("data")
public void getWithPostBaseRelativeIndex(int capacity, int insertionCount) throws Exception {
setup(capacity);
assumeTrue(insertionCount > 2);
insertLiterals(insertionCount);
int requiredInsertCount = encodeDecodeRequiredInsertCount(insertionCount - 1);
int base = encodeDecodeDeltaBase(requiredInsertCount, true, 0);
int relativeIndex = 1;
final QpackHeaderField entry = table.getEntryRelativeEncodedField(base - relativeIndex);
verifyField(entry, insertionCount - 1);
}
private void setup(long capacity) throws QpackException {
long maxTableCapacity = MAX_UNSIGNED_INT;
inserted = 0;
this.maxEntries = toIntExact(QpackUtil.maxEntries(maxTableCapacity));
final DefaultHttp3SettingsFrame settings = new DefaultHttp3SettingsFrame();
settings.put(HTTP3_SETTINGS_QPACK_MAX_TABLE_CAPACITY, maxTableCapacity);
table = new QpackDecoderDynamicTable();
EmbeddedQuicChannel parent = new EmbeddedQuicChannel(true);
attributes = new QpackAttributes(parent, false);
decoderStream = new EmbeddedQuicStreamChannel();
attributes.decoderStream(decoderStream);
decoder = new QpackDecoder(maxTableCapacity, 0, table, ackEachInsert());
decoder.setDynamicTableCapacity(capacity);
}
private void encodeDecodeVerifyRequiredInsertCount(int count) throws QpackException {
final int ric = encodeDecodeRequiredInsertCount(count);
assertThat(ric, is(count));
}
private int encodeDecodeDeltaBase(int requiredInsertCount, boolean postBase, int deltaBase) throws QpackException {
final ByteBuf buf = Unpooled.buffer();
QpackUtil.encodePrefixedInteger(buf, (byte) (postBase ? 0b0 : 0b1000_0000), 8, deltaBase);
try {
return decoder.decodeBase(buf, requiredInsertCount);
} finally {
buf.release();
}
}
private int encodeDecodeRequiredInsertCount(int count) throws QpackException {
final ByteBuf buf = Unpooled.buffer();
QpackUtil.encodePrefixedInteger(buf, (byte) 0b0, 8, count == 0 ? 0 : count % (2L * maxEntries) + 1);
try {
return decoder.decodeRequiredInsertCount(attributes, buf);
} finally {
buf.release();
}
}
private void insertLiterals(int count) throws QpackException {
for (int i = 1; i <= count; i++) {
inserted++;
decoder.insertLiteral(decoderStream, FOO + i, BAR + i);
}
assertThat(decoderStream.finishAndReleaseAll(), is(count > 0));
}
private void verifyField(QpackHeaderField field, int fieldIndexWhenInserted) {
assertThat(field.name, is(FOO + fieldIndexWhenInserted));
assertThat(field.value, is(BAR + fieldIndexWhenInserted));
}
}
| QpackDecoderTest |
java | apache__camel | components/camel-telemetry/src/test/java/org/apache/camel/telemetry/SpanPropagationDownstreamTest.java | {
"start": 1270,
"end": 2620
} | class ____ extends ExchangeTestSupport {
MockTracer mockTracer;
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
this.mockTracer = new MockTracer();
CamelContextAware.trySetCamelContext(mockTracer, context);
mockTracer.init(context);
return context;
}
@Test
void testPropagateDownstreamTraceRequest() throws InterruptedException {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
template.sendBody("direct:start", "Test");
mock.assertIsSatisfied();
mock.getExchanges().forEach(exchange -> {
assertTrue(
exchange.getIn().getHeader("traceparent", String.class).matches("^[a-z0-9]+-[a-z0-9]+$"),
"The traceparent header does not match with the expected format <traceid>-<spanid>");
});
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.routeId("start")
.log("A message")
.to("mock:result");
}
};
}
}
| SpanPropagationDownstreamTest |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/blink/Blink.java | {
"start": 128,
"end": 218
} | class ____ {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.blink);
}
| Blink |
java | spring-projects__spring-security | webauthn/src/test/java/org/springframework/security/web/webauthn/management/JdbcUserCredentialRepositoryTests.java | {
"start": 1772,
"end": 7909
} | class ____ {
private EmbeddedDatabase db;
private JdbcUserCredentialRepository jdbcUserCredentialRepository;
private static final String USER_CREDENTIALS_SQL_RESOURCE = "org/springframework/security/user-credentials-schema.sql";
@BeforeEach
void setUp() {
this.db = createDb();
JdbcOperations jdbcOperations = new JdbcTemplate(this.db);
this.jdbcUserCredentialRepository = new JdbcUserCredentialRepository(jdbcOperations);
}
@AfterEach
void tearDown() {
this.db.shutdown();
}
private static EmbeddedDatabase createDb() {
// @formatter:off
return new EmbeddedDatabaseBuilder()
.generateUniqueName(true)
.setType(EmbeddedDatabaseType.HSQL)
.setScriptEncoding("UTF-8")
.addScript(USER_CREDENTIALS_SQL_RESOURCE)
.build();
// @formatter:on
}
@Test
void constructorWhenJdbcOperationsIsNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> new JdbcUserCredentialRepository(null))
.withMessage("jdbcOperations cannot be null");
// @formatter:on
}
@Test
void saveWhenCredentialRecordIsNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.jdbcUserCredentialRepository.save(null))
.withMessage("record cannot be null");
// @formatter:on
}
@Test
void findByCredentialIdWheCredentialIdIsNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.jdbcUserCredentialRepository.findByCredentialId(null))
.withMessage("credentialId cannot be null");
// @formatter:on
}
@Test
void findByCredentialIdWheUserIdIsNullThenThrowIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.jdbcUserCredentialRepository.findByUserId(null))
.withMessage("userId cannot be null");
// @formatter:on
}
@Test
void saveCredentialRecordWhenSaveThenReturnsSaved() {
CredentialRecord userCredential = TestCredentialRecords.fullUserCredential().build();
this.jdbcUserCredentialRepository.save(userCredential);
CredentialRecord savedUserCredential = this.jdbcUserCredentialRepository
.findByCredentialId(userCredential.getCredentialId());
assertThat(savedUserCredential).isNotNull();
assertThat(savedUserCredential.getCredentialId()).isEqualTo(userCredential.getCredentialId());
assertThat(savedUserCredential.getUserEntityUserId()).isEqualTo(userCredential.getUserEntityUserId());
assertThat(savedUserCredential.getLabel()).isEqualTo(userCredential.getLabel());
assertThat(savedUserCredential.getPublicKey().getBytes()).isEqualTo(userCredential.getPublicKey().getBytes());
assertThat(savedUserCredential.isBackupEligible()).isEqualTo(userCredential.isBackupEligible());
assertThat(savedUserCredential.isBackupState()).isEqualTo(userCredential.isBackupState());
assertThat(savedUserCredential.getCreated()).isNotNull();
assertThat(savedUserCredential.getLastUsed()).isNotNull();
assertThat(savedUserCredential.isUvInitialized()).isFalse();
assertThat(savedUserCredential.getSignatureCount()).isEqualTo(100);
assertThat(savedUserCredential.getCredentialType()).isEqualTo(PublicKeyCredentialType.PUBLIC_KEY);
assertThat(savedUserCredential.getTransports().contains(AuthenticatorTransport.HYBRID)).isTrue();
assertThat(savedUserCredential.getTransports().contains(AuthenticatorTransport.BLE)).isTrue();
assertThat(new String(savedUserCredential.getAttestationObject().getBytes())).isEqualTo("test");
assertThat(new String(savedUserCredential.getAttestationClientDataJSON().getBytes())).isEqualTo("test");
}
@Test
void saveCredentialRecordWhenRecordExistsThenReturnsUpdated() {
CredentialRecord userCredential = TestCredentialRecords.fullUserCredential().build();
this.jdbcUserCredentialRepository.save(userCredential);
// @formatter:off
CredentialRecord updatedRecord = ImmutableCredentialRecord.fromCredentialRecord(userCredential)
.backupEligible(false)
.uvInitialized(true)
.signatureCount(200).build();
// @formatter:on
this.jdbcUserCredentialRepository.save(updatedRecord);
CredentialRecord record = this.jdbcUserCredentialRepository
.findByCredentialId(userCredential.getCredentialId());
assertThat(record.getSignatureCount()).isEqualTo(200);
assertThat(record.isUvInitialized()).isTrue();
assertThat(record.isBackupEligible()).isFalse();
}
@Test
void findCredentialRecordByUserIdWhenRecordExistsThenReturnsSaved() {
CredentialRecord userCredential = TestCredentialRecords.fullUserCredential().build();
this.jdbcUserCredentialRepository.save(userCredential);
List<CredentialRecord> credentialRecords = this.jdbcUserCredentialRepository
.findByUserId(userCredential.getUserEntityUserId());
assertThat(credentialRecords).isNotNull();
assertThat(credentialRecords.size()).isEqualTo(1);
}
@Test
void findCredentialRecordByUserIdWhenRecordDoesNotExistThenReturnsEmpty() {
CredentialRecord userCredential = TestCredentialRecords.fullUserCredential().build();
List<CredentialRecord> credentialRecords = this.jdbcUserCredentialRepository
.findByUserId(userCredential.getUserEntityUserId());
assertThat(credentialRecords.size()).isEqualTo(0);
}
@Test
void findCredentialRecordByCredentialIdWhenRecordDoesNotExistThenReturnsNull() {
CredentialRecord userCredential = TestCredentialRecords.fullUserCredential().build();
CredentialRecord credentialRecord = this.jdbcUserCredentialRepository
.findByCredentialId(userCredential.getCredentialId());
assertThat(credentialRecord).isNull();
}
@Test
void deleteCredentialRecordWhenRecordExistThenSuccess() {
CredentialRecord userCredential = TestCredentialRecords.fullUserCredential().build();
this.jdbcUserCredentialRepository.save(userCredential);
this.jdbcUserCredentialRepository.delete(userCredential.getCredentialId());
CredentialRecord credentialRecord = this.jdbcUserCredentialRepository
.findByCredentialId(userCredential.getCredentialId());
assertThat(credentialRecord).isNull();
}
}
| JdbcUserCredentialRepositoryTests |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java | {
"start": 873,
"end": 5158
} | class ____ {
/**
* Returns the column headings for the given columns.
*/
static Iterator<? extends ToXContent> allColumns(List<ColumnInfoImpl> columns, String name) {
return ChunkedToXContentHelper.chunk((builder, params) -> {
builder.startArray(name);
for (ColumnInfo col : columns) {
col.toXContent(builder, params);
}
return builder.endArray();
});
}
/**
* Returns the column headings for the given columns, moving the heading
* for always-null columns to a {@code null_columns} section.
*/
static Iterator<? extends ToXContent> nonNullColumns(List<ColumnInfoImpl> columns, boolean[] nullColumns, String name) {
return ChunkedToXContentHelper.chunk((builder, params) -> {
builder.startArray(name);
for (int c = 0; c < columns.size(); c++) {
if (nullColumns[c] == false) {
columns.get(c).toXContent(builder, params);
}
}
return builder.endArray();
});
}
/** Returns the column values for the given pages (described by the column infos). */
static Iterator<? extends ToXContent> columnValues(
List<ColumnInfoImpl> columns,
List<Page> pages,
boolean columnar,
boolean[] nullColumns
) {
if (pages.isEmpty()) {
return Collections.emptyIterator();
} else if (columnar) {
return columnarValues(columns, pages, nullColumns);
} else {
return rowValues(columns, pages, nullColumns);
}
}
/** Returns a columnar based representation of the values in the given pages (described by the column infos). */
static Iterator<? extends ToXContent> columnarValues(List<ColumnInfoImpl> columns, List<Page> pages, boolean[] nullColumns) {
final BytesRef scratch = new BytesRef();
return Iterators.flatMap(Iterators.forRange(0, columns.size(), column -> {
if (nullColumns != null && nullColumns[column]) {
return Collections.emptyIterator();
}
return Iterators.concat(
Iterators.single(((builder, params) -> builder.startArray())),
Iterators.flatMap(pages.iterator(), page -> {
PositionToXContent toXContent = PositionToXContent.positionToXContent(
columns.get(column),
page.getBlock(column),
scratch
);
return Iterators.forRange(
0,
page.getPositionCount(),
position -> (builder, params) -> toXContent.positionToXContent(builder, params, position)
);
}),
ChunkedToXContentHelper.endArray()
);
}), Function.identity());
}
/** Returns a row based representation of the values in the given pages (described by the column infos). */
static Iterator<? extends ToXContent> rowValues(List<ColumnInfoImpl> columns, List<Page> pages, boolean[] nullColumns) {
final BytesRef scratch = new BytesRef();
return Iterators.flatMap(pages.iterator(), page -> {
final int columnCount = columns.size();
assert page.getBlockCount() == columnCount : page.getBlockCount() + " != " + columnCount;
final PositionToXContent[] toXContents = new PositionToXContent[columnCount];
for (int column = 0; column < columnCount; column++) {
Block block = page.getBlock(column);
toXContents[column] = PositionToXContent.positionToXContent(columns.get(column), block, scratch);
}
return Iterators.forRange(0, page.getPositionCount(), position -> (builder, params) -> {
builder.startArray();
for (int c = 0; c < columnCount; c++) {
if (nullColumns == null || nullColumns[c] == false) {
toXContents[c].positionToXContent(builder, params, position);
}
}
return builder.endArray();
});
});
}
}
| ResponseXContentUtils |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/MySQLDeleteOrUpsertOperation.java | {
"start": 772,
"end": 1531
} | class ____ extends DeleteOrUpsertOperation {
private Expectation customExpectation;
public MySQLDeleteOrUpsertOperation(EntityMutationTarget mutationTarget, EntityTableMapping tableMapping, UpsertOperation upsertOperation, OptionalTableUpdate optionalTableUpdate) {
super( mutationTarget, tableMapping, upsertOperation, optionalTableUpdate );
}
@Override
public void performMutation(JdbcValueBindings jdbcValueBindings, ValuesAnalysis valuesAnalysis, SharedSessionContractImplementor session) {
customExpectation = new MySQLRowCountExpectation();
super.performMutation( jdbcValueBindings, valuesAnalysis, session );
}
@Override
protected Expectation getExpectation() {
return customExpectation;
}
private static | MySQLDeleteOrUpsertOperation |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/metrics/TestRouterClientMetrics.java | {
"start": 2685,
"end": 6215
} | interface ____ the Namenode. */
private FileSystem nnFS;
@BeforeAll
public static void globalSetUp() throws Exception {
cluster = new MiniRouterDFSCluster(false, NUM_SUBCLUSTERS);
cluster.setNumDatanodesPerNameservice(NUM_DNS);
cluster.startCluster();
Configuration routerConf = new RouterConfigBuilder()
.metrics()
.rpc()
.quota()
.build();
cluster.addRouterOverrides(routerConf);
cluster.startRouters();
// Register and verify all NNs with all routers
cluster.registerNamenodes();
cluster.waitNamenodeRegistration();
}
@BeforeEach
public void testSetup() throws Exception {
// Create mock locations
cluster.installMockLocations();
// Delete all files via the NNs and verify
cluster.deleteAllFiles();
// Create test fixtures on NN
cluster.createTestDirectoriesNamenode();
// Wait to ensure NN has fully created its test directories
Thread.sleep(100);
routerContext = cluster.getRouters().get(0);
this.routerFS = routerContext.getFileSystem();
// Add extra location to the root mount / such that the root mount points:
// /
// ns0 -> /
// ns1 -> /
router = routerContext.getRouter();
MockResolver resolver = (MockResolver) router.getSubclusterResolver();
resolver.addLocation("/", cluster.getNameservices().get(1), "/");
}
@AfterAll
public static void tearDown() throws Exception {
cluster.shutdown();
}
@Test
public void testGetListing() throws IOException {
routerFS.listStatus(new Path("/"));
assertCounter("GetListingOps", 2L, getMetrics(ROUTER_METRICS));
assertCounter("ConcurrentGetListingOps", 1L, getMetrics(ROUTER_METRICS));
}
@Test
public void testCreate() throws IOException {
Path testFile = new Path("/testCreate");
routerFS.create(testFile);
assertCounter("CreateOps", 1L, getMetrics(ROUTER_METRICS));
}
@Test
public void testGetServerDefaults() throws IOException {
router.getRpcServer().getServerDefaults();
assertCounter("GetServerDefaultsOps", 1L, getMetrics(ROUTER_METRICS));
}
@Test
public void testSetQuota() throws Exception {
router.getRpcServer().setQuota("/", 1L, 1L, null);
assertCounter("SetQuotaOps", 2L, getMetrics(ROUTER_METRICS));
assertCounter("ConcurrentSetQuotaOps", 1L, getMetrics(ROUTER_METRICS));
}
@Test
public void testGetQuota() throws Exception {
router.getRpcServer().getQuotaUsage("/");
assertCounter("GetQuotaUsageOps", 2L, getMetrics(ROUTER_METRICS));
assertCounter("ConcurrentGetQuotaUsageOps", 1L, getMetrics(ROUTER_METRICS));
}
@Test
public void testRenewLease() throws Exception {
router.getRpcServer().renewLease("test", null);
assertCounter("RenewLeaseOps", 2L, getMetrics(ROUTER_METRICS));
assertCounter("ConcurrentRenewLeaseOps", 1L, getMetrics(ROUTER_METRICS));
}
@Test
public void testGetDatanodeReport() throws Exception {
router.getRpcServer().
getDatanodeReport(HdfsConstants.DatanodeReportType.LIVE);
assertCounter("GetDatanodeReportOps", 2L, getMetrics(ROUTER_METRICS));
assertCounter("ConcurrentGetDatanodeReportOps", 1L,
getMetrics(ROUTER_METRICS));
}
@Test
public void testGetSlowDatanodeReport() throws Exception {
router.getRpcServer().getSlowDatanodeReport();
assertCounter("GetSlowDatanodeReportOps", 2L, getMetrics(ROUTER_METRICS));
assertCounter("ConcurrentGetSlowDatanodeReportOps", 1L, getMetrics(ROUTER_METRICS));
}
}
| to |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/core/CompletableTransformer.java | {
"start": 710,
"end": 870
} | interface ____ callback used by the compose operator to turn a {@link Completable} into another
* {@code Completable} fluently.
*/
@FunctionalInterface
public | and |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/stream/JpaNativeQueryFlushSessionTest.java | {
"start": 1686,
"end": 1863
} | class ____ {
@Id
private Long id;
private String name;
public Person() {
}
public Person(Long id, String name) {
this.id = id;
this.name = name;
}
}
}
| Person |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/tests/EosTestClient.java | {
"start": 1878,
"end": 8988
} | class ____ extends SmokeTestUtil {
static final String APP_ID = "EosTest";
private final Properties properties;
private final boolean withRepartitioning;
private final AtomicBoolean notRunningCallbackReceived = new AtomicBoolean(false);
private static final List<CapturingConsumerWrapper> CAPTURING_CONSUMER_WRAPPERS = new ArrayList<>();
private int minGroupEpoch = 0;
private KafkaStreams streams;
private boolean uncaughtException;
EosTestClient(final Properties properties, final boolean withRepartitioning) {
super();
this.properties = properties;
this.withRepartitioning = withRepartitioning;
this.properties.put(StreamsConfig.InternalConfig.INTERNAL_CONSUMER_WRAPPER, CapturingConsumerWrapper.class);
CAPTURING_CONSUMER_WRAPPERS.clear();
}
private volatile boolean isRunning = true;
public void start() {
Exit.addShutdownHook("streams-shutdown-hook", () -> {
isRunning = false;
streams.close(Duration.ofSeconds(300));
// need to wait for callback to avoid race condition
// -> make sure the callback printout to stdout is there as it is expected test output
waitForStateTransitionCallback();
// do not remove these printouts since they are needed for health scripts
if (!uncaughtException) {
System.out.println(System.currentTimeMillis());
System.out.println("EOS-TEST-CLIENT-CLOSED");
System.out.flush();
}
});
while (isRunning) {
if (streams == null) {
uncaughtException = false;
streams = createKafkaStreams(properties);
streams.setUncaughtExceptionHandler(e -> {
System.out.println(System.currentTimeMillis());
System.out.println("EOS-TEST-CLIENT-EXCEPTION");
e.printStackTrace();
System.out.flush();
uncaughtException = true;
return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
});
streams.setStateListener((newState, oldState) -> {
// don't remove this -- it's required test output
System.out.println(System.currentTimeMillis());
System.out.println("StateChange: " + oldState + " -> " + newState);
System.out.flush();
if (newState == KafkaStreams.State.NOT_RUNNING) {
notRunningCallbackReceived.set(true);
}
});
streams.start();
}
if (uncaughtException) {
streams.close(Duration.ofSeconds(60_000L));
streams = null;
}
logGroupEpochBump();
sleep(100);
}
}
private KafkaStreams createKafkaStreams(final Properties props) {
props.put(StreamsConfig.APPLICATION_ID_CONFIG, APP_ID);
props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1);
props.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 2);
props.put(StreamsConfig.PROBING_REBALANCE_INTERVAL_MS_CONFIG, Duration.ofMinutes(1).toMillis());
props.put(StreamsConfig.MAX_WARMUP_REPLICAS_CONFIG, Integer.MAX_VALUE);
props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 3);
props.put(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, 0);
props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 5000L); // increase commit interval to make sure a client is killed having an open transaction
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
final StreamsBuilder builder = new StreamsBuilder();
final KStream<String, Integer> data = builder.stream("data");
data.to("echo");
data.process(SmokeTestUtil.printProcessorSupplier("data"));
final KGroupedStream<String, Integer> groupedData = data.groupByKey();
// min
groupedData
.aggregate(
() -> Integer.MAX_VALUE,
(aggKey, value, aggregate) -> (value < aggregate) ? value : aggregate,
Materialized.with(null, intSerde))
.toStream()
.to("min", Produced.with(stringSerde, intSerde));
// sum
groupedData.aggregate(
() -> 0L,
(aggKey, value, aggregate) -> (long) value + aggregate,
Materialized.with(null, longSerde))
.toStream()
.to("sum", Produced.with(stringSerde, longSerde));
if (withRepartitioning) {
data.to("repartition");
final KStream<String, Integer> repartitionedData = builder.stream("repartition");
repartitionedData.process(SmokeTestUtil.printProcessorSupplier("repartition"));
final KGroupedStream<String, Integer> groupedDataAfterRepartitioning = repartitionedData.groupByKey();
// max
groupedDataAfterRepartitioning
.aggregate(
() -> Integer.MIN_VALUE,
(aggKey, value, aggregate) -> (value > aggregate) ? value : aggregate,
Materialized.with(null, intSerde))
.toStream()
.to("max", Produced.with(stringSerde, intSerde));
// count
groupedDataAfterRepartitioning.count()
.toStream()
.to("cnt", Produced.with(stringSerde, longSerde));
}
return new KafkaStreams(builder.build(), props);
}
private void waitForStateTransitionCallback() {
final long maxWaitTime = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(300);
while (!notRunningCallbackReceived.get() && System.currentTimeMillis() < maxWaitTime) {
try {
Thread.sleep(500);
} catch (final InterruptedException ignoreAndSwallow) { /* just keep waiting */ }
}
if (!notRunningCallbackReceived.get()) {
System.err.println("State transition callback to NOT_RUNNING never received. Timed out after 5 minutes.");
System.err.flush();
}
}
// Used in the streams group protocol
// Detect a completed rebalance by checking if the group epoch has been bumped for all threads.
private void logGroupEpochBump() {
int currentMin = Integer.MAX_VALUE;
for (final CapturingConsumerWrapper consumer : CAPTURING_CONSUMER_WRAPPERS) {
final int groupEpoch = consumer.lastSeenGroupEpoch;
if (groupEpoch < currentMin) {
currentMin = groupEpoch;
}
}
if (currentMin > minGroupEpoch) {
System.out.println("MemberEpochBump");
}
if (currentMin != Integer.MAX_VALUE) {
minGroupEpoch = currentMin;
}
}
public static | EosTestClient |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/callbacks/PreUpdateNewBidirectionalBagTest.java | {
"start": 2226,
"end": 2786
} | class ____ {
@Id
private int id;
private String name;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
private Instant lastUpdatedAt;
public Instant getLastUpdatedAt() {
return lastUpdatedAt;
}
public void setLastUpdatedAt(Instant lastUpdatedAt) {
this.lastUpdatedAt = lastUpdatedAt;
}
@OneToMany(mappedBy = "person", cascade = CascadeType.ALL, fetch = FetchType.LAZY)
private Collection<Tag> tags = new ArrayList<Tag>();
}
@Entity(name = "Tag")
public static | Person |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest34_renameIndex.java | {
"start": 911,
"end": 1461
} | class ____ extends TestCase {
public void test_alter_add_key() throws Exception {
String sql = "alter table test.table rename index idx_status to idx_status_2";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("ALTER TABLE test.table\n" +
"\tRENAME INDEX idx_status TO idx_status_2", output);
}
}
| MySqlAlterTableTest34_renameIndex |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/lookup/PropertiesLookupTest.java | {
"start": 1301,
"end": 3893
} | class ____ {
@Test
void testLookupContextProperty() {
final StrLookup propertiesLookup =
new PropertiesLookup(Property.EMPTY_ARRAY, Collections.singletonMap("A", "1"));
assertEquals("1", propertiesLookup.lookup("A"));
final LookupResult lookupResult = propertiesLookup.evaluate("A");
assertEquals("1", lookupResult.value());
assertFalse(lookupResult.isLookupEvaluationAllowedInValue());
}
@Test
void testLookupConfigProperty() {
final StrLookup propertiesLookup =
new PropertiesLookup(new Property[] {Property.createProperty("A", "1")}, Collections.emptyMap());
assertEquals("1", propertiesLookup.lookup("A"));
final LookupResult lookupResult = propertiesLookup.evaluate("A");
assertEquals("1", lookupResult.value());
assertTrue(lookupResult.isLookupEvaluationAllowedInValue());
}
@Test
void testConfigPropertiesPreferredOverContextProperties() {
final StrLookup propertiesLookup = new PropertiesLookup(
new Property[] {Property.createProperty("A", "1")}, Collections.singletonMap("A", "2"));
assertEquals("1", propertiesLookup.lookup("A"));
final LookupResult lookupResult = propertiesLookup.evaluate("A");
assertEquals("1", lookupResult.value());
assertTrue(lookupResult.isLookupEvaluationAllowedInValue());
}
@Test
void testEvaluateResultsSupportRecursiveEvaluation() {
final PropertiesLookup lookup = new PropertiesLookup(Collections.singletonMap("key", "value"));
assertFalse(lookup.evaluate("key").isLookupEvaluationAllowedInValue());
}
@Test
void testEvaluateReturnsNullWhenKeyIsNotFound() {
final PropertiesLookup lookup = new PropertiesLookup(Collections.emptyMap());
assertNull(lookup.evaluate("key"));
}
@Test
void testEvaluateReturnsNullWhenKeyIsNull() {
final PropertiesLookup lookup = new PropertiesLookup(Collections.emptyMap());
assertNull(lookup.evaluate(null));
}
@Test
void testContextPropertiesAreMutable() {
final Map<String, String> contextProperties = new HashMap<>();
final PropertiesLookup lookup = new PropertiesLookup(Property.EMPTY_ARRAY, contextProperties);
assertNull(lookup.evaluate("key"));
contextProperties.put("key", "value");
final LookupResult result = lookup.evaluate("key");
assertEquals("value", result.value());
assertFalse(result.isLookupEvaluationAllowedInValue());
}
}
| PropertiesLookupTest |
java | redisson__redisson | redisson-hibernate/redisson-hibernate-6/src/main/java/org/redisson/hibernate/JndiRedissonRegionNativeFactory.java | {
"start": 1118,
"end": 1917
} | class ____ extends RedissonRegionNativeFactory {
private static final long serialVersionUID = -4814502675083325567L;
public static final String JNDI_NAME = CONFIG_PREFIX + "jndi_name";
@Override
protected RedissonClient createRedissonClient(StandardServiceRegistry registry, Map properties) {
String jndiName = ConfigurationHelper.getString(JNDI_NAME, properties);
if (jndiName == null) {
throw new CacheException(JNDI_NAME + " property not set");
}
try {
return (RedissonClient) registry.getService(JndiService.class).locate(jndiName);
} catch (JndiException e) {
throw new CacheException(e);
}
}
@Override
protected void releaseFromUse() {
}
}
| JndiRedissonRegionNativeFactory |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/dispatcher/cleanup/TestingResourceCleanerFactory.java | {
"start": 1283,
"end": 3614
} | class ____ implements ResourceCleanerFactory {
private final Collection<LocallyCleanableResource> locallyCleanableResources;
private final Collection<GloballyCleanableResource> globallyCleanableResources;
private final Executor cleanupExecutor;
private TestingResourceCleanerFactory(
Collection<LocallyCleanableResource> locallyCleanableResources,
Collection<GloballyCleanableResource> globallyCleanableResources,
Executor cleanupExecutor) {
this.locallyCleanableResources = locallyCleanableResources;
this.globallyCleanableResources = globallyCleanableResources;
this.cleanupExecutor = cleanupExecutor;
}
@Override
public ResourceCleaner createLocalResourceCleaner(
ComponentMainThreadExecutor mainThreadExecutor) {
return createResourceCleaner(
mainThreadExecutor,
locallyCleanableResources,
LocallyCleanableResource::localCleanupAsync);
}
@Override
public ResourceCleaner createGlobalResourceCleaner(
ComponentMainThreadExecutor mainThreadExecutor) {
return createResourceCleaner(
mainThreadExecutor,
globallyCleanableResources,
GloballyCleanableResource::globalCleanupAsync);
}
private <T> ResourceCleaner createResourceCleaner(
ComponentMainThreadExecutor mainThreadExecutor,
Collection<T> resources,
DefaultResourceCleaner.CleanupFn<T> cleanupFn) {
return jobId -> {
mainThreadExecutor.assertRunningInMainThread();
Throwable t = null;
for (T resource : resources) {
try {
cleanupFn.cleanupAsync(resource, jobId, cleanupExecutor).get();
} catch (Throwable throwable) {
t = ExceptionUtils.firstOrSuppressed(throwable, t);
}
}
return t != null
? FutureUtils.completedExceptionally(t)
: FutureUtils.completedVoidFuture();
};
}
public static Builder builder() {
return new Builder();
}
/** {@code Builder} for creating {@code TestingResourceCleanerFactory} instances. */
public static | TestingResourceCleanerFactory |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/web/authentication/OAuth2ErrorAuthenticationFailureHandler.java | {
"start": 1901,
"end": 3502
} | class ____ implements AuthenticationFailureHandler {
private final Log logger = LogFactory.getLog(getClass());
private HttpMessageConverter<OAuth2Error> errorResponseConverter = new OAuth2ErrorHttpMessageConverter();
@Override
public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response,
AuthenticationException authenticationException) throws IOException, ServletException {
ServletServerHttpResponse httpResponse = new ServletServerHttpResponse(response);
httpResponse.setStatusCode(HttpStatus.BAD_REQUEST);
if (authenticationException instanceof OAuth2AuthenticationException oauth2AuthenticationException) {
OAuth2Error error = oauth2AuthenticationException.getError();
this.errorResponseConverter.write(error, null, httpResponse);
}
else {
if (this.logger.isWarnEnabled()) {
this.logger.warn(AuthenticationException.class.getSimpleName() + " must be of type "
+ OAuth2AuthenticationException.class.getName() + " but was "
+ authenticationException.getClass().getName());
}
}
}
/**
* Sets the {@link HttpMessageConverter} used for converting an {@link OAuth2Error} to
* an HTTP response.
* @param errorResponseConverter the {@link HttpMessageConverter} used for converting
* an {@link OAuth2Error} to an HTTP response
*/
public void setErrorResponseConverter(HttpMessageConverter<OAuth2Error> errorResponseConverter) {
Assert.notNull(errorResponseConverter, "errorResponseConverter cannot be null");
this.errorResponseConverter = errorResponseConverter;
}
}
| OAuth2ErrorAuthenticationFailureHandler |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/reflect/ConstructorUtils.java | {
"start": 10321,
"end": 10650
} | class ____ the right constructor from the list of parameter types.
*
* <p>
* This locates and calls a constructor. The constructor signature must match the parameter types by assignment compatibility.
* </p>
*
* @param <T> the type to be constructed.
* @param cls the | choosing |
java | google__guice | core/src/com/google/inject/util/Modules.java | {
"start": 15537,
"end": 16043
} | class ____ implements Module {
@Override
public void configure(Binder binder) {
binder.requireExplicitBindings();
}
}
/**
* Returns a module that will configure the injector to require {@literal @}{@link Inject} on
* constructors.
*
* @since 4.2.3
* @see Binder#requireAtInjectOnConstructors
*/
public static Module requireAtInjectOnConstructorsModule() {
return new RequireAtInjectOnConstructorsModule();
}
private static final | RequireExplicitBindingsModule |
java | elastic__elasticsearch | x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatPipeTests.java | {
"start": 1420,
"end": 7585
} | class ____ extends AbstractNodeTestCase<DateTimeFormatPipe, Pipe> {
public static DateTimeFormatPipe randomDateTimeFormatPipe() {
List<Pipe> functions = new ArrayList<>();
functions.add(new DateTimeFormat(randomSource(), randomDatetimeLiteral(), randomStringLiteral(), randomZone()).makePipe());
functions.add(new Format(randomSource(), randomDatetimeLiteral(), randomStringLiteral(), randomZone()).makePipe());
return (DateTimeFormatPipe) randomFrom(functions);
}
@Override
protected DateTimeFormatPipe randomInstance() {
return randomDateTimeFormatPipe();
}
private Expression randomDateTimeFormatPipeExpression() {
return randomDateTimeFormatPipe().expression();
}
@Override
public void testTransform() {
// test transforming only the properties (source, expression),
// skipping the children (the two parameters of the binary function) which are tested separately
DateTimeFormatPipe b1 = randomInstance();
Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDateTimeFormatPipeExpression);
DateTimeFormatPipe newB = new DateTimeFormatPipe(b1.source(), newExpression, b1.left(), b1.right(), b1.zoneId(), b1.formatter());
assertEquals(newB, b1.transformPropertiesOnly(Expression.class, v -> Objects.equals(v, b1.expression()) ? newExpression : v));
DateTimeFormatPipe b2 = randomInstance();
Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource);
newB = new DateTimeFormatPipe(newLoc, b2.expression(), b2.left(), b2.right(), b2.zoneId(), b2.formatter());
assertEquals(newB, b2.transformPropertiesOnly(Source.class, v -> Objects.equals(v, b2.source()) ? newLoc : v));
DateTimeFormatPipe b3 = randomInstance();
Formatter newFormatter = randomValueOtherThan(b3.formatter(), () -> randomFrom(Formatter.values()));
newB = new DateTimeFormatPipe(b3.source(), b3.expression(), b3.left(), b3.right(), b3.zoneId(), newFormatter);
assertEquals(newB, b3.transformPropertiesOnly(Formatter.class, v -> Objects.equals(v, b3.formatter()) ? newFormatter : v));
DateTimeFormatPipe b4 = randomInstance();
ZoneId newZI = randomValueOtherThan(b4.zoneId(), ESTestCase::randomZone);
newB = new DateTimeFormatPipe(b4.source(), b4.expression(), b4.left(), b4.right(), newZI, b4.formatter());
assertEquals(newB, b4.transformPropertiesOnly(ZoneId.class, v -> Objects.equals(v, b4.zoneId()) ? newZI : v));
}
@Override
public void testReplaceChildren() {
DateTimeFormatPipe b = randomInstance();
Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomDatetimeLiteral)));
Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomStringLiteral)));
ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone);
DateTimeFormatPipe newB = new DateTimeFormatPipe(b.source(), b.expression(), b.left(), b.right(), newZoneId, b.formatter());
BinaryPipe transformed = newB.replaceChildren(newLeft, b.right());
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), b.right());
transformed = newB.replaceChildren(b.left(), newRight);
assertEquals(transformed.left(), b.left());
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), newRight);
transformed = newB.replaceChildren(newLeft, newRight);
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), newRight);
}
@Override
protected DateTimeFormatPipe mutate(DateTimeFormatPipe instance) {
List<Function<DateTimeFormatPipe, DateTimeFormatPipe>> randoms = new ArrayList<>();
randoms.add(
f -> new DateTimeFormatPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomDatetimeLiteral))),
f.right(),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone),
f.formatter()
)
);
randoms.add(
f -> new DateTimeFormatPipe(
f.source(),
f.expression(),
f.left(),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomStringLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone),
f.formatter()
)
);
randoms.add(
f -> new DateTimeFormatPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomDatetimeLiteral))),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomStringLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone),
f.formatter()
)
);
randoms.add(
f -> new DateTimeFormatPipe(
f.source(),
f.expression(),
f.left(),
f.right(),
f.zoneId(),
randomValueOtherThan(f.formatter(), () -> randomFrom(Formatter.values()))
)
);
return randomFrom(randoms).apply(instance);
}
@Override
protected DateTimeFormatPipe copy(DateTimeFormatPipe instance) {
return new DateTimeFormatPipe(
instance.source(),
instance.expression(),
instance.left(),
instance.right(),
instance.zoneId(),
instance.formatter()
);
}
}
| DateTimeFormatPipeTests |
java | quarkusio__quarkus | test-framework/junit5-config/src/main/java/io/quarkus/test/config/TestConfigProviderResolver.java | {
"start": 797,
"end": 4443
} | class ____ *extends* and *consumes* SmallRyeConfigProviderResolver. Every method in SmallRyeConfigProviderResolver should be replicated here with a delegation to the instance variable, or there will be subtle and horrible bugs.
private final SmallRyeConfigProviderResolver resolver;
private final ClassLoader classLoader;
private final Map<LaunchMode, SmallRyeConfig> configs;
TestConfigProviderResolver() {
this.resolver = (SmallRyeConfigProviderResolver) SmallRyeConfigProviderResolver.instance();
this.classLoader = Thread.currentThread().getContextClassLoader();
this.configs = new ConcurrentHashMap<>();
}
@Override
public Config getConfig() {
return resolver.getConfig();
}
/**
* Registers a config in the Test classloader, by {@link LaunchMode}. Required for tests that launch Quarkus in
* Dev mode (which uses the <code>dev</code> config profile, instead of <code>test</code>.
* <p>
* Retrieving the {@link Config} in a {@link LaunchMode} other than {@link LaunchMode#TEST}, must call
* {@link TestConfigProviderResolver#restoreConfig()} after using the config, to avoid mismatches in the config
* profile through the stack.
*
* @param mode the {@link LaunchMode}
* @return the registed {@link Config} instance
*/
public Config getConfig(final LaunchMode mode) {
if (classLoader.equals(Thread.currentThread().getContextClassLoader())) {
resolver.releaseConfig(classLoader);
SmallRyeConfig config = configs.computeIfAbsent(mode, new Function<LaunchMode, SmallRyeConfig>() {
@Override
public SmallRyeConfig apply(final LaunchMode launchMode) {
LaunchMode current = LaunchMode.current();
LaunchMode.set(launchMode);
SmallRyeConfig config = ConfigUtils.configBuilder()
.withCustomizers(new TestConfigCustomizer(mode))
.build();
LaunchMode.set(current);
return config;
}
});
resolver.registerConfig(config, classLoader);
return config;
}
throw new IllegalStateException("Context ClassLoader mismatch. Should be " + classLoader + " but was "
+ Thread.currentThread().getContextClassLoader());
}
public void restoreConfig() {
if (classLoader.equals(Thread.currentThread().getContextClassLoader())) {
resolver.releaseConfig(classLoader);
resolver.registerConfig(configs.get(LaunchMode.TEST), classLoader);
} else {
throw new IllegalStateException("Context ClassLoader mismatch. Should be " + classLoader + " but was "
+ Thread.currentThread().getContextClassLoader());
}
}
public void restore() {
this.configs.clear();
ConfigProviderResolver.setInstance(resolver);
}
@Override
public Config getConfig(final ClassLoader loader) {
return resolver.getConfig(loader);
}
@Override
public SmallRyeConfigBuilder getBuilder() {
return resolver.getBuilder();
}
@Override
public void registerConfig(final Config config, final ClassLoader classLoader) {
resolver.registerConfig(config, classLoader);
}
@Override
public void releaseConfig(final Config config) {
resolver.releaseConfig(config);
}
@Override
public void releaseConfig(final ClassLoader classLoader) {
resolver.releaseConfig(classLoader);
}
}
| both |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/asyncprocessing/operators/windowing/triggers/AsyncContinuousEventTimeTrigger.java | {
"start": 1861,
"end": 6720
} | class ____<W extends Window> extends AsyncTrigger<Object, W> {
private static final long serialVersionUID = 1L;
private final long interval;
/** When merging we take the lowest of all fire timestamps as the new fire timestamp. */
private final ReducingStateDescriptor<Long> stateDesc =
new ReducingStateDescriptor<>("fire-time", new Min(), LongSerializer.INSTANCE);
private AsyncContinuousEventTimeTrigger(long interval) {
this.interval = interval;
}
@Override
public StateFuture<TriggerResult> onElement(
Object element, long timestamp, W window, TriggerContext ctx) throws Exception {
if (window.maxTimestamp() <= ctx.getCurrentWatermark()) {
// if the watermark is already past the window fire immediately
return StateFutureUtils.completedFuture(TriggerResult.FIRE);
} else {
ctx.registerEventTimeTimer(window.maxTimestamp());
}
ReducingState<Long> fireTimestampState = ctx.getPartitionedState(stateDesc);
return fireTimestampState
.asyncGet()
.thenCompose(
ts -> {
if (ts == null) {
registerNextFireTimestamp(
timestamp - (timestamp % interval),
window,
ctx,
fireTimestampState);
}
return StateFutureUtils.completedFuture(TriggerResult.CONTINUE);
});
}
@Override
public StateFuture<TriggerResult> onEventTime(long time, W window, TriggerContext ctx)
throws Exception {
if (time == window.maxTimestamp()) {
return StateFutureUtils.completedFuture(TriggerResult.FIRE);
}
ReducingState<Long> fireTimestampState = ctx.getPartitionedState(stateDesc);
return fireTimestampState
.asyncGet()
.thenCompose(
fireTimestamp -> {
if (fireTimestamp != null && fireTimestamp == time) {
return fireTimestampState
.asyncClear()
.thenCompose(
(ignore) ->
registerNextFireTimestamp(
time,
window,
ctx,
fireTimestampState))
.thenApply(ignore -> TriggerResult.FIRE);
}
return StateFutureUtils.completedFuture(TriggerResult.CONTINUE);
});
}
@Override
public StateFuture<TriggerResult> onProcessingTime(long time, W window, TriggerContext ctx)
throws Exception {
return StateFutureUtils.completedFuture(TriggerResult.CONTINUE);
}
@Override
public StateFuture<Void> clear(W window, TriggerContext ctx) throws Exception {
ReducingState<Long> fireTimestamp = ctx.getPartitionedState(stateDesc);
return fireTimestamp
.asyncGet()
.thenCompose(
ts -> {
if (ts != null) {
ctx.deleteEventTimeTimer(ts);
return fireTimestamp.asyncClear();
} else {
return StateFutureUtils.completedVoidFuture();
}
});
}
@Override
public boolean canMerge() {
return true;
}
@Override
public void onMerge(W window, OnMergeContext ctx) throws Exception {
throw new RuntimeException("Merge window not support");
}
@Override
public String toString() {
return "ContinuousEventTimeTrigger(" + interval + ")";
}
@VisibleForTesting
public long getInterval() {
return interval;
}
/**
* Creates a trigger that continuously fires based on the given interval.
*
* @param interval The time interval at which to fire.
* @param <W> The type of {@link Window Windows} on which this trigger can operate.
*/
public static <W extends Window> AsyncContinuousEventTimeTrigger<W> of(Duration interval) {
return new AsyncContinuousEventTimeTrigger<>(interval.toMillis());
}
private static | AsyncContinuousEventTimeTrigger |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/support/TestPropertySourceAttributes.java | {
"start": 12226,
"end": 12571
} | class ____");
return (Class<?>) source;
}
/**
* Determine if the supplied list contains no descriptor with locations.
*/
private static boolean hasNoLocations(List<PropertySourceDescriptor> descriptors) {
return descriptors.stream().map(PropertySourceDescriptor::locations)
.flatMap(List::stream).findAny().isEmpty();
}
}
| available |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterRpcStoragePolicySatisfier.java | {
"start": 2222,
"end": 2316
} | interface ____ the Router. */
private static ClientProtocol routerProtocol;
/** Filesystem | to |
java | apache__kafka | test-common/test-common-internal-api/src/main/java/org/apache/kafka/common/test/api/ClusterTestDefaults.java | {
"start": 1404,
"end": 1750
} | interface ____ {
Type[] types() default {Type.KRAFT, Type.CO_KRAFT};
int brokers() default 1;
int controllers() default 1;
int disksPerBroker() default 1;
boolean autoStart() default true;
// Set default server properties for all @ClusterTest(s)
ClusterConfigProperty[] serverProperties() default {};
}
| ClusterTestDefaults |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/convert/ConvertingSerializerTest.java | {
"start": 1582,
"end": 1796
} | class ____ {
@JsonSerialize(converter=PointConverter.class)
public Point value;
public PointWrapper(int x, int y) {
value = new Point(x, y);
}
}
static | PointWrapper |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/property/access/internal/PropertyAccessStrategyResolverStandardImpl.java | {
"start": 1156,
"end": 3215
} | class ____ implements PropertyAccessStrategyResolver {
private final ServiceRegistry serviceRegistry;
public PropertyAccessStrategyResolverStandardImpl(ServiceRegistry serviceRegistry) {
this.serviceRegistry = serviceRegistry;
}
@Override
public PropertyAccessStrategy resolvePropertyAccessStrategy(
Class<?> containerClass,
String explicitAccessStrategyName,
RepresentationMode representationMode) {
if ( isManagedType( containerClass ) ) {
if ( BASIC.getExternalName().equals( explicitAccessStrategyName ) ) {
return PropertyAccessStrategyEnhancedImpl.PROPERTY;
}
else if ( FIELD.getExternalName().equals( explicitAccessStrategyName ) ) {
return PropertyAccessStrategyEnhancedImpl.FIELD;
}
else if ( MIXED.getExternalName().equals( explicitAccessStrategyName ) ) {
return PropertyAccessStrategyEnhancedImpl.STANDARD;
}
}
if ( isNotEmpty( explicitAccessStrategyName ) ) {
return resolveExplicitlyNamedPropertyAccessStrategy( explicitAccessStrategyName );
}
else if ( representationMode == RepresentationMode.MAP ) {
return MAP.getStrategy();
}
else {
return BASIC.getStrategy();
}
}
protected PropertyAccessStrategy resolveExplicitlyNamedPropertyAccessStrategy(String explicitAccessStrategyName) {
final var builtInStrategyEnum = BuiltInPropertyAccessStrategies.interpret( explicitAccessStrategyName );
return builtInStrategyEnum != null
? builtInStrategyEnum.getStrategy()
: strategySelectorService().resolveStrategy( PropertyAccessStrategy.class, explicitAccessStrategyName );
}
private StrategySelector strategySelectorService;
protected StrategySelector strategySelectorService() {
if ( strategySelectorService == null ) {
if ( serviceRegistry == null ) {
throw new HibernateException( "ServiceRegistry not yet injected; PropertyAccessStrategyResolver not ready for use." );
}
strategySelectorService = serviceRegistry.requireService( StrategySelector.class );
}
return strategySelectorService;
}
}
| PropertyAccessStrategyResolverStandardImpl |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/checkpoints/CheckpointHandlersTest.java | {
"start": 3058,
"end": 19718
} | class ____ {
private static final Duration TIMEOUT = Duration.ofSeconds(10);
private static final JobID JOB_ID = new JobID();
private static final Long COMPLETED_CHECKPOINT_ID = 123456L;
private static CheckpointHandlers.CheckpointTriggerHandler checkpointTriggerHandler;
private static CheckpointHandlers.CheckpointStatusHandler checkpointStatusHandler;
@BeforeAll
static void setUp() throws Exception {
GatewayRetriever<RestfulGateway> leaderRetriever =
() -> CompletableFuture.completedFuture(null);
checkpointTriggerHandler =
new CheckpointHandlers.CheckpointTriggerHandler(
leaderRetriever, TIMEOUT, Collections.emptyMap());
checkpointStatusHandler =
new CheckpointHandlers.CheckpointStatusHandler(
leaderRetriever, TIMEOUT, Collections.emptyMap());
}
@Test
void testCheckpointTriggerCompletedSuccessfully() throws Exception {
final OperationResult<Long> successfulResult =
OperationResult.success(COMPLETED_CHECKPOINT_ID);
final CompletableFuture<CheckpointType> checkpointPropertiesFuture =
new CompletableFuture<>();
final AtomicReference<AsynchronousJobOperationKey> keyReference = new AtomicReference<>();
final TestingRestfulGateway testingRestfulGateway =
new TestingRestfulGateway.Builder()
.setTriggerCheckpointFunction(
(AsynchronousJobOperationKey key,
CheckpointType checkpointType) -> {
keyReference.set(key);
checkpointPropertiesFuture.complete(checkpointType);
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setGetCheckpointStatusFunction(
(AsynchronousJobOperationKey operationKey) -> {
if (operationKey.equals(keyReference.get())) {
return CompletableFuture.completedFuture(successfulResult);
}
throw new RuntimeException(
"Expected operation key "
+ keyReference.get()
+ ", but received "
+ operationKey);
})
.build();
final CheckpointType checkpointType = CheckpointType.FULL;
final TriggerId triggerId =
checkpointTriggerHandler
.handleRequest(
triggerCheckpointRequest(checkpointType, null),
testingRestfulGateway)
.get()
.getTriggerId();
final AsynchronousOperationResult<CheckpointInfo> checkpointTriggerResponseBody =
checkpointStatusHandler
.handleRequest(
checkpointTriggerStatusRequest(triggerId), testingRestfulGateway)
.get();
assertThat(checkpointTriggerResponseBody.queueStatus().getId())
.isEqualTo(QueueStatus.Id.COMPLETED);
assertThat(checkpointTriggerResponseBody.resource()).isNotNull();
assertThat(checkpointTriggerResponseBody.resource().getCheckpointId())
.isEqualTo(COMPLETED_CHECKPOINT_ID);
assertThat(checkpointPropertiesFuture.get()).isEqualTo(CheckpointType.FULL);
}
@Test
void testTriggerCheckpointNoCheckpointType() throws Exception {
final OperationResult<Long> successfulResult =
OperationResult.success(COMPLETED_CHECKPOINT_ID);
final CompletableFuture<CheckpointType> checkpointTypeFuture = new CompletableFuture<>();
final AtomicReference<AsynchronousJobOperationKey> keyReference = new AtomicReference<>();
final TestingRestfulGateway testingRestfulGateway =
new TestingRestfulGateway.Builder()
.setTriggerCheckpointFunction(
(AsynchronousJobOperationKey key,
CheckpointType checkpointType) -> {
keyReference.set(key);
checkpointTypeFuture.complete(checkpointType);
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setGetCheckpointStatusFunction(
(AsynchronousJobOperationKey operationKey) -> {
if (operationKey.equals(keyReference.get())) {
return CompletableFuture.completedFuture(successfulResult);
}
throw new RuntimeException(
"Expected operation key "
+ keyReference.get()
+ ", but received "
+ operationKey);
})
.build();
final TriggerId triggerId =
checkpointTriggerHandler
.handleRequest(triggerCheckpointRequest(null, null), testingRestfulGateway)
.get()
.getTriggerId();
AsynchronousOperationResult<CheckpointInfo> checkpointTriggerResponseBody;
checkpointTriggerResponseBody =
checkpointStatusHandler
.handleRequest(
checkpointTriggerStatusRequest(triggerId), testingRestfulGateway)
.get();
assertThat(checkpointTriggerResponseBody.queueStatus().getId())
.isEqualTo(QueueStatus.Id.COMPLETED);
assertThat(checkpointTriggerResponseBody.resource()).isNotNull();
assertThat(checkpointTriggerResponseBody.resource().getCheckpointId())
.isEqualTo(COMPLETED_CHECKPOINT_ID);
assertThat(checkpointTypeFuture.get()).isEqualTo(CheckpointType.DEFAULT);
}
@Test
void testDisallowTriggeringIncrementalCheckpoint() throws Exception {
final OperationResult<Long> successfulResult =
OperationResult.success(COMPLETED_CHECKPOINT_ID);
final CompletableFuture<CheckpointType> checkpointTypeFuture = new CompletableFuture<>();
final AtomicReference<AsynchronousJobOperationKey> keyReference = new AtomicReference<>();
final TestingRestfulGateway testingRestfulGateway =
new TestingRestfulGateway.Builder()
.setTriggerCheckpointFunction(
(AsynchronousJobOperationKey key,
CheckpointType checkpointType) -> {
keyReference.set(key);
checkpointTypeFuture.complete(checkpointType);
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setGetCheckpointStatusFunction(
(AsynchronousJobOperationKey operationKey) -> {
if (operationKey.equals(keyReference.get())) {
return CompletableFuture.completedFuture(successfulResult);
}
throw new RuntimeException(
"Expected operation key "
+ keyReference.get()
+ ", but received "
+ operationKey);
})
.build();
final CheckpointType checkpointType = CheckpointType.INCREMENTAL;
assertThrows(
IllegalStateException.class,
() ->
checkpointTriggerHandler
.handleRequest(
triggerCheckpointRequest(checkpointType, null),
testingRestfulGateway)
.get());
assertThat(checkpointTypeFuture.isDone()).isFalse();
}
@Test
void testCheckpointCompletedWithException() throws Exception {
final OperationResult<Long> failedResult =
OperationResult.failure(new RuntimeException("expected"));
final AtomicReference<AsynchronousJobOperationKey> keyReference = new AtomicReference<>();
final TestingRestfulGateway testingRestfulGateway =
new TestingRestfulGateway.Builder()
.setTriggerCheckpointFunction(
(AsynchronousJobOperationKey key,
CheckpointType checkpointType) -> {
keyReference.set(key);
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setGetCheckpointStatusFunction(
(AsynchronousJobOperationKey operationKey) -> {
if (operationKey.equals(keyReference.get())) {
return CompletableFuture.completedFuture(failedResult);
}
throw new RuntimeException(
"Expected operation key "
+ keyReference.get()
+ ", but received "
+ operationKey);
})
.build();
final TriggerId triggerId =
checkpointTriggerHandler
.handleRequest(triggerCheckpointRequest(null, null), testingRestfulGateway)
.get()
.getTriggerId();
AsynchronousOperationResult<CheckpointInfo> checkpointTriggerResponseBody;
checkpointTriggerResponseBody =
checkpointStatusHandler
.handleRequest(
checkpointTriggerStatusRequest(triggerId), testingRestfulGateway)
.get();
assertThat(checkpointTriggerResponseBody.queueStatus().getId())
.isEqualTo(QueueStatus.Id.COMPLETED);
assertThat(checkpointTriggerResponseBody.resource()).isNotNull();
assertThat(checkpointTriggerResponseBody.resource().getFailureCause()).isNotNull();
final Throwable checkpointError =
checkpointTriggerResponseBody
.resource()
.getFailureCause()
.deserializeError(ClassLoader.getSystemClassLoader());
assertThat(checkpointError.getMessage()).matches("expected");
assertThat(checkpointError).isInstanceOf(RuntimeException.class);
}
@Test
void testProvidedTriggerId() throws Exception {
final OperationResult<Long> successfulResult =
OperationResult.success(COMPLETED_CHECKPOINT_ID);
final AtomicReference<AsynchronousJobOperationKey> keyReference = new AtomicReference<>();
final TestingRestfulGateway testingRestfulGateway =
new TestingRestfulGateway.Builder()
.setTriggerCheckpointFunction(
(AsynchronousJobOperationKey key,
CheckpointType checkpointType) -> {
keyReference.set(key);
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setGetCheckpointStatusFunction(
(AsynchronousJobOperationKey operationKey) -> {
if (operationKey.equals(keyReference.get())) {
return CompletableFuture.completedFuture(successfulResult);
}
throw new RuntimeException(
"Expected operation key "
+ keyReference.get()
+ ", but received "
+ operationKey);
})
.build();
final TriggerId providedTriggerId = new TriggerId();
final TriggerId returnedTriggerId =
checkpointTriggerHandler
.handleRequest(
triggerCheckpointRequest(CheckpointType.FULL, providedTriggerId),
testingRestfulGateway)
.get()
.getTriggerId();
assertThat(providedTriggerId).isEqualTo(returnedTriggerId);
AsynchronousOperationResult<CheckpointInfo> checkpointTriggerResponseBody;
checkpointTriggerResponseBody =
checkpointStatusHandler
.handleRequest(
checkpointTriggerStatusRequest(providedTriggerId),
testingRestfulGateway)
.get();
assertThat(checkpointTriggerResponseBody.queueStatus().getId())
.isEqualTo(QueueStatus.Id.COMPLETED);
assertThat(checkpointTriggerResponseBody.resource()).isNotNull();
assertThat(checkpointTriggerResponseBody.resource().getCheckpointId())
.isEqualTo(COMPLETED_CHECKPOINT_ID);
}
@Test
void testQueryStatusOfUnknownOperationReturnsError()
throws HandlerRequestException, RestHandlerException {
final TestingRestfulGateway testingRestfulGateway =
new TestingRestfulGateway.Builder()
.setGetCheckpointStatusFunction(
key ->
FutureUtils.completedExceptionally(
new UnknownOperationKeyException(key)))
.build();
final CompletableFuture<AsynchronousOperationResult<CheckpointInfo>> statusFuture =
checkpointStatusHandler.handleRequest(
checkpointTriggerStatusRequest(new TriggerId()), testingRestfulGateway);
assertThat(statusFuture)
.matches(RestMatchers.respondsWithError(HttpResponseStatus.NOT_FOUND)::matches);
}
private static HandlerRequest<CheckpointTriggerRequestBody> triggerCheckpointRequest(
final CheckpointType checkpointType, @Nullable final TriggerId triggerId)
throws HandlerRequestException {
return HandlerRequest.resolveParametersAndCreate(
new CheckpointTriggerRequestBody(checkpointType, triggerId),
new CheckpointTriggerMessageParameters(),
Collections.singletonMap(JobIDPathParameter.KEY, JOB_ID.toString()),
Collections.emptyMap(),
Collections.emptyList());
}
private static HandlerRequest<EmptyRequestBody> checkpointTriggerStatusRequest(
final TriggerId triggerId) throws HandlerRequestException {
final Map<String, String> pathParameters = new HashMap<>();
pathParameters.put(JobIDPathParameter.KEY, JOB_ID.toString());
pathParameters.put(TriggerIdPathParameter.KEY, triggerId.toString());
return HandlerRequest.resolveParametersAndCreate(
EmptyRequestBody.getInstance(),
new CheckpointStatusMessageParameters(),
pathParameters,
Collections.emptyMap(),
Collections.emptyList());
}
}
| CheckpointHandlersTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/EntryPointAssertions_withinPercentage_Test.java | {
"start": 1088,
"end": 2793
} | class ____ extends EntryPointAssertionsBaseTest {
@ParameterizedTest
@MethodSource("doublePercentageFactories")
void should_create_Double_offset(Function<Double, Percentage> percentageFactory) {
// GIVEN
Double value = 90.0;
// WHEN
Percentage percentage = percentageFactory.apply(value);
// THEN
then(percentage).isEqualTo(withPercentage(value));
}
private static Stream<Function<Double, Percentage>> doublePercentageFactories() {
return Stream.of(Assertions::withinPercentage, BDDAssertions::withinPercentage, withAssertions::withinPercentage);
}
@ParameterizedTest
@MethodSource("integerPercentageFactories")
void should_create_Integer_offset(Function<Integer, Percentage> percentageFactory) {
// GIVEN
Integer value = 90;
// WHEN
Percentage percentage = percentageFactory.apply(value);
// THEN
then(percentage).isEqualTo(withPercentage(value));
}
private static Stream<Function<Integer, Percentage>> integerPercentageFactories() {
return Stream.of(Assertions::withinPercentage, BDDAssertions::withinPercentage, withAssertions::withinPercentage);
}
@ParameterizedTest
@MethodSource("longPercentageFactories")
void should_create_Long_offset(Function<Long, Percentage> percentageFactory) {
// GIVEN
Long value = 90L;
// WHEN
Percentage percentage = percentageFactory.apply(value);
// THEN
then(percentage).isEqualTo(withPercentage(value));
}
private static Stream<Function<Long, Percentage>> longPercentageFactories() {
return Stream.of(Assertions::withinPercentage, BDDAssertions::withinPercentage, withAssertions::withinPercentage);
}
}
| EntryPointAssertions_withinPercentage_Test |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/launcher/TestServiceConf.java | {
"start": 1388,
"end": 4840
} | class ____
extends AbstractServiceLauncherTestBase {
@Test
public void testRunService() throws Throwable {
assertRuns(LaunchableRunningService.NAME);
}
@Test
public void testConfPropagationOverInitBindings() throws Throwable {
Configuration conf = newConf(RunningService.FAIL_IN_RUN, "true");
assertLaunchOutcome(EXIT_FAIL,
"failed",
LaunchableRunningService.NAME,
ARG_CONF_PREFIXED,
configFile(conf));
}
@Test
public void testUnbalancedConfArg() throws Throwable {
assertLaunchOutcome(EXIT_COMMAND_ARGUMENT_ERROR,
E_PARSE_FAILED,
LaunchableRunningService.NAME,
ARG_CONF_PREFIXED);
}
@Test
public void testConfArgMissingFile() throws Throwable {
assertLaunchOutcome(EXIT_COMMAND_ARGUMENT_ERROR,
E_PARSE_FAILED,
LaunchableRunningService.NAME,
ARG_CONF_PREFIXED,
"no-file.xml");
}
@Test
public void testConfPropagation() throws Throwable {
Configuration conf = newConf(RunningService.FAIL_IN_RUN, "true");
assertLaunchOutcome(EXIT_EXCEPTION_THROWN,
RunningService.FAILURE_MESSAGE,
RunningService.NAME,
ARG_CONF_PREFIXED,
configFile(conf));
}
/**
* Low level conf value extraction test...just to make sure
* that all works at the lower level.
* @throws Throwable
*/
@Test
public void testConfExtraction() throws Throwable {
ExitTrackingServiceLauncher<Service> launcher =
new ExitTrackingServiceLauncher<>(RunningService.NAME);
launcher.bindCommandOptions();
Configuration conf = newConf("propagated", "true");
assertEquals("true", conf.get("propagated", "unset"));
Configuration extracted = new Configuration(false);
List<String> argsList =
asList("Name", ARG_CONF_PREFIXED, configFile(conf));
List<String> args = launcher.extractCommandOptions(extracted,
argsList);
if (!args.isEmpty()) {
assertEquals(0, args.size(), "args beginning with " + args.get(0));
}
assertEquals("true", extracted.get("propagated", "unset"));
}
@Test
public void testDualConfArgs() throws Throwable {
ExitTrackingServiceLauncher<Service> launcher =
new ExitTrackingServiceLauncher<>(RunningService.NAME);
launcher.bindCommandOptions();
String key1 = "key1";
Configuration conf1 = newConf(key1, "true");
String key2 = "file2";
Configuration conf2 = newConf(key2, "7");
Configuration extracted = new Configuration(false);
List<String> argsList =
asList("Name",
ARG_CONF_PREFIXED, configFile(conf1),
ARG_CONF_PREFIXED, configFile(conf2));
List<String> args = launcher.extractCommandOptions(extracted, argsList);
if (!args.isEmpty()) {
assertEquals(0, args.size(), "args beginning with " + args.get(0));
}
assertTrue(extracted.getBoolean(key1, false));
assertEquals(7, extracted.getInt(key2, -1));
}
@Test
public void testConfArgWrongFiletype(TestInfo testInfo) throws Throwable {
new File(CONF_FILE_DIR).mkdirs();
File file = new File(CONF_FILE_DIR, testInfo.getDisplayName());
try (FileWriter fileWriter = new FileWriter(file)) {
fileWriter.write("not-a-conf-file");
fileWriter.close();
}
assertLaunchOutcome(EXIT_COMMAND_ARGUMENT_ERROR,
"",
RunningService.NAME,
ARG_CONF_PREFIXED,
file.getAbsolutePath());
}
}
| TestServiceConf |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/injection/superclass/SuperclassInjectionTest.java | {
"start": 2398,
"end": 2573
} | class ____ {
String id;
@PostConstruct
void init() {
this.id = UUID.randomUUID().toString();
}
}
@Singleton
static | Head |
java | elastic__elasticsearch | modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java | {
"start": 5116,
"end": 62175
} | class ____ extends ESSingleNodeTestCase {
private Directory directory;
private IndexWriter indexWriter;
private DirectoryReader directoryReader;
private IndexService indexService;
private MapperService mapperService;
private PercolatorFieldMapper fieldMapper;
private PercolatorFieldMapper.PercolatorFieldType fieldType;
private List<Query> queries;
private PercolateQuery.QueryStore queryStore;
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singleton(PercolatorPlugin.class);
}
@Before
public void init() throws Exception {
directory = newDirectory();
IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer());
config.setMergePolicy(NoMergePolicy.INSTANCE);
indexWriter = new IndexWriter(directory, config);
String indexName = "test";
indexService = createIndex(indexName, Settings.EMPTY);
mapperService = indexService.mapperService();
String mapper = Strings.toString(
XContentFactory.jsonBuilder()
.startObject()
.startObject("type")
.startObject("properties")
.startObject("int_field")
.field("type", "integer")
.endObject()
.startObject("long_field")
.field("type", "long")
.endObject()
.startObject("half_float_field")
.field("type", "half_float")
.endObject()
.startObject("float_field")
.field("type", "float")
.endObject()
.startObject("double_field")
.field("type", "double")
.endObject()
.startObject("ip_field")
.field("type", "ip")
.endObject()
.startObject("field")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
.endObject()
);
mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
String queryField = "query_field";
String percolatorMapper = Strings.toString(
XContentFactory.jsonBuilder()
.startObject()
.startObject("type")
.startObject("properties")
.startObject(queryField)
.field("type", "percolator")
.endObject()
.endObject()
.endObject()
.endObject()
);
mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper().mappers().getMapper(queryField);
fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
queries = new ArrayList<>();
queryStore = ctx -> docId -> this.queries.get(docId);
}
@After
public void deinit() throws Exception {
directoryReader.close();
directory.close();
}
public void testDuel() throws Exception {
int numFields = randomIntBetween(1, 3);
Map<String, List<String>> stringContent = new HashMap<>();
for (int i = 0; i < numFields; i++) {
int numTokens = randomIntBetween(1, 64);
List<String> values = new ArrayList<>();
for (int j = 0; j < numTokens; j++) {
values.add(randomAlphaOfLength(8));
}
stringContent.put("field" + i, values);
}
List<String> stringFields = new ArrayList<>(stringContent.keySet());
int numValues = randomIntBetween(16, 64);
List<Integer> intValues = new ArrayList<>(numValues);
for (int j = 0; j < numValues; j++) {
intValues.add(randomInt());
}
Collections.sort(intValues);
try (SearchContext searchContext = createSearchContext(indexService)) {
SearchExecutionContext context = searchContext.getSearchExecutionContext();
MappedFieldType intFieldType = mapperService.fieldType("int_field");
List<Supplier<Query>> queryFunctions = new ArrayList<>();
queryFunctions.add(MatchNoDocsQuery::new);
queryFunctions.add(MatchAllDocsQuery::new);
queryFunctions.add(() -> new TermQuery(new Term("unknown_field", "value")));
String field1 = randomFrom(stringFields);
queryFunctions.add(() -> new TermQuery(new Term(field1, randomFrom(stringContent.get(field1)))));
String field2 = randomFrom(stringFields);
queryFunctions.add(() -> new TermQuery(new Term(field2, randomFrom(stringContent.get(field2)))));
queryFunctions.add(() -> intFieldType.termQuery(randomFrom(intValues), context));
queryFunctions.add(() -> intFieldType.termsQuery(Arrays.asList(randomFrom(intValues), randomFrom(intValues)), context));
queryFunctions.add(
() -> intFieldType.rangeQuery(
intValues.get(4),
intValues.get(intValues.size() - 4),
true,
true,
ShapeRelation.WITHIN,
null,
null,
context
)
);
queryFunctions.add(
() -> new TermInSetQuery(
field1,
List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1))))
)
);
queryFunctions.add(
() -> new TermInSetQuery(
field2,
List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1))))
)
);
// many iterations with boolean queries, which are the most complex queries to deal with when nested
int numRandomBoolQueries = 1000;
for (int i = 0; i < numRandomBoolQueries; i++) {
queryFunctions.add(() -> createRandomBooleanQuery(1, stringFields, stringContent, intFieldType, intValues, context));
}
queryFunctions.add(() -> {
int numClauses = randomIntBetween(1, 1 << randomIntBetween(2, 4));
List<Query> clauses = new ArrayList<>();
for (int i = 0; i < numClauses; i++) {
String field = randomFrom(stringFields);
clauses.add(new TermQuery(new Term(field, randomFrom(stringContent.get(field)))));
}
return new DisjunctionMaxQuery(clauses, 0.01f);
});
queryFunctions.add(() -> {
Float minScore = randomBoolean() ? null : (float) randomIntBetween(1, 1000);
Query innerQuery;
if (randomBoolean()) {
innerQuery = new TermQuery(new Term(field1, randomFrom(stringContent.get(field1))));
} else {
innerQuery = new PhraseQuery(field1, randomFrom(stringContent.get(field1)), randomFrom(stringContent.get(field1)));
}
return new FunctionScoreQuery(innerQuery, minScore, 1f);
});
List<LuceneDocument> documents = new ArrayList<>();
for (Supplier<Query> queryFunction : queryFunctions) {
Query query = queryFunction.get();
addQuery(query, documents);
}
indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);
LuceneDocument document = new LuceneDocument();
for (Map.Entry<String, List<String>> entry : stringContent.entrySet()) {
String value = entry.getValue().stream().collect(Collectors.joining(" "));
document.add(new TextField(entry.getKey(), value, Field.Store.NO));
}
for (Integer intValue : intValues) {
NumberFieldMapper.NumberType.INTEGER.addFields(document, "int_field", intValue, IndexType.points(true, true), false);
}
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
}
private BooleanQuery createRandomBooleanQuery(
int depth,
List<String> fields,
Map<String, List<String>> content,
MappedFieldType intFieldType,
List<Integer> intValues,
SearchExecutionContext context
) {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
int numClauses = randomIntBetween(1, 1 << randomIntBetween(2, 4)); // use low numbers of clauses more often
int numShouldClauses = 0;
boolean onlyShouldClauses = rarely();
for (int i = 0; i < numClauses; i++) {
Occur occur;
if (onlyShouldClauses) {
occur = Occur.SHOULD;
if (randomBoolean()) {
String field = randomFrom(fields);
builder.add(new TermQuery(new Term(field, randomFrom(content.get(field)))), occur);
} else {
builder.add(intFieldType.termQuery(randomFrom(intValues), context), occur);
}
} else if (rarely() && depth <= 3) {
occur = randomFrom(Arrays.asList(Occur.FILTER, Occur.MUST, Occur.SHOULD));
builder.add(createRandomBooleanQuery(depth + 1, fields, content, intFieldType, intValues, context), occur);
} else if (rarely()) {
if (randomBoolean()) {
occur = randomFrom(Arrays.asList(Occur.FILTER, Occur.MUST, Occur.SHOULD));
if (randomBoolean()) {
builder.add(new TermQuery(new Term("unknown_field", randomAlphaOfLength(8))), occur);
} else {
builder.add(intFieldType.termQuery(randomFrom(intValues), context), occur);
}
} else if (randomBoolean()) {
String field = randomFrom(fields);
builder.add(new TermQuery(new Term(field, randomFrom(content.get(field)))), occur = Occur.MUST_NOT);
} else {
builder.add(intFieldType.termQuery(randomFrom(intValues), context), occur = Occur.MUST_NOT);
}
} else {
if (randomBoolean()) {
occur = randomFrom(Arrays.asList(Occur.FILTER, Occur.MUST, Occur.SHOULD));
if (randomBoolean()) {
String field = randomFrom(fields);
builder.add(new TermQuery(new Term(field, randomFrom(content.get(field)))), occur);
} else {
builder.add(intFieldType.termQuery(randomFrom(intValues), context), occur);
}
} else {
builder.add(new TermQuery(new Term("unknown_field", randomAlphaOfLength(8))), occur = Occur.MUST_NOT);
}
}
if (occur == Occur.SHOULD) {
numShouldClauses++;
}
}
builder.setMinimumNumberShouldMatch(randomIntBetween(0, numShouldClauses));
return builder.build();
}
public void testDuel2() throws Exception {
List<String> stringValues = new ArrayList<>();
stringValues.add("value1");
stringValues.add("value2");
stringValues.add("value3");
MappedFieldType intFieldType = mapperService.fieldType("int_field");
List<int[]> ranges = new ArrayList<>();
ranges.add(new int[] { -5, 5 });
ranges.add(new int[] { 0, 10 });
ranges.add(new int[] { 15, 50 });
try (SearchContext searchContext = createSearchContext(indexService)) {
SearchExecutionContext context = searchContext.getSearchExecutionContext();
List<LuceneDocument> documents = new ArrayList<>();
{
addQuery(new TermQuery(new Term("string_field", randomFrom(stringValues))), documents);
}
{
addQuery(new PhraseQuery(0, "string_field", stringValues.toArray(new String[0])), documents);
}
{
int[] range = randomFrom(ranges);
Query rangeQuery = intFieldType.rangeQuery(range[0], range[1], true, true, null, null, null, context);
addQuery(rangeQuery, documents);
}
{
int numBooleanQueries = randomIntBetween(1, 5);
for (int i = 0; i < numBooleanQueries; i++) {
Query randomBQ = randomBQ(1, stringValues, ranges, intFieldType, context);
addQuery(randomBQ, documents);
}
}
{
addQuery(new MatchNoDocsQuery(), documents);
}
{
addQuery(new MatchAllDocsQuery(), documents);
}
indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);
LuceneDocument document = new LuceneDocument();
for (String value : stringValues) {
document.add(new TextField("string_field", value, Field.Store.NO));
logger.info("Test with document: {}" + document);
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
for (int[] range : ranges) {
NumberFieldMapper.NumberType.INTEGER.addFields(
document,
"int_field",
between(range[0], range[1]),
IndexType.points(true, true),
false
);
logger.info("Test with document: {}" + document);
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
}
}
private BooleanQuery randomBQ(
int depth,
List<String> stringValues,
List<int[]> ranges,
MappedFieldType intFieldType,
SearchExecutionContext context
) {
final int numClauses = randomIntBetween(1, 4);
final boolean onlyShouldClauses = randomBoolean();
final BooleanQuery.Builder builder = new BooleanQuery.Builder();
int numShouldClauses = 0;
for (int i = 0; i < numClauses; i++) {
Query subQuery;
if (randomBoolean() && depth <= 3) {
subQuery = randomBQ(depth + 1, stringValues, ranges, intFieldType, context);
} else if (randomBoolean()) {
int[] range = randomFrom(ranges);
subQuery = intFieldType.rangeQuery(range[0], range[1], true, true, null, null, null, context);
} else {
subQuery = new TermQuery(new Term("string_field", randomFrom(stringValues)));
}
Occur occur;
if (onlyShouldClauses) {
occur = Occur.SHOULD;
} else {
occur = randomFrom(Arrays.asList(Occur.FILTER, Occur.MUST, Occur.SHOULD));
}
if (occur == Occur.SHOULD) {
numShouldClauses++;
}
builder.add(subQuery, occur);
}
builder.setMinimumNumberShouldMatch(randomIntBetween(0, numShouldClauses));
return builder.build();
}
public void testDuelIdBased() throws Exception {
List<Function<String, Query>> queryFunctions = new ArrayList<>();
queryFunctions.add((id) -> new PrefixQuery(new Term("field", id)));
queryFunctions.add((id) -> new WildcardQuery(new Term("field", id + "*")));
queryFunctions.add((id) -> new CustomQuery(new Term("field", id)));
queryFunctions.add((id) -> new SpanTermQuery(new Term("field", id)));
queryFunctions.add((id) -> new TermQuery(new Term("field", id)));
queryFunctions.add((id) -> {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
return builder.build();
});
queryFunctions.add((id) -> {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", id)), Occur.MUST);
if (randomBoolean()) {
builder.add(new MatchNoDocsQuery("no reason"), Occur.MUST_NOT);
}
if (randomBoolean()) {
builder.add(new CustomQuery(new Term("field", id)), Occur.MUST);
}
return builder.build();
});
queryFunctions.add((id) -> {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", id)), Occur.SHOULD);
if (randomBoolean()) {
builder.add(new MatchNoDocsQuery("no reason"), Occur.MUST_NOT);
}
if (randomBoolean()) {
builder.add(new CustomQuery(new Term("field", id)), Occur.SHOULD);
}
return builder.build();
});
queryFunctions.add((id) -> {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new MatchAllDocsQuery(), Occur.MUST);
builder.add(new MatchAllDocsQuery(), Occur.MUST);
if (randomBoolean()) {
builder.add(new MatchNoDocsQuery("no reason"), Occur.MUST_NOT);
} else if (randomBoolean()) {
builder.add(new MatchAllDocsQuery(), Occur.MUST_NOT);
}
return builder.build();
});
queryFunctions.add((id) -> {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new MatchAllDocsQuery(), Occur.SHOULD);
builder.add(new MatchAllDocsQuery(), Occur.SHOULD);
if (randomBoolean()) {
builder.add(new MatchNoDocsQuery("no reason"), Occur.MUST_NOT);
} else if (randomBoolean()) {
builder.add(new MatchAllDocsQuery(), Occur.MUST_NOT);
}
return builder.build();
});
queryFunctions.add((id) -> {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new MatchAllDocsQuery(), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", id)), Occur.SHOULD);
if (randomBoolean()) {
builder.add(new MatchAllDocsQuery(), Occur.SHOULD);
}
if (randomBoolean()) {
builder.setMinimumNumberShouldMatch(2);
}
return builder.build();
});
queryFunctions.add((id) -> {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setMinimumNumberShouldMatch(randomIntBetween(0, 4));
builder.add(new TermQuery(new Term("field", id)), Occur.SHOULD);
builder.add(new CustomQuery(new Term("field", id)), Occur.SHOULD);
return builder.build();
});
queryFunctions.add((id) -> new MatchAllDocsQuery());
queryFunctions.add((id) -> new MatchNoDocsQuery("no reason at all"));
int numDocs = randomIntBetween(queryFunctions.size(), queryFunctions.size() * 3);
List<LuceneDocument> documents = new ArrayList<>();
for (int i = 0; i < numDocs; i++) {
String id = Integer.toString(i);
Query query = queryFunctions.get(i % queryFunctions.size()).apply(id);
addQuery(query, documents);
}
indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);
for (int i = 0; i < numDocs; i++) {
String id = Integer.toString(i);
Iterable<? extends IndexableField> doc = Collections.singleton(new StringField("field", id, Field.Store.NO));
MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
Iterable<? extends IndexableField> doc = Collections.singleton(new StringField("field", "value", Field.Store.NO));
MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
// Empty percolator doc:
memoryIndex = new MemoryIndex();
duelRun(queryStore, memoryIndex, shardSearcher);
}
public void testDuelSpecificQueries() throws Exception {
List<LuceneDocument> documents = new ArrayList<>();
BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery(
new Term[] { new Term("field", "quick"), new Term("field", "brown"), new Term("field", "fox") },
1.0f
);
addQuery(blendedTermQuery, documents);
SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("field", true).addClause(new SpanTermQuery(new Term("field", "quick")))
.addClause(new SpanTermQuery(new Term("field", "brown")))
.addClause(new SpanTermQuery(new Term("field", "fox")))
.build();
addQuery(spanNearQuery, documents);
SpanNearQuery spanNearQuery2 = new SpanNearQuery.Builder("field", true).addClause(new SpanTermQuery(new Term("field", "the")))
.addClause(new SpanTermQuery(new Term("field", "lazy")))
.addClause(new SpanTermQuery(new Term("field", "doc")))
.build();
SpanOrQuery spanOrQuery = new SpanOrQuery(spanNearQuery, spanNearQuery2);
addQuery(spanOrQuery, documents);
SpanNotQuery spanNotQuery = new SpanNotQuery(spanNearQuery, spanNearQuery);
addQuery(spanNotQuery, documents);
long lowerLong = randomIntBetween(0, 256);
long upperLong = lowerLong + randomIntBetween(0, 32);
addQuery(LongPoint.newRangeQuery("long_field", lowerLong, upperLong), documents);
indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);
Document document = new Document();
document.add(new TextField("field", "the quick brown fox jumps over the lazy dog", Field.Store.NO));
long randomLong = randomIntBetween((int) lowerLong, (int) upperLong);
document.add(new LongPoint("long_field", randomLong));
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
public void testRangeQueries() throws Exception {
List<LuceneDocument> docs = new ArrayList<>();
addQuery(IntPoint.newRangeQuery("int_field", 0, 5), docs);
addQuery(LongPoint.newRangeQuery("long_field", 5L, 10L), docs);
addQuery(HalfFloatPoint.newRangeQuery("half_float_field", 10, 15), docs);
addQuery(FloatPoint.newRangeQuery("float_field", 15, 20), docs);
addQuery(DoublePoint.newRangeQuery("double_field", 20, 25), docs);
addQuery(InetAddressPoint.newRangeQuery("ip_field", forString("192.168.0.1"), forString("192.168.0.10")), docs);
indexWriter.addDocuments(docs);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);
IndexVersion v = IndexVersionUtils.randomCompatibleVersion(random());
MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
Query query = fieldType.percolateQuery(
"_name",
queryStore,
Collections.singletonList(new BytesArray("{}")),
percolateSearcher,
false,
v
);
TopDocs topDocs = shardSearcher.search(query, 1);
assertEquals(1L, topDocs.totalHits.value());
assertEquals(1, topDocs.scoreDocs.length);
assertEquals(0, topDocs.scoreDocs[0].doc);
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new LongPoint("long_field", 7L)), new WhitespaceAnalyzer());
percolateSearcher = memoryIndex.createSearcher();
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
topDocs = shardSearcher.search(query, 1);
assertEquals(1L, topDocs.totalHits.value());
assertEquals(1, topDocs.scoreDocs.length);
assertEquals(1, topDocs.scoreDocs[0].doc);
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new HalfFloatPoint("half_float_field", 12)), new WhitespaceAnalyzer());
percolateSearcher = memoryIndex.createSearcher();
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
topDocs = shardSearcher.search(query, 1);
assertEquals(1L, topDocs.totalHits.value());
assertEquals(1, topDocs.scoreDocs.length);
assertEquals(2, topDocs.scoreDocs[0].doc);
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new FloatPoint("float_field", 17)), new WhitespaceAnalyzer());
percolateSearcher = memoryIndex.createSearcher();
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
topDocs = shardSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits.value());
assertEquals(1, topDocs.scoreDocs.length);
assertEquals(3, topDocs.scoreDocs[0].doc);
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new DoublePoint("double_field", 21)), new WhitespaceAnalyzer());
percolateSearcher = memoryIndex.createSearcher();
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
topDocs = shardSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits.value());
assertEquals(1, topDocs.scoreDocs.length);
assertEquals(4, topDocs.scoreDocs[0].doc);
memoryIndex = MemoryIndex.fromDocument(
Collections.singleton(new InetAddressPoint("ip_field", forString("192.168.0.4"))),
new WhitespaceAnalyzer()
);
percolateSearcher = memoryIndex.createSearcher();
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
topDocs = shardSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits.value());
assertEquals(1, topDocs.scoreDocs.length);
assertEquals(5, topDocs.scoreDocs[0].doc);
}
/**
 * Duels the percolator's candidate-query optimization against a brute-force control query
 * ({@code duelRun}) for every supported point/range field type (int, long, half_float,
 * float, double, ip). For each type it indexes one range percolator query, then percolates
 * both an in-range document (asserting the exact matching query doc id) and a random,
 * likely out-of-range document (assertion delegated to duelRun).
 *
 * NOTE: the expected doc ids (0..5) are fixed by the order the queries are added below —
 * do not reorder the addQuery calls without updating the assertions.
 */
public void testDuelRangeQueries() throws Exception {
List<LuceneDocument> documents = new ArrayList<>();
// One range query per field type; bounds are randomized but lower <= upper by construction.
int lowerInt = randomIntBetween(0, 256);
int upperInt = lowerInt + randomIntBetween(0, 32);
addQuery(IntPoint.newRangeQuery("int_field", lowerInt, upperInt), documents);
long lowerLong = randomIntBetween(0, 256);
long upperLong = lowerLong + randomIntBetween(0, 32);
addQuery(LongPoint.newRangeQuery("long_field", lowerLong, upperLong), documents);
float lowerHalfFloat = randomIntBetween(0, 256);
float upperHalfFloat = lowerHalfFloat + randomIntBetween(0, 32);
addQuery(HalfFloatPoint.newRangeQuery("half_float_field", lowerHalfFloat, upperHalfFloat), documents);
float lowerFloat = randomIntBetween(0, 256);
float upperFloat = lowerFloat + randomIntBetween(0, 32);
addQuery(FloatPoint.newRangeQuery("float_field", lowerFloat, upperFloat), documents);
double lowerDouble = randomDoubleBetween(0, 256, true);
double upperDouble = lowerDouble + randomDoubleBetween(0, 32, true);
addQuery(DoublePoint.newRangeQuery("double_field", lowerDouble, upperDouble), documents);
int lowerIpPart = randomIntBetween(0, 255);
int upperIpPart = randomIntBetween(lowerIpPart, 255);
addQuery(
InetAddressPoint.newRangeQuery("ip_field", forString("192.168.1." + lowerIpPart), forString("192.168.1." + upperIpPart)),
documents
);
indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);
// int: percolate an in-range value (must hit query doc 0), then a random value (duel only).
int randomInt = randomIntBetween(lowerInt, upperInt);
Iterable<? extends IndexableField> doc = Collections.singleton(new IntPoint("int_field", randomInt));
MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
TopDocs result = executeQuery(queryStore, memoryIndex, shardSearcher);
assertThat(result.scoreDocs.length, equalTo(1));
assertThat(result.scoreDocs[0].doc, equalTo(0));
duelRun(queryStore, memoryIndex, shardSearcher);
doc = Collections.singleton(new IntPoint("int_field", randomInt()));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
// long: same pattern, expected hit is query doc 1.
long randomLong = randomIntBetween((int) lowerLong, (int) upperLong);
doc = Collections.singleton(new LongPoint("long_field", randomLong));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
result = executeQuery(queryStore, memoryIndex, shardSearcher);
assertThat(result.scoreDocs.length, equalTo(1));
assertThat(result.scoreDocs[0].doc, equalTo(1));
duelRun(queryStore, memoryIndex, shardSearcher);
doc = Collections.singleton(new LongPoint("long_field", randomLong()));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
// half_float: expected hit is query doc 2.
float randomHalfFloat = randomIntBetween((int) lowerHalfFloat, (int) upperHalfFloat);
doc = Collections.singleton(new HalfFloatPoint("half_float_field", randomHalfFloat));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
result = executeQuery(queryStore, memoryIndex, shardSearcher);
assertThat(result.scoreDocs.length, equalTo(1));
assertThat(result.scoreDocs[0].doc, equalTo(2));
duelRun(queryStore, memoryIndex, shardSearcher);
doc = Collections.singleton(new HalfFloatPoint("half_float_field", randomFloat()));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
// float: expected hit is query doc 3.
float randomFloat = randomIntBetween((int) lowerFloat, (int) upperFloat);
doc = Collections.singleton(new FloatPoint("float_field", randomFloat));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
result = executeQuery(queryStore, memoryIndex, shardSearcher);
assertThat(result.scoreDocs.length, equalTo(1));
assertThat(result.scoreDocs[0].doc, equalTo(3));
duelRun(queryStore, memoryIndex, shardSearcher);
doc = Collections.singleton(new FloatPoint("float_field", randomFloat()));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
// double: expected hit is query doc 4.
double randomDouble = randomDoubleBetween(lowerDouble, upperDouble, true);
doc = Collections.singleton(new DoublePoint("double_field", randomDouble));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
result = executeQuery(queryStore, memoryIndex, shardSearcher);
assertThat(result.scoreDocs.length, equalTo(1));
assertThat(result.scoreDocs[0].doc, equalTo(4));
duelRun(queryStore, memoryIndex, shardSearcher);
doc = Collections.singleton(new DoublePoint("double_field", randomFloat()));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
// ip: expected hit is query doc 5; the out-of-range probe picks any last octet 0..255.
doc = Collections.singleton(new InetAddressPoint("ip_field", forString("192.168.1." + randomIntBetween(lowerIpPart, upperIpPart))));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
result = executeQuery(queryStore, memoryIndex, shardSearcher);
assertThat(result.scoreDocs.length, equalTo(1));
assertThat(result.scoreDocs[0].doc, equalTo(5));
duelRun(queryStore, memoryIndex, shardSearcher);
doc = Collections.singleton(new InetAddressPoint("ip_field", forString("192.168.1." + randomIntBetween(0, 255))));
memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
/**
 * Verifies percolation of queries that contain MatchAllDocsQuery clauses. Five percolator
 * queries are indexed (doc ids 0..4, fixed by insertion order below); a document with
 * field=value1 is percolated and must match exactly docs 0, 1 and 4:
 *  - doc 0: pure match_all -> always matches
 *  - doc 1: MUST value1 + MUST match_all -> matches
 *  - doc 2: MUST value2 -> no match
 *  - doc 3: MUST match_all + MUST_NOT match_all -> never matches
 *  - doc 4: SHOULD value2 / SHOULD match_all -> matches via match_all
 * The same expectation is re-checked through a ConstantScoreQuery wrapper.
 */
public void testPercolateMatchAll() throws Exception {
List<LuceneDocument> docs = new ArrayList<>();
addQuery(new MatchAllDocsQuery(), docs);
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
builder.add(new MatchAllDocsQuery(), Occur.MUST);
addQuery(builder.build(), docs);
builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
builder.add(new MatchAllDocsQuery(), Occur.MUST);
builder.add(new MatchAllDocsQuery(), Occur.MUST);
addQuery(builder.build(), docs);
builder = new BooleanQuery.Builder();
builder.add(new MatchAllDocsQuery(), Occur.MUST);
builder.add(new MatchAllDocsQuery(), Occur.MUST_NOT);
addQuery(builder.build(), docs);
builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", "value2")), Occur.SHOULD);
builder.add(new MatchAllDocsQuery(), Occur.SHOULD);
addQuery(builder.build(), docs);
indexWriter.addDocuments(docs);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);
// Percolate a single-field document: field=value1.
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value1", new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery(
"_name",
queryStore,
Collections.singletonList(new BytesArray("{}")),
percolateSearcher,
false,
IndexVersion.current()
);
// Sort by doc id so the expected hits come back in insertion order.
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
assertEquals(3L, topDocs.totalHits.value());
assertEquals(3, topDocs.scoreDocs.length);
assertEquals(0, topDocs.scoreDocs[0].doc);
assertEquals(1, topDocs.scoreDocs[1].doc);
assertEquals(4, topDocs.scoreDocs[2].doc);
// Same matches must be produced when scoring is disabled via ConstantScoreQuery.
topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
assertEquals(3L, topDocs.totalHits.value());
assertEquals(3, topDocs.scoreDocs.length);
assertEquals(0, topDocs.scoreDocs[0].doc);
assertEquals(1, topDocs.scoreDocs[1].doc);
assertEquals(4, topDocs.scoreDocs[2].doc);
}
/**
 * Verifies percolation of FunctionScoreQuery-wrapped queries, including min-score filtering.
 * Four percolator queries are indexed (doc ids 0..3, fixed by insertion order):
 *  - doc 0: function_score(term value) with no min score -> matches
 *  - doc 1: function_score(term value) with min score 10 -> filtered out
 *  - doc 2: function_score(match_all) with no min score  -> matches
 *  - doc 3: function_score(match_all) with min score 10  -> filtered out
 * A document with field=value is percolated and only docs 0 and 2 must match.
 */
public void testFunctionScoreQuery() throws Exception {
List<LuceneDocument> docs = new ArrayList<>();
addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), null, 1f), docs);
addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), 10f, 1f), docs);
addQuery(new FunctionScoreQuery(new MatchAllDocsQuery(), null, 1f), docs);
addQuery(new FunctionScoreQuery(new MatchAllDocsQuery(), 10F, 1f), docs);
indexWriter.addDocuments(docs);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery(
"_name",
queryStore,
Collections.singletonList(new BytesArray("{}")),
percolateSearcher,
false,
IndexVersion.current()
);
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
assertEquals(2L, topDocs.totalHits.value());
assertEquals(2, topDocs.scoreDocs.length);
assertEquals(0, topDocs.scoreDocs[0].doc);
assertEquals(2, topDocs.scoreDocs[1].doc);
}
/**
 * Percolates a multi-document (nested-style) source against conjunction queries, exercising
 * both forms of the candidate query:
 *  1. With few extracted terms, the candidate query uses a CoveringQuery of term clauses.
 *  2. With more terms than IndexSearcher's max clause count (temporarily lowered to 100),
 *     it must fall back to a single TermInSetQuery.
 * Three percolator queries are indexed (doc ids 0..2, fixed by insertion order):
 *  - doc 0: value1 AND value2, doc 1: value2 AND value3, doc 2: value3 AND value4.
 */
public void testPercolateSmallAndLargeDocument() throws Exception {
List<LuceneDocument> docs = new ArrayList<>();
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
builder.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
addQuery(builder.build(), docs);
builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
builder.add(new TermQuery(new Term("field", "value3")), Occur.MUST);
addQuery(builder.build(), docs);
builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", "value3")), Occur.MUST);
builder.add(new TermQuery(new Term("field", "value4")), Occur.MUST);
addQuery(builder.build(), docs);
indexWriter.addDocuments(docs);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);
IndexVersion v = IndexVersion.current();
// Case 1: percolate three documents held in a throwaway in-memory index.
// Docs 0+2 of the percolated source satisfy query docs 0 and 2 respectively.
try (Directory directory = new ByteBuffersDirectory()) {
try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) {
List<Document> documents = new ArrayList<>();
Document document = new Document();
document.add(new StringField("field", "value1", Field.Store.NO));
document.add(new StringField("field", "value2", Field.Store.NO));
documents.add(document);
document = new Document();
document.add(new StringField("field", "value5", Field.Store.NO));
document.add(new StringField("field", "value6", Field.Store.NO));
documents.add(document);
document = new Document();
document.add(new StringField("field", "value3", Field.Store.NO));
document.add(new StringField("field", "value4", Field.Store.NO));
documents.add(document);
iw.addDocuments(documents); // IW#addDocuments(...) ensures we end up with a single segment
}
try (IndexReader ir = DirectoryReader.open(directory)) {
IndexSearcher percolateSearcher = newSearcher(ir);
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery(
"_name",
queryStore,
Collections.singletonList(new BytesArray("{}")),
percolateSearcher,
false,
v
);
// Few terms -> candidate query built from a CoveringQuery.
BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery();
assertThat(candidateQuery.clauses().get(0).query(), instanceOf(CoveringQuery.class));
TopDocs topDocs = shardSearcher.search(query, 10);
assertEquals(2L, topDocs.totalHits.value());
assertEquals(2, topDocs.scoreDocs.length);
assertEquals(0, topDocs.scoreDocs[0].doc);
assertEquals(2, topDocs.scoreDocs[1].doc);
topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
assertEquals(2L, topDocs.totalHits.value());
assertEquals(2, topDocs.scoreDocs.length);
assertEquals(0, topDocs.scoreDocs[0].doc);
assertEquals(2, topDocs.scoreDocs[1].doc);
}
}
// This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery:
int origMaxClauseCount = IndexSearcher.getMaxClauseCount();
try (Directory directory = new ByteBuffersDirectory()) {
final int maxClauseCount = 100;
IndexSearcher.setMaxClauseCount(maxClauseCount);
try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) {
// Single document carrying value2..value101 so the extracted-term count exceeds
// the (lowered) max clause count and forces the TermInSetQuery fallback.
Document document = new Document();
for (int i = 0; i < maxClauseCount; i++) {
int fieldNumber = 2 + i;
document.add(new StringField("field", "value" + fieldNumber, Field.Store.NO));
}
iw.addDocument(document);
}
try (IndexReader ir = DirectoryReader.open(directory)) {
IndexSearcher percolateSearcher = newSearcher(ir);
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery(
"_name",
queryStore,
Collections.singletonList(new BytesArray("{}")),
percolateSearcher,
false,
v
);
BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery();
assertThat(candidateQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class));
// value2..value101 satisfies query docs 1 (value2+value3) and 2 (value3+value4),
// but not doc 0 (needs value1).
TopDocs topDocs = shardSearcher.search(query, 10);
assertEquals(2L, topDocs.totalHits.value());
assertEquals(2, topDocs.scoreDocs.length);
assertEquals(1, topDocs.scoreDocs[0].doc);
assertEquals(2, topDocs.scoreDocs[1].doc);
topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
assertEquals(2L, topDocs.totalHits.value());
assertEquals(2, topDocs.scoreDocs.length);
assertEquals(1, topDocs.scoreDocs[0].doc);
assertEquals(2, topDocs.scoreDocs[1].doc);
}
} finally {
// Restore the global max clause count so other tests are unaffected.
IndexSearcher.setMaxClauseCount(origMaxClauseCount);
}
}
/**
 * Verifies that term extraction handles duplicated terms across nested boolean clauses
 * without over-counting toward the minimum-should-match optimization. Two percolator
 * queries are indexed (doc ids fixed by insertion order):
 *  - doc 0: (value1 AND value2) AND (value2 AND value3) — value2 appears in both sub-clauses
 *  - doc 1: msm=2 over three SHOULD conjunctions sharing terms pairwise
 * A document containing value1 value2 value3 must match both.
 */
public void testDuplicatedClauses() throws Exception {
List<LuceneDocument> docs = new ArrayList<>();
BooleanQuery.Builder builder = new BooleanQuery.Builder();
BooleanQuery.Builder builder1 = new BooleanQuery.Builder();
builder1.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
builder1.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
builder.add(builder1.build(), Occur.MUST);
BooleanQuery.Builder builder2 = new BooleanQuery.Builder();
builder2.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
builder2.add(new TermQuery(new Term("field", "value3")), Occur.MUST);
builder.add(builder2.build(), Occur.MUST);
addQuery(builder.build(), docs);
// Second query: at least 2 of the 3 SHOULD conjunctions must match.
builder = new BooleanQuery.Builder().setMinimumNumberShouldMatch(2);
builder1 = new BooleanQuery.Builder();
builder1.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
builder1.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
builder.add(builder1.build(), Occur.SHOULD);
builder2 = new BooleanQuery.Builder();
builder2.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
builder2.add(new TermQuery(new Term("field", "value3")), Occur.MUST);
builder.add(builder2.build(), Occur.SHOULD);
BooleanQuery.Builder builder3 = new BooleanQuery.Builder();
builder3.add(new TermQuery(new Term("field", "value3")), Occur.MUST);
builder3.add(new TermQuery(new Term("field", "value4")), Occur.MUST);
builder.add(builder3.build(), Occur.SHOULD);
addQuery(builder.build(), docs);
indexWriter.addDocuments(docs);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);
IndexVersion v = IndexVersion.current();
List<BytesReference> sources = Collections.singletonList(new BytesArray("{}"));
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value1 value2 value3", new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
assertEquals(2L, topDocs.totalHits.value());
assertEquals(0, topDocs.scoreDocs[0].doc);
assertEquals(1, topDocs.scoreDocs[1].doc);
}
/**
 * Verifies minimum-should-match handling when the same term is duplicated directly within
 * one boolean query (value2 twice, value3 three times, msm=3). Three percolations of the
 * single indexed query (doc 0) must all match:
 *  - "value1 value4 value5": three distinct SHOULD terms satisfy msm=3
 *  - "value1 value2": value2's duplicate clause counts, reaching msm=3
 *  - "value3": value3's triplicate clauses alone reach msm=3
 */
public void testDuplicatedClauses2() throws Exception {
List<LuceneDocument> docs = new ArrayList<>();
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setMinimumNumberShouldMatch(3);
builder.add(new TermQuery(new Term("field", "value1")), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", "value2")), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", "value2")), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", "value3")), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", "value3")), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", "value3")), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", "value4")), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", "value5")), Occur.SHOULD);
addQuery(builder.build(), docs);
indexWriter.addDocuments(docs);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);
IndexVersion v = IndexVersion.current();
List<BytesReference> sources = Collections.singletonList(new BytesArray("{}"));
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value1 value4 value5", new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
assertEquals(1L, topDocs.totalHits.value());
assertEquals(0, topDocs.scoreDocs[0].doc);
memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value1 value2", new WhitespaceAnalyzer());
percolateSearcher = memoryIndex.createSearcher();
query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
assertEquals(1L, topDocs.totalHits.value());
assertEquals(0, topDocs.scoreDocs[0].doc);
memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value3", new WhitespaceAnalyzer());
percolateSearcher = memoryIndex.createSearcher();
query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
assertEquals(1L, topDocs.totalHits.value());
assertEquals(0, topDocs.scoreDocs[0].doc);
}
/**
 * Regression test for a minimum-should-match bug involving a disjunction that mixes a
 * FILTER sub-clause, a MUST_NOT term, and two non-overlapping int range clauses (msm=2).
 * A document whose int_field values (4 and 7) fall in both ranges must match the query.
 */
public void testMsmAndRanges_disjunction() throws Exception {
// Recreates a similar scenario that made testDuel() fail randomly:
// https://github.com/elastic/elasticsearch/issues/29393
List<LuceneDocument> docs = new ArrayList<>();
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setMinimumNumberShouldMatch(2);
BooleanQuery.Builder builder1 = new BooleanQuery.Builder();
builder1.add(new TermQuery(new Term("field", "value1")), Occur.FILTER);
builder.add(builder1.build(), Occur.SHOULD);
builder.add(new TermQuery(new Term("field", "value2")), Occur.MUST_NOT);
builder.add(IntPoint.newRangeQuery("int_field", 0, 5), Occur.SHOULD);
builder.add(IntPoint.newRangeQuery("int_field", 6, 10), Occur.SHOULD);
addQuery(builder.build(), docs);
indexWriter.addDocuments(docs);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);
IndexVersion v = IndexVersion.current();
List<BytesReference> sources = Collections.singletonList(new BytesArray("{}"));
// 4 hits range [0,5], 7 hits range [6,10] -> both SHOULD ranges satisfied, msm=2 met.
Document document = new Document();
document.add(new IntPoint("int_field", 4));
document.add(new IntPoint("int_field", 7));
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
assertEquals(1L, topDocs.totalHits.value());
assertEquals(0, topDocs.scoreDocs[0].doc);
}
/**
 * Duels the optimized percolate query against a brute-force ControlQuery over the same
 * memory index: both must return identical hits, scores, and (when scoring) explanations.
 * Randomly exercises both scoring and constant-score (filter) execution paths.
 * On mismatch, dumps extensive per-document diagnostics (extracted query terms, msm field,
 * stored query string) before rethrowing the AssertionError.
 *
 * @param percolateQueryStore supplies the stored queries for both sides of the duel
 * @param memoryIndex         the percolated document(s)
 * @param shardSearcher       searcher over the indexed percolator queries
 */
private void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher)
throws IOException {
boolean requireScore = randomBoolean();
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
Query percolateQuery = fieldType.percolateQuery(
"_name",
percolateQueryStore,
Collections.singletonList(new BytesArray("{}")),
percolateSearcher,
false,
IndexVersion.current()
);
// Randomly wrap in ConstantScoreQuery to also exercise the non-scoring path.
Query query = requireScore ? percolateQuery : new ConstantScoreQuery(percolateQuery);
TopDocs topDocs = shardSearcher.search(query, 100);
Query controlQuery = new ControlQuery(memoryIndex, percolateQueryStore);
controlQuery = requireScore ? controlQuery : new ConstantScoreQuery(controlQuery);
TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100);
try {
assertThat(topDocs.totalHits.value(), equalTo(controlTopDocs.totalHits.value()));
assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length));
for (int j = 0; j < topDocs.scoreDocs.length; j++) {
assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc));
assertThat(topDocs.scoreDocs[j].score, equalTo(controlTopDocs.scoreDocs[j].score));
if (requireScore) {
// When scoring, the explanations must agree too (match flag and value).
Explanation explain1 = shardSearcher.explain(query, topDocs.scoreDocs[j].doc);
Explanation explain2 = shardSearcher.explain(controlQuery, controlTopDocs.scoreDocs[j].doc);
assertThat(explain1.isMatch(), equalTo(explain2.isMatch()));
assertThat(explain1.getValue(), equalTo(explain2.getValue()));
}
}
} catch (AssertionError ae) {
// Mismatch: log everything useful for reproducing/debugging, then rethrow.
logger.error("topDocs.totalHits={}", topDocs.totalHits);
logger.error("controlTopDocs.totalHits={}", controlTopDocs.totalHits);
logger.error("topDocs.scoreDocs.length={}", topDocs.scoreDocs.length);
logger.error("controlTopDocs.scoreDocs.length={}", controlTopDocs.scoreDocs.length);
for (int i = 0; i < topDocs.scoreDocs.length; i++) {
logger.error("topDocs.scoreDocs[{}].doc={}", i, topDocs.scoreDocs[i].doc);
logger.error("topDocs.scoreDocs[{}].score={}", i, topDocs.scoreDocs[i].score);
}
StoredFields storedFields = shardSearcher.storedFields();
for (int i = 0; i < controlTopDocs.scoreDocs.length; i++) {
logger.error("controlTopDocs.scoreDocs[{}].doc={}", i, controlTopDocs.scoreDocs[i].doc);
logger.error("controlTopDocs.scoreDocs[{}].score={}", i, controlTopDocs.scoreDocs[i].score);
// Additional stored information that is useful when debugging:
String queryToString = storedFields.document(controlTopDocs.scoreDocs[i].doc).get("query_to_string");
logger.error("controlTopDocs.scoreDocs[{}].query_to_string={}", i, queryToString);
// Reconstruct the extracted query terms indexed for this query document by walking
// the terms dictionary and checking which postings contain this doc.
TermsEnum tenum = MultiTerms.getTerms(shardSearcher.getIndexReader(), fieldType.queryTermsField.name()).iterator();
StringBuilder builder = new StringBuilder();
for (BytesRef term = tenum.next(); term != null; term = tenum.next()) {
PostingsEnum penum = tenum.postings(null);
if (penum.advance(controlTopDocs.scoreDocs[i].doc) == controlTopDocs.scoreDocs[i].doc) {
builder.append(term.utf8ToString()).append(',');
}
}
logger.error("controlTopDocs.scoreDocs[{}].query_terms_field={}", i, builder.toString());
NumericDocValues numericValues = MultiDocValues.getNumericValues(
shardSearcher.getIndexReader(),
fieldType.minimumShouldMatchField.name()
);
boolean exact = numericValues.advanceExact(controlTopDocs.scoreDocs[i].doc);
if (exact) {
logger.error("controlTopDocs.scoreDocs[{}].minimum_should_match_field={}", i, numericValues.longValue());
} else {
// Some queries do not have a msm field. (e.g. unsupported queries)
logger.error("controlTopDocs.scoreDocs[{}].minimum_should_match_field=[NO_VALUE]", i);
}
}
throw ae;
}
}
/**
 * Indexes {@code query} as a percolator query document: runs it through the field mapper's
 * query processing, attaches its string form for debugging, and records it in both the
 * pending document list and the shared {@code queries} list (used by ControlQuery).
 *
 * @param query the query to register as a percolator query
 * @param docs  accumulator the resulting Lucene document is appended to
 */
private void addQuery(Query query, List<LuceneDocument> docs) {
DocumentParserContext parserContext = new TestDocumentParserContext();
fieldMapper.processQuery(query, parserContext);
LuceneDocument percolatorDoc = parserContext.doc();
// Store the query's toString() alongside it so failures are easier to diagnose.
percolatorDoc.add(new StoredField("query_to_string", query.toString()));
queries.add(query);
docs.add(percolatorDoc);
}
/**
 * Builds a percolate query over the given memory index and runs it against the shard
 * searcher, returning the top 10 hits.
 *
 * @param percolateQueryStore supplies the stored percolator queries
 * @param memoryIndex         holds the document(s) being percolated
 * @param shardSearcher       searcher over the indexed percolator query documents
 * @return the top 10 matching percolator query documents
 */
private TopDocs executeQuery(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher)
throws IOException {
List<BytesReference> source = Collections.singletonList(new BytesArray("{}"));
Query percolate = fieldType.percolateQuery(
"_name",
percolateQueryStore,
source,
memoryIndex.createSearcher(),
false,
IndexVersion.current()
);
return shardSearcher.search(percolate, 10);
}
private static final | CandidateQueryTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/legacy/Container.java | {
"start": 306,
"end": 4206
} | class ____ {
private Simple simple;
private String name;
private One one;
private Many many;
private int count;
public void setSimple(Simple simple) {
this.simple = simple;
}
public Simple getSimple() {
return simple;
}
/**
* Returns the name.
* @return String
*/
public String getName() {
return name;
}
/**
* Sets the name.
* @param name The name to set
*/
public void setName(String name) {
this.name = name;
}
public String toString() {
return name + " = "
+ (simple==null ? "nil" : Integer.toString( simple.getCount() ) )
+ "/" + ( one==null ? "nil" : one.getKey().toString() )
+ "/" + ( many==null ? "nil" : many.getKey().toString() );
}
/**
* Returns the one.
* @return One
*/
public One getOne() {
return one;
}
/**
* Sets the one.
* @param one The one to set
*/
public void setOne(One one) {
this.one = one;
}
/**
* Returns the many.
* @return Many
*/
public Many getMany() {
return many;
}
/**
* Sets the many.
* @param many The many to set
*/
public void setMany(Many many) {
this.many = many;
}
/**
* Returns the count.
* @return int
*/
public int getCount() {
return count;
}
/**
* Sets the count.
* @param count The count to set
*/
public void setCount(int count) {
this.count = count;
}
public int hashCode() {
return count + name.hashCode();
}
public boolean equals(Object other) {
ContainerInnerClass cic = (ContainerInnerClass) other;
return cic.name.equals(name)
&& cic.count==count
&& cic.one.getKey().equals(one.getKey())
&& cic.many.getKey().equals(many.getKey())
&& cic.simple.getCount()==simple.getCount();
}
}
private List oneToMany;
private List manyToMany;
private List components;
private Set composites;
private Collection cascades;
private long id;
private Collection bag;
private Collection lazyBag = new ArrayList();
private Map ternaryMap;
private Set ternarySet;
/**
* Constructor for Container.
*/
public Container() {
super();
}
/**
* Returns the components.
* @return List
*/
public List getComponents() {
return components;
}
/**
* Returns the manyToMany.
* @return List
*/
public List getManyToMany() {
return manyToMany;
}
/**
* Returns the oneToMany.
* @return List
*/
public List getOneToMany() {
return oneToMany;
}
/**
* Sets the components.
* @param components The components to set
*/
public void setComponents(List components) {
this.components = components;
}
/**
* Sets the manyToMany.
* @param manyToMany The manyToMany to set
*/
public void setManyToMany(List manyToMany) {
this.manyToMany = manyToMany;
}
/**
* Sets the oneToMany.
* @param oneToMany The oneToMany to set
*/
public void setOneToMany(List oneToMany) {
this.oneToMany = oneToMany;
}
/**
* Returns the id.
* @return long
*/
public long getId() {
return id;
}
/**
* Sets the id.
* @param id The id to set
*/
public void setId(long id) {
this.id = id;
}
/**
* Gets the composites.
* @return Returns a Set
*/
public Set getComposites() {
return composites;
}
/**
* Sets the composites.
* @param composites The composites to set
*/
public void setComposites(Set composites) {
this.composites = composites;
}
/**
* Returns the bag.
* @return Collection
*/
public Collection getBag() {
return bag;
}
/**
* Sets the bag.
* @param bag The bag to set
*/
public void setBag(Collection bag) {
this.bag = bag;
}
/**
* Returns the ternary.
* @return Map
*/
public Map getTernaryMap() {
return ternaryMap;
}
/**
* Sets the ternary.
* @param ternary The ternary to set
*/
public void setTernaryMap(Map ternary) {
this.ternaryMap = ternary;
}
public static final | ContainerInnerClass |
java | elastic__elasticsearch | modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java | {
"start": 10350,
"end": 29041
} | class ____ extends MappedFieldType {
MappedFieldType queryTermsField;
MappedFieldType extractionResultField;
MappedFieldType queryBuilderField;
MappedFieldType minimumShouldMatchField;
RangeFieldMapper.RangeFieldType rangeField;
boolean mapUnmappedFieldsAsText;
private PercolatorFieldType(String name, Map<String, String> meta) {
super(name, IndexType.NONE, false, meta);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Query termQuery(Object value, SearchExecutionContext context) {
throw new QueryShardException(context, "Percolator fields are not searchable directly, use a percolate query instead");
}
@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
return SourceValueFetcher.identity(name(), context, format);
}
Query percolateQuery(
String name,
PercolateQuery.QueryStore queryStore,
List<BytesReference> documents,
IndexSearcher searcher,
boolean excludeNestedDocuments,
IndexVersion indexVersion
) throws IOException {
IndexReader indexReader = searcher.getIndexReader();
Tuple<BooleanQuery, Boolean> t = createCandidateQuery(indexReader);
Query candidateQuery = t.v1();
boolean canUseMinimumShouldMatchField = t.v2();
Query verifiedMatchesQuery;
// We can only skip the MemoryIndex verification when percolating a single non nested document. We cannot
// skip MemoryIndex verification when percolating multiple documents, because when terms and
// ranges are extracted from IndexReader backed by a RamDirectory holding multiple documents we do
// not know to which document the terms belong too and for certain queries we incorrectly emit candidate
// matches as actual match.
if (canUseMinimumShouldMatchField && indexReader.maxDoc() == 1) {
verifiedMatchesQuery = new TermQuery(new Term(extractionResultField.name(), EXTRACTION_COMPLETE));
} else {
verifiedMatchesQuery = new MatchNoDocsQuery("multiple or nested docs or CoveringQuery could not be used");
}
Query filter = null;
if (excludeNestedDocuments) {
filter = Queries.newNonNestedFilter(indexVersion);
}
return new PercolateQuery(name, queryStore, documents, candidateQuery, searcher, filter, verifiedMatchesQuery);
}
Tuple<BooleanQuery, Boolean> createCandidateQuery(IndexReader indexReader) throws IOException {
Tuple<List<BytesRef>, Map<String, List<byte[]>>> t = extractTermsAndRanges(indexReader);
List<BytesRef> extractedTerms = t.v1();
Map<String, List<byte[]>> encodedPointValuesByField = t.v2();
// `1 + ` is needed to take into account the EXTRACTION_FAILED should clause
boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= IndexSearcher
.getMaxClauseCount();
List<Query> subQueries = new ArrayList<>();
for (Map.Entry<String, List<byte[]>> entry : encodedPointValuesByField.entrySet()) {
String rangeFieldName = entry.getKey();
List<byte[]> encodedPointValues = entry.getValue();
byte[] min = encodedPointValues.get(0);
byte[] max = encodedPointValues.get(1);
Query query = BinaryRange.newIntersectsQuery(rangeField.name(), encodeRange(rangeFieldName, min, max));
subQueries.add(query);
}
BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder();
if (canUseMinimumShouldMatchField) {
LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name());
for (BytesRef extractedTerm : extractedTerms) {
subQueries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm)));
}
candidateQuery.add(new CoveringQuery(subQueries, valuesSource), BooleanClause.Occur.SHOULD);
} else {
candidateQuery.add(new TermInSetQuery(queryTermsField.name(), extractedTerms), BooleanClause.Occur.SHOULD);
for (Query subQuery : subQueries) {
candidateQuery.add(subQuery, BooleanClause.Occur.SHOULD);
}
}
// include extractionResultField:failed, because docs with this term have no extractedTermsField
// and otherwise we would fail to return these docs. Docs that failed query term extraction
// always need to be verified by MemoryIndex:
candidateQuery.add(new TermQuery(new Term(extractionResultField.name(), EXTRACTION_FAILED)), BooleanClause.Occur.SHOULD);
return new Tuple<>(candidateQuery.build(), canUseMinimumShouldMatchField);
}
// This was extracted the method above, because otherwise it is difficult to test what terms are included in
// the query in case a CoveringQuery is used (it does not have a getter to retrieve the clauses)
static Tuple<List<BytesRef>, Map<String, List<byte[]>>> extractTermsAndRanges(IndexReader indexReader) throws IOException {
List<BytesRef> extractedTerms = new ArrayList<>();
Map<String, List<byte[]>> encodedPointValuesByField = new HashMap<>();
LeafReader reader = indexReader.leaves().get(0).reader();
for (FieldInfo info : reader.getFieldInfos()) {
Terms terms = reader.terms(info.name);
if (terms != null) {
BytesRef fieldBr = new BytesRef(info.name);
TermsEnum tenum = terms.iterator();
for (BytesRef term = tenum.next(); term != null; term = tenum.next()) {
BytesRefBuilder builder = new BytesRefBuilder();
builder.append(fieldBr);
builder.append(FIELD_VALUE_SEPARATOR);
builder.append(term);
extractedTerms.add(builder.toBytesRef());
}
}
if (info.getPointIndexDimensionCount() == 1) { // not != 0 because range fields are not supported
PointValues values = reader.getPointValues(info.name);
List<byte[]> encodedPointValues = new ArrayList<>();
encodedPointValues.add(values.getMinPackedValue().clone());
encodedPointValues.add(values.getMaxPackedValue().clone());
encodedPointValuesByField.put(info.name, encodedPointValues);
}
}
return new Tuple<>(extractedTerms, encodedPointValuesByField);
}
}
private final Supplier<SearchExecutionContext> searchExecutionContext;
private final KeywordFieldMapper queryTermsField;
private final KeywordFieldMapper extractionResultField;
private final BinaryFieldMapper queryBuilderField;
private final NumberFieldMapper minimumShouldMatchFieldMapper;
private final RangeFieldMapper rangeFieldMapper;
private final boolean mapUnmappedFieldsAsText;
private final IndexSettings indexSettings;
private final Supplier<TransportVersion> clusterTransportVersion;
PercolatorFieldMapper(
String simpleName,
MappedFieldType mappedFieldType,
BuilderParams builderParams,
Supplier<SearchExecutionContext> searchExecutionContext,
KeywordFieldMapper queryTermsField,
KeywordFieldMapper extractionResultField,
BinaryFieldMapper queryBuilderField,
RangeFieldMapper rangeFieldMapper,
NumberFieldMapper minimumShouldMatchFieldMapper,
boolean mapUnmappedFieldsAsText,
IndexSettings indexSettings,
Supplier<TransportVersion> clusterTransportVersion
) {
super(simpleName, mappedFieldType, builderParams);
this.searchExecutionContext = searchExecutionContext;
this.queryTermsField = queryTermsField;
this.extractionResultField = extractionResultField;
this.queryBuilderField = queryBuilderField;
this.minimumShouldMatchFieldMapper = minimumShouldMatchFieldMapper;
this.rangeFieldMapper = rangeFieldMapper;
this.mapUnmappedFieldsAsText = mapUnmappedFieldsAsText;
this.indexSettings = indexSettings;
this.clusterTransportVersion = clusterTransportVersion;
}
@Override
protected boolean supportsParsingObject() {
return true;
}
@Override
public void parse(DocumentParserContext context) throws IOException {
SearchExecutionContext executionContext = this.searchExecutionContext.get();
if (context.doc().getField(queryBuilderField.fullPath()) != null) {
// If a percolator query has been defined in an array object then multiple percolator queries
// could be provided. In order to prevent this we fail if we try to parse more than one query
// for the current document.
throw new IllegalArgumentException("a document can only contain one percolator query");
}
executionContext = configureContext(executionContext, isMapUnmappedFieldAsText());
QueryBuilder queryBuilder = parseQueryBuilder(context);
// Fetching of terms, shapes and indexed scripts happen during this rewrite:
PlainActionFuture<QueryBuilder> future = new PlainActionFuture<>();
Rewriteable.rewriteAndFetch(queryBuilder, executionContext, future);
queryBuilder = future.actionGet();
IndexVersion indexVersion = context.indexSettings().getIndexVersionCreated();
createQueryBuilderField(indexVersion, clusterTransportVersion.get(), queryBuilderField, queryBuilder, context);
QueryBuilder queryBuilderForProcessing = queryBuilder.rewrite(new SearchExecutionContext(executionContext));
Query query = queryBuilderForProcessing.toQuery(executionContext);
processQuery(query, context);
}
static QueryBuilder parseQueryBuilder(DocumentParserContext context) {
XContentParser parser = context.parser();
try {
// make sure that we don't expand dots in field names while parsing, otherwise queries will
// fail parsing due to unsupported inner objects
context.path().setWithinLeafObject(true);
return parseTopLevelQuery(parser, queryName -> {
if (queryName.equals("has_child")) {
throw new IllegalArgumentException("the [has_child] query is unsupported inside a percolator query");
} else if (queryName.equals("has_parent")) {
throw new IllegalArgumentException("the [has_parent] query is unsupported inside a percolator query");
} else if (queryName.equals(KnnVectorQueryBuilder.NAME)) {
throw new IllegalArgumentException("the [knn] query is unsupported inside a percolator query");
}
});
} catch (IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
} finally {
context.path().setWithinLeafObject(false);
}
}
static void createQueryBuilderField(
IndexVersion indexVersion,
TransportVersion clusterTransportVersion,
BinaryFieldMapper qbField,
QueryBuilder queryBuilder,
DocumentParserContext context
) throws IOException {
try (
ByteArrayOutputStream stream = new ByteArrayOutputStream();
OutputStreamStreamOutput out = new OutputStreamStreamOutput(stream)
) {
if (indexVersion.before(IndexVersions.V_8_8_0)) {
// just use the index version directly
// there's a direct mapping from IndexVersion to TransportVersion before 8.8.0
out.setTransportVersion(TransportVersion.fromId(indexVersion.id()));
} else {
// write the version id to the stream first
TransportVersion.writeVersion(clusterTransportVersion, out);
out.setTransportVersion(clusterTransportVersion);
}
out.writeNamedWriteable(queryBuilder);
qbField.indexValue(context, stream.toByteArray());
}
}
void processQuery(Query query, DocumentParserContext context) {
LuceneDocument doc = context.doc();
PercolatorFieldType pft = (PercolatorFieldType) this.fieldType();
QueryAnalyzer.Result result;
result = QueryAnalyzer.analyze(query);
if (result == QueryAnalyzer.Result.UNKNOWN) {
doc.add(new StringField(pft.extractionResultField.name(), EXTRACTION_FAILED, Field.Store.NO));
return;
}
for (QueryAnalyzer.QueryExtraction extraction : result.extractions) {
if (extraction.term != null) {
BytesRefBuilder builder = new BytesRefBuilder();
builder.append(new BytesRef(extraction.field()));
builder.append(FIELD_VALUE_SEPARATOR);
builder.append(extraction.bytes());
doc.add(new StringField(queryTermsField.fullPath(), builder.toBytesRef(), Field.Store.NO));
} else if (extraction.range != null) {
byte[] min = extraction.range.lowerPoint;
byte[] max = extraction.range.upperPoint;
doc.add(new BinaryRange(rangeFieldMapper.fullPath(), encodeRange(extraction.range.fieldName, min, max)));
}
}
if (result.matchAllDocs) {
doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_FAILED, Field.Store.NO));
if (result.verified) {
doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_COMPLETE, Field.Store.NO));
}
} else if (result.verified) {
doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_COMPLETE, Field.Store.NO));
} else {
doc.add(new StringField(extractionResultField.fullPath(), EXTRACTION_PARTIAL, Field.Store.NO));
}
context.addToFieldNames(fieldType().name());
doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.fullPath(), result.minimumShouldMatch));
}
static SearchExecutionContext configureContext(SearchExecutionContext context, boolean mapUnmappedFieldsAsString) {
SearchExecutionContext wrapped = wrapAllEmptyTextFields(context);
// This means that fields in the query need to exist in the mapping prior to registering this query
// The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired.
//
// Even worse when fields mentioned in percolator queries do go added to map after the queries have been registered
// then the percolator queries don't work as expected any more.
//
// Query parsing can't introduce new fields in mappings (which happens when registering a percolator query),
// because field type can't be inferred from queries (like document do) so the best option here is to disallow
// the usage of unmapped fields in percolator queries to avoid unexpected behaviour
//
// if index.percolator.map_unmapped_fields_as_string is set to true, query can contain unmapped fields which will be mapped
// as an analyzed string.
wrapped.setAllowUnmappedFields(false);
wrapped.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
// We need to rewrite queries with name to Lucene NamedQuery to find matched sub-queries of percolator query
wrapped.setRewriteToNamedQueries();
return wrapped;
}
@Override
public Iterator<Mapper> iterator() {
return Arrays.<Mapper>asList(
queryTermsField,
extractionResultField,
queryBuilderField,
minimumShouldMatchFieldMapper,
rangeFieldMapper
).iterator();
}
@Override
protected void parseCreateField(DocumentParserContext context) {
throw new UnsupportedOperationException("should not be invoked");
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
boolean isMapUnmappedFieldAsText() {
return mapUnmappedFieldsAsText;
}
static byte[] encodeRange(String rangeFieldName, byte[] minEncoded, byte[] maxEncoded) {
assert minEncoded.length == maxEncoded.length;
byte[] bytes = new byte[BinaryRange.BYTES * 2];
// First compute hash for field name and write the full hash into the byte array
BytesRef fieldAsBytesRef = new BytesRef(rangeFieldName);
MurmurHash3.Hash128 hash = new MurmurHash3.Hash128();
MurmurHash3.hash128(fieldAsBytesRef.bytes, fieldAsBytesRef.offset, fieldAsBytesRef.length, 0, hash);
ByteBuffer bb = ByteBuffer.wrap(bytes);
bb.putLong(hash.h1).putLong(hash.h2).putLong(hash.h1).putLong(hash.h2);
assert bb.position() == bb.limit();
// Secondly, overwrite the min and max encoded values in the byte array
// This way we are able to reuse as much as possible from the hash for any range type.
int offset = BinaryRange.BYTES - minEncoded.length;
System.arraycopy(minEncoded, 0, bytes, offset, minEncoded.length);
System.arraycopy(maxEncoded, 0, bytes, BinaryRange.BYTES + offset, maxEncoded.length);
return bytes;
}
// When expanding wildcard fields for term queries, we don't expand to fields that are empty.
// This is sane behavior for typical usage. But for percolator, the fields for the may not have any terms
// Consequently, we may erroneously skip expanding those term fields.
// This override allows mapped field values to expand via wildcard input, even if the field is empty in the shard.
static SearchExecutionContext wrapAllEmptyTextFields(SearchExecutionContext searchExecutionContext) {
return new FilteredSearchExecutionContext(searchExecutionContext) {
@Override
public boolean fieldExistsInIndex(String fieldname) {
return true;
}
};
}
}
| PercolatorFieldType |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/LastValueAggFunctionWithOrderTest.java | {
"start": 5448,
"end": 6752
} | class ____
extends LastValueAggFunctionWithOrderTestBase<Boolean> {
@Override
protected List<List<Boolean>> getInputValueSets() {
return Arrays.asList(
Arrays.asList(false, false, false),
Arrays.asList(true, true, true),
Arrays.asList(true, false, null, true, false, true, null),
Arrays.asList(null, null, null),
Arrays.asList(null, true));
}
@Override
protected List<List<Long>> getInputOrderSets() {
return Arrays.asList(
Arrays.asList(6L, 2L, 3L),
Arrays.asList(1L, 2L, 3L),
Arrays.asList(10L, 2L, 5L, 3L, 11L, 7L, 5L),
Arrays.asList(6L, 9L, 5L),
Arrays.asList(4L, 3L));
}
@Override
protected List<Boolean> getExpectedResults() {
return Arrays.asList(false, true, false, null, true);
}
@Override
protected AggregateFunction<Boolean, RowData> getAggregator() {
return new LastValueAggFunction<>(DataTypes.BOOLEAN().getLogicalType());
}
}
/** Test for {@link DecimalType}. */
@Nested
final | BooleanLastValueAggFunctionWithOrderTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/MetricName.java | {
"start": 917,
"end": 1066
} | class ____ a metric's name, logical group and its related attributes. It should be constructed using metrics.metricName(...).
* <p>
* This | encapsulates |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/context/LifecycleAutoConfiguration.java | {
"start": 1498,
"end": 2039
} | class ____ {
@Bean(name = AbstractApplicationContext.LIFECYCLE_PROCESSOR_BEAN_NAME)
@ConditionalOnMissingBean(name = AbstractApplicationContext.LIFECYCLE_PROCESSOR_BEAN_NAME,
search = SearchStrategy.CURRENT)
DefaultLifecycleProcessor defaultLifecycleProcessor(LifecycleProperties properties) {
DefaultLifecycleProcessor lifecycleProcessor = new DefaultLifecycleProcessor();
lifecycleProcessor.setTimeoutPerShutdownPhase(properties.getTimeoutPerShutdownPhase().toMillis());
return lifecycleProcessor;
}
}
| LifecycleAutoConfiguration |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/batch/EmbeddableWithNullValuesExceptForAssociationsTest.java | {
"start": 2924,
"end": 3566
} | class ____ {
private Integer anInt;
@OneToOne
private EntityB entityB;
public EmbeddableA() {
}
public EmbeddableA(Integer anInt, EntityB entityB) {
this.anInt = anInt;
this.entityB = entityB;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
EmbeddableA that = (EmbeddableA) o;
return anInt == that.anInt && Objects.equals( entityB, that.entityB );
}
@Override
public int hashCode() {
return Objects.hash( anInt, entityB );
}
}
@Entity(name = "EntityB")
public static | EmbeddableA |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/oscar/Oscar.java | {
"start": 128,
"end": 218
} | class ____ {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.oscar);
}
| Oscar |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/refcolnames/mixed/Town.java | {
"start": 511,
"end": 1160
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "id", nullable = false)
Integer id;
String name;
@NaturalId
@Embedded
TownCode townCode;
@Column(name = "region_id", nullable = false)
int regionId;
@ManyToOne
@JoinColumn(name = "region_id", referencedColumnName = "id", nullable = false, insertable = false, updatable = false)
@JoinColumn(name = "country_code", referencedColumnName = "country_code", nullable = false, insertable = false, updatable = false)
@JoinColumn(name = "zip_code", referencedColumnName = "zip_code", nullable = false, insertable = false, updatable = false)
Region region;
}
| Town |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/datasource/lookup/DataSourceLookup.java | {
"start": 1070,
"end": 1428
} | interface ____ {
/**
* Retrieve the DataSource identified by the given name.
* @param dataSourceName the name of the DataSource
* @return the DataSource (never {@code null})
* @throws DataSourceLookupFailureException if the lookup failed
*/
DataSource getDataSource(String dataSourceName) throws DataSourceLookupFailureException;
}
| DataSourceLookup |
java | quarkusio__quarkus | extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesResource.java | {
"start": 323,
"end": 591
} | class ____ {
@Inject
JsonWebToken jwt;
@GET
public String get() {
if ("bob".equals(jwt.getName())) {
return "tester";
}
throw new ForbiddenException("Only user 'bob' is allowed to request roles");
}
}
| RolesResource |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsInOnlyDisableTimeToLiveTest.java | {
"start": 1576,
"end": 5040
} | class ____ extends AbstractJMSTest {
private static final Logger LOG = LoggerFactory.getLogger(JmsInOnlyDisableTimeToLiveTest.class);
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new TransientCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
private final String urlTimeout = "activemq:JmsInOnlyDisableTimeToLiveTest.in?timeToLive=2000";
private final String urlTimeToLiveDisabled
= "activemq:JmsInOnlyDisableTimeToLiveTest.in?timeToLive=2000&disableTimeToLive=true";
private CountDownLatch messageWasExpiredCountDownLatch = new CountDownLatch(2);
@Test
public void testInOnlyExpired() throws Exception {
MyCoolBean cool = new MyCoolBean(consumer, template, "JmsInOnlyDisableTimeToLiveTest");
getMockEndpoint("mock:result").expectedBodiesReceived("World 1");
// setup a message that will timeout to prove the ttl is getting set
// and that the disableTimeToLive is defaulting to false
template.sendBody("direct:timeout", "World 1");
MockEndpoint.assertIsSatisfied(context);
// wait after the msg has expired
messageWasExpiredCountDownLatch.await(2000, TimeUnit.MILLISECONDS);
MockEndpoint.resetMocks(context);
getMockEndpoint("mock:end").expectedMessageCount(0);
cool.someBusinessLogic();
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testInOnlyDisabledTimeToLive() throws Exception {
MyCoolBean cool = new MyCoolBean(consumer, template, "JmsInOnlyDisableTimeToLiveTest");
getMockEndpoint("mock:result").expectedBodiesReceived("World 2");
// send a message that sets the requestTimeout to 2 secs with a
// disableTimeToLive set to true, this should timeout
// but leave the message on the queue to be processed
// by the CoolBean
template.sendBody("direct:disable", "World 2");
MockEndpoint.assertIsSatisfied(context);
// wait after the msg has expired
messageWasExpiredCountDownLatch.await(2000, TimeUnit.MILLISECONDS);
MockEndpoint.resetMocks(context);
getMockEndpoint("mock:end").expectedBodiesReceived("Hello World 2");
cool.someBusinessLogic();
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:timeout")
.to(urlTimeout)
.to("mock:result");
from("direct:disable")
.to(urlTimeToLiveDisabled)
.to("mock:result");
from("activemq:JmsInOnlyDisableTimeToLiveTest.out")
.to("mock:end");
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
| JmsInOnlyDisableTimeToLiveTest |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMessageConverterMethodArgumentResolver.java | {
"start": 14083,
"end": 15405
} | class ____ implements HttpInputMessage {
private final HttpHeaders headers;
private final @Nullable InputStream body;
public EmptyBodyCheckingHttpInputMessage(HttpInputMessage inputMessage) throws IOException {
this.headers = inputMessage.getHeaders();
InputStream inputStream = inputMessage.getBody();
if (inputStream.markSupported()) {
inputStream.mark(1);
this.body = (inputStream.read() != -1 ? inputStream : null);
inputStream.reset();
}
else {
PushbackInputStream pushbackInputStream = new PushbackInputStream(inputStream);
int b = pushbackInputStream.read();
if (b == -1) {
this.body = null;
}
else {
this.body = pushbackInputStream;
pushbackInputStream.unread(b);
}
}
}
@Override
public HttpHeaders getHeaders() {
return this.headers;
}
@Override
public InputStream getBody() {
return (this.body != null ? this.body : InputStream.nullInputStream());
}
public boolean hasBody() {
return (this.body != null);
}
}
/**
* Placeholder HttpMessageConverter type to pass to RequestBodyAdvice if there
* is no content-type and no content. In that case, we may not find a converter,
* but RequestBodyAdvice have a chance to provide it via handleEmptyBody.
*/
private static | EmptyBodyCheckingHttpInputMessage |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PrivateConstructorForUtilityClassTest.java | {
"start": 2848,
"end": 3218
} | class ____ implements Serializable {
private static final long serialVersionUID = 123456789012L;
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void privateScopedClassesGetLeftAlone() {
testHelper
.addInputLines(
"in/Test.java",
"""
final | Bar |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/Fetcher.java | {
"start": 1594,
"end": 2671
} | class ____ the fetching process with the brokers.
* <p>
* Thread-safety:
* Requests and responses of Fetcher may be processed by different threads since heartbeat
* thread may process responses. Other operations are single-threaded and invoked only from
* the thread polling the consumer.
* <ul>
* <li>If a response handler accesses any shared state of the Fetcher (e.g. FetchSessionHandler),
* all access to that state must be synchronized on the Fetcher instance.</li>
* <li>If a response handler accesses any shared state of the coordinator (e.g. SubscriptionState),
* it is assumed that all access to that state is synchronized on the coordinator instance by
* the caller.</li>
* <li>At most one request is pending for each node at any time. Nodes with pending requests are
* tracked and updated after processing the response. This ensures that any state (e.g. epoch)
* updated while processing responses on one thread are visible while creating the subsequent request
* on a different thread.</li>
* </ul>
*/
public | manages |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_ascii_0_31.java | {
"start": 194,
"end": 3563
} | class ____ extends TestCase {
public void test_0() throws Exception {
for (int i = 0; i < 32; ++i) {
StringBuilder buf = new StringBuilder();
char ch = (char) i;
buf.append(ch);
String text = JSON.toJSONString(buf.toString(), SerializerFeature.BrowserCompatible);
switch (ch) {
case '"':
Assert.assertEquals("\"\\\"\"", text);
break;
case '/':
Assert.assertEquals("\"\\/\"", text);
break;
case '\\':
Assert.assertEquals("\"\\\\\"", text);
break;
case '\b':
Assert.assertEquals("\"\\b\"", text);
break;
case '\f':
Assert.assertEquals("\"\\f\"", text);
break;
case '\n':
Assert.assertEquals("\"\\n\"", text);
break;
case '\r':
Assert.assertEquals("\"\\r\"", text);
break;
case '\t':
Assert.assertEquals("\"\\t\"", text);
break;
default:
if (i < 16) {
Assert.assertEquals("\"\\u000" + Integer.toHexString(i).toUpperCase() + "\"", text);
} else {
Assert.assertEquals("\"\\u00" + Integer.toHexString(i).toUpperCase() + "\"", text);
}
break;
}
VO vo = new VO();
vo.setContent(buf.toString());
String voText = JSON.toJSONString(vo, SerializerFeature.BrowserCompatible);
switch (ch) {
case '"':
Assert.assertEquals("{\"content\":\"\\\"\"}", voText);
break;
case '/':
Assert.assertEquals("{\"content\":\"\\/\"}", voText);
break;
case '\\':
Assert.assertEquals("{\"content\":\"\\\\\"}", voText);
break;
case '\b':
Assert.assertEquals("{\"content\":\"\\b\"}", voText);
break;
case '\f':
Assert.assertEquals("{\"content\":\"\\f\"}", voText);
break;
case '\n':
Assert.assertEquals("{\"content\":\"\\n\"}", voText);
break;
case '\r':
Assert.assertEquals("{\"content\":\"\\r\"}", voText);
break;
case '\t':
Assert.assertEquals("{\"content\":\"\\t\"}", voText);
break;
default:
if (i < 16) {
Assert.assertEquals("{\"content\":\"\\u000" + Integer.toHexString(i).toUpperCase() + "\"}",
voText);
} else {
Assert.assertEquals("{\"content\":\"\\u00" + Integer.toHexString(i).toUpperCase() + "\"}",
voText);
}
break;
}
}
}
public static | Bug_for_ascii_0_31 |
java | apache__camel | components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/EditMessageReplyMarkupMessage.java | {
"start": 2552,
"end": 3814
} | class ____ {
protected String chatId;
private Integer messageId;
private String inlineMessageId;
private InlineKeyboardMarkup replyMarkup;
public Builder() {
}
public Builder messageId(Integer messageId) {
this.messageId = messageId;
return this;
}
public Builder inlineMessageId(String inlineMessageId) {
this.inlineMessageId = inlineMessageId;
return this;
}
public Builder replyMarkup(InlineKeyboardMarkup replyMarkup) {
this.replyMarkup = replyMarkup;
return this;
}
public Builder chatId(String chatId) {
this.chatId = chatId;
return this;
}
public EditMessageReplyMarkupMessage build() {
return new EditMessageReplyMarkupMessage(chatId, messageId, inlineMessageId, replyMarkup);
}
}
@Override
public String toString() {
return "EditMessageReplyMarkupMessage{"
+ "messageId=" + messageId
+ ", inlineMessageId='" + inlineMessageId + '\''
+ ", replyMarkup=" + replyMarkup
+ ", chatId='" + chatId + '\''
+ '}';
}
}
| Builder |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java | {
"start": 8810,
"end": 9968
} | class ____ implements com.jcraft.jsch.Logger {
static final Logger LOG = LoggerFactory.getLogger(
SshFenceByTcpPort.class.getName() + ".jsch");
@Override
public boolean isEnabled(int level) {
switch (level) {
case com.jcraft.jsch.Logger.DEBUG:
return LOG.isDebugEnabled();
case com.jcraft.jsch.Logger.INFO:
return LOG.isInfoEnabled();
case com.jcraft.jsch.Logger.WARN:
return LOG.isWarnEnabled();
case com.jcraft.jsch.Logger.ERROR:
case com.jcraft.jsch.Logger.FATAL:
return LOG.isErrorEnabled();
default:
return false;
}
}
@Override
public void log(int level, String message) {
switch (level) {
case com.jcraft.jsch.Logger.DEBUG:
LOG.debug(message);
break;
case com.jcraft.jsch.Logger.INFO:
LOG.info(message);
break;
case com.jcraft.jsch.Logger.WARN:
LOG.warn(message);
break;
case com.jcraft.jsch.Logger.ERROR:
case com.jcraft.jsch.Logger.FATAL:
LOG.error(message);
break;
default:
break;
}
}
}
}
| LogAdapter |
java | apache__camel | components/camel-mail/src/test/java/org/apache/camel/component/mail/MailMaxMessagesPerPollTest.java | {
"start": 1360,
"end": 3551
} | class ____ extends CamelTestSupport {
private static final MailboxUser jones = Mailbox.getOrCreateUser("jones", "secret");
@Override
public void doPreSetup() throws Exception {
prepareMailbox();
}
@Test
public void testBatchConsumer() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.setResultWaitTime(2000);
mock.expectedMessageCount(3);
mock.message(0).body().isEqualTo("Message 0");
mock.message(1).body().isEqualTo("Message 1");
mock.message(2).body().isEqualTo("Message 2");
mock.expectedPropertyReceived(Exchange.BATCH_SIZE, 3);
MockEndpoint.assertIsSatisfied(context);
mock.reset();
mock.expectedMessageCount(2);
mock.expectedPropertyReceived(Exchange.BATCH_SIZE, 2);
mock.message(0).body().isEqualTo("Message 3");
mock.message(1).body().isEqualTo("Message 4");
MockEndpoint.assertIsSatisfied(context);
}
private void prepareMailbox() throws Exception {
// connect to mailbox
Mailbox.clearAll();
JavaMailSender sender = new DefaultJavaMailSender();
Store store = sender.getSession().getStore("imap");
store.connect("localhost", Mailbox.getPort(Protocol.imap), jones.getLogin(), jones.getPassword());
Folder folder = store.getFolder("INBOX");
folder.open(Folder.READ_WRITE);
folder.expunge();
// inserts 5 new messages
Message[] messages = new Message[5];
for (int i = 0; i < 5; i++) {
messages[i] = new MimeMessage(sender.getSession());
messages[i].setHeader("Message-ID", Integer.toString(i));
messages[i].setText("Message " + i);
}
folder.appendMessages(messages);
folder.close(true);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(jones.uriPrefix(Protocol.imap) + "&initialDelay=100&delay=100&maxMessagesPerPoll=3"
+ "&delete=true").to("mock:result");
}
};
}
}
| MailMaxMessagesPerPollTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UseCorrectAssertInTestsTest.java | {
"start": 3959,
"end": 4272
} | class ____ {",
" void foo() {",
" assertThat(true).isTrue();",
" }",
"}")
.setArgs(TEST_ONLY)
.doTest();
}
@Test
public void assertInNonTestCode() {
refactoringHelper
.addInputLines(
INPUT,
"public | FooTest |
java | apache__flink | flink-core/src/test/java/org/apache/flink/util/MethodForwardingTestUtil.java | {
"start": 3663,
"end": 8073
} | class ____ the delegate.
* @param wrapperFactory factory that produces a wrapper from a delegate.
* @param delegateObjectSupplier supplier for the delegate object passed to the wrapper factory.
* @param skipMethodSet set of methods to ignore.
* @param <D> type of the delegate
* @param <W> type of the wrapper
* @param <I> type of the object created as delegate, is a subtype of D.
*/
public static <D, W, I extends D> void testMethodForwarding(
Class<D> delegateClass,
Function<I, W> wrapperFactory,
Supplier<I> delegateObjectSupplier,
Set<Method> skipMethodSet)
throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Preconditions.checkNotNull(delegateClass);
Preconditions.checkNotNull(wrapperFactory);
Preconditions.checkNotNull(skipMethodSet);
I delegate = delegateObjectSupplier.get();
// check if we need to wrap the delegate object as a spy, or if it is already testable with
// Mockito.
if (!MockUtil.isSpy(delegate) || !MockUtil.isMock(delegate)) {
delegate = spy(delegate);
}
W wrapper = wrapperFactory.apply(delegate);
// ensure that wrapper is a subtype of delegate
Preconditions.checkArgument(delegateClass.isAssignableFrom(wrapper.getClass()));
for (Method delegateMethod : delegateClass.getMethods()) {
if (checkSkipMethodForwardCheck(delegateMethod, skipMethodSet)) {
continue;
}
// find the correct method to substitute the bridge for erased generic types.
// if this doesn't work, the user need to exclude the method and write an additional
// test.
Method wrapperMethod =
wrapper.getClass()
.getMethod(
delegateMethod.getName(), delegateMethod.getParameterTypes());
// things get a bit fuzzy here, best effort to find a match but this might end up with a
// wrong method.
if (wrapperMethod.isBridge()) {
for (Method method : wrapper.getClass().getMethods()) {
if (!method.isBridge()
&& method.getName().equals(wrapperMethod.getName())
&& method.getParameterCount() == wrapperMethod.getParameterCount()) {
wrapperMethod = method;
break;
}
}
}
Class<?>[] parameterTypes = wrapperMethod.getParameterTypes();
Object[] arguments = new Object[parameterTypes.length];
for (int j = 0; j < arguments.length; j++) {
Class<?> parameterType = parameterTypes[j];
if (parameterType.isArray()) {
arguments[j] = Array.newInstance(parameterType.getComponentType(), 0);
} else if (parameterType.isPrimitive()) {
if (boolean.class.equals(parameterType)) {
arguments[j] = false;
} else if (char.class.equals(parameterType)) {
arguments[j] = 'a';
} else {
arguments[j] = (byte) 0;
}
} else {
arguments[j] = Mockito.mock(parameterType);
}
}
wrapperMethod.invoke(wrapper, arguments);
delegateMethod.invoke(Mockito.verify(delegate, Mockito.times(1)), arguments);
reset(delegate);
}
}
/**
* Test if this method should be skipped in our check for proper forwarding, e.g. because it is
* just a bridge.
*/
private static boolean checkSkipMethodForwardCheck(
Method delegateMethod, Set<Method> skipMethods) {
if (delegateMethod.isBridge()
|| delegateMethod.isDefault()
|| skipMethods.contains(delegateMethod)) {
return true;
}
// skip methods declared in Object (Mockito doesn't like them)
try {
Object.class.getMethod(delegateMethod.getName(), delegateMethod.getParameterTypes());
return true;
} catch (Exception ignore) {
}
return false;
}
}
| for |
java | eclipse-vertx__vert.x | vertx-core/src/main/generated/io/vertx/core/file/FileSystemOptionsConverter.java | {
"start": 338,
"end": 1578
} | class ____ {
static void fromJson(Iterable<java.util.Map.Entry<String, Object>> json, FileSystemOptions obj) {
for (java.util.Map.Entry<String, Object> member : json) {
switch (member.getKey()) {
case "classPathResolvingEnabled":
if (member.getValue() instanceof Boolean) {
obj.setClassPathResolvingEnabled((Boolean)member.getValue());
}
break;
case "fileCachingEnabled":
if (member.getValue() instanceof Boolean) {
obj.setFileCachingEnabled((Boolean)member.getValue());
}
break;
case "fileCacheDir":
if (member.getValue() instanceof String) {
obj.setFileCacheDir((String)member.getValue());
}
break;
}
}
}
static void toJson(FileSystemOptions obj, JsonObject json) {
toJson(obj, json.getMap());
}
static void toJson(FileSystemOptions obj, java.util.Map<String, Object> json) {
json.put("classPathResolvingEnabled", obj.isClassPathResolvingEnabled());
json.put("fileCachingEnabled", obj.isFileCachingEnabled());
if (obj.getFileCacheDir() != null) {
json.put("fileCacheDir", obj.getFileCacheDir());
}
}
}
| FileSystemOptionsConverter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/sql/exec/manytoone/ManyToOneTest.java | {
"start": 1302,
"end": 11015
} | class ____ {
@Test
public void testHqlSelectWithoutJoin(SessionFactoryScope scope) {
StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery( "from OtherEntity", OtherEntity.class )
.uniqueResult();
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
assertThat( otherEntity.getName(), is( "Bar" ) );
SimpleEntity simpleEntity = otherEntity.getSimpleEntity();
assertFalse( Hibernate.isInitialized( simpleEntity ) );
AnotherSimpleEntity anotherSimpleEntity = otherEntity.getAnotherSimpleEntity();
// the ManyToOne is eager but the value is null so a second query is not executed
assertTrue( Hibernate.isInitialized( anotherSimpleEntity ) );
assertThat( simpleEntity.getName(), is( "Fab" ) );
assertThat( statistics.getPrepareStatementCount(), is( 2L ) );
assertTrue( Hibernate.isInitialized( simpleEntity ) );
}
);
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery( "from OtherEntity", OtherEntity.class )
.uniqueResult();
AnotherSimpleEntity anotherSimpleEntity = new AnotherSimpleEntity();
anotherSimpleEntity.setId( 3 );
anotherSimpleEntity.setName( "other" );
session.persist( anotherSimpleEntity );
otherEntity.setAnotherSimpleEntity( anotherSimpleEntity );
}
);
statistics.clear();
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery( "from OtherEntity", OtherEntity.class )
.uniqueResult();
// the ManyToOne is eager but the value is not null so a second query is executed
assertThat( statistics.getPrepareStatementCount(), is( 2L ) );
assertThat( otherEntity.getName(), is( "Bar" ) );
SimpleEntity simpleEntity = otherEntity.getSimpleEntity();
assertFalse( Hibernate.isInitialized( simpleEntity ) );
AnotherSimpleEntity anotherSimpleEntity = otherEntity.getAnotherSimpleEntity();
assertTrue( Hibernate.isInitialized( anotherSimpleEntity ) );
assertThat( anotherSimpleEntity.getName(), is( "other" ) );
assertThat( statistics.getPrepareStatementCount(), is( 2L ) );
}
);
}
@Test
public void testHQLSelectWithJoin(SessionFactoryScope scope) {
StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery( "from OtherEntity o join o.simpleEntity", OtherEntity.class )
.uniqueResult();
// the eager association is null
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
assertThat( otherEntity.getName(), is( "Bar" ) );
SimpleEntity simpleEntity = otherEntity.getSimpleEntity();
assertFalse( Hibernate.isInitialized( simpleEntity ) );
assertThat( simpleEntity, notNullValue() );
assertThat( simpleEntity.getName(), is( "Fab" ) );
assertThat( statistics.getPrepareStatementCount(), is( 2L ) );
AnotherSimpleEntity anotherSimpleEntity = otherEntity.getAnotherSimpleEntity();
assertTrue( Hibernate.isInitialized( anotherSimpleEntity ) );
assertThat( statistics.getPrepareStatementCount(), is( 2L ) );
}
);
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery( "from OtherEntity", OtherEntity.class )
.uniqueResult();
AnotherSimpleEntity anotherSimpleEntity = new AnotherSimpleEntity();
anotherSimpleEntity.setId( 3 );
anotherSimpleEntity.setName( "other" );
session.persist( anotherSimpleEntity );
otherEntity.setAnotherSimpleEntity( anotherSimpleEntity );
}
);
statistics.clear();
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery( "from OtherEntity o join o.simpleEntity", OtherEntity.class )
.uniqueResult();
// the eager association is not null so a second select is executed
assertThat( statistics.getPrepareStatementCount(), is( 2L ) );
assertThat( otherEntity.getName(), is( "Bar" ) );
SimpleEntity simpleEntity = otherEntity.getSimpleEntity();
assertFalse( Hibernate.isInitialized( simpleEntity ) );
assertThat( simpleEntity, notNullValue() );
assertThat( simpleEntity.getName(), is( "Fab" ) );
assertThat( statistics.getPrepareStatementCount(), is( 3L ) );
AnotherSimpleEntity anotherSimpleEntity = otherEntity.getAnotherSimpleEntity();
assertTrue( Hibernate.isInitialized( anotherSimpleEntity ) );
assertThat( anotherSimpleEntity.getName(), is( "other" ) );
assertThat( statistics.getPrepareStatementCount(), is( 3L ) );
}
);
}
@Test
public void testHQLSelectWithFetchJoin(SessionFactoryScope scope) {
StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery( "from OtherEntity o join fetch o.simpleEntity", OtherEntity.class )
.uniqueResult();
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
assertThat( otherEntity.getName(), is( "Bar" ) );
SimpleEntity simpleEntity = otherEntity.getSimpleEntity();
assertTrue( Hibernate.isInitialized( simpleEntity ) );
assertThat( simpleEntity, notNullValue() );
assertThat( simpleEntity.getName(), is( "Fab" ) );
AnotherSimpleEntity anotherSimpleEntity = otherEntity.getAnotherSimpleEntity();
assertTrue( Hibernate.isInitialized( anotherSimpleEntity ) );
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
}
);
}
@Test
public void testSelectWithBothFetchJoin(SessionFactoryScope scope) {
StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery(
"from OtherEntity o join fetch o.simpleEntity left join fetch o.anotherSimpleEntity",
OtherEntity.class
)
.uniqueResult();
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
assertThat( otherEntity.getName(), is( "Bar" ) );
SimpleEntity simpleEntity = otherEntity.getSimpleEntity();
assertTrue( Hibernate.isInitialized( simpleEntity ) );
assertThat( simpleEntity, notNullValue() );
assertThat( simpleEntity.getName(), is( "Fab" ) );
assertTrue( Hibernate.isInitialized( otherEntity.getAnotherSimpleEntity() ) );
assertThat( otherEntity.getAnotherSimpleEntity(), nullValue() );
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
}
);
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery( "from OtherEntity", OtherEntity.class )
.uniqueResult();
AnotherSimpleEntity anotherSimpleEntity = new AnotherSimpleEntity();
anotherSimpleEntity.setId( 3 );
anotherSimpleEntity.setName( "other" );
session.persist( anotherSimpleEntity );
otherEntity.setAnotherSimpleEntity( anotherSimpleEntity );
}
);
statistics.clear();
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.
createQuery(
"from OtherEntity o join fetch o.simpleEntity left join fetch o.anotherSimpleEntity",
OtherEntity.class
)
.uniqueResult();
// the ManyToOne is eager but the value is not null so a second query is executed
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
assertThat( otherEntity.getName(), is( "Bar" ) );
SimpleEntity simpleEntity = otherEntity.getSimpleEntity();
assertTrue( Hibernate.isInitialized( simpleEntity ) );
AnotherSimpleEntity anotherSimpleEntity = otherEntity.getAnotherSimpleEntity();
assertTrue( Hibernate.isInitialized( anotherSimpleEntity ) );
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
}
);
}
@Test
public void testGet(SessionFactoryScope scope) {
StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
scope.inTransaction(
session -> {
OtherEntity otherEntity = session.get( OtherEntity.class, 2 );
assertThat( otherEntity.getName(), is( "Bar" ) );
assertFalse( Hibernate.isInitialized( otherEntity.getSimpleEntity() ) );
assertTrue( Hibernate.isInitialized( otherEntity.getAnotherSimpleEntity() ) );
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
}
);
}
@Test
public void testDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.remove( session.get( OtherEntity.class, 2 ) );
}
);
scope.inTransaction(
session -> {
assertThat( session.get( OtherEntity.class, 2 ), nullValue() );
assertThat( session.get( SimpleEntity.class, 1 ), notNullValue() );
}
);
}
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
SimpleEntity simpleEntity = new SimpleEntity();
simpleEntity.setId( 1 );
simpleEntity.setName( "Fab" );
session.persist( simpleEntity );
OtherEntity otherEntity = new OtherEntity();
otherEntity.setId( 2 );
otherEntity.setName( "Bar" );
otherEntity.setSimpleEntity( simpleEntity );
session.persist( otherEntity );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Entity(name = "OtherEntity")
@Table(name = "other_entity")
public static | ManyToOneTest |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/mock/http/client/MockClientHttpResponse.java | {
"start": 1119,
"end": 2983
} | class ____ extends MockHttpInputMessage implements ClientHttpResponse {
private final HttpStatusCode statusCode;
/**
* Create a {@code MockClientHttpResponse} with an empty response body and
* HTTP status code {@link HttpStatus#OK OK}.
* @since 6.0.3
*/
public MockClientHttpResponse() {
this(new byte[0], HttpStatus.OK);
}
/**
* Create a {@code MockClientHttpResponse} with response body as a byte array
* and the supplied HTTP status code.
*/
public MockClientHttpResponse(byte[] body, HttpStatusCode statusCode) {
super(body);
Assert.notNull(statusCode, "HttpStatusCode must not be null");
this.statusCode = statusCode;
}
/**
* Create a {@code MockClientHttpResponse} with response body as a byte array
* and a custom HTTP status code.
* @since 5.3.17
*/
public MockClientHttpResponse(byte[] body, int statusCode) {
this(body, HttpStatusCode.valueOf(statusCode));
}
/**
* Create a {@code MockClientHttpResponse} with response body as {@link InputStream}
* and the supplied HTTP status code.
*/
public MockClientHttpResponse(InputStream body, HttpStatusCode statusCode) {
super(body);
Assert.notNull(statusCode, "HttpStatusCode must not be null");
this.statusCode = statusCode;
}
/**
* Create a {@code MockClientHttpResponse} with response body as {@link InputStream}
* and a custom HTTP status code.
* @since 5.3.17
*/
public MockClientHttpResponse(InputStream body, int statusCode) {
this(body, HttpStatusCode.valueOf(statusCode));
}
@Override
public HttpStatusCode getStatusCode() {
return this.statusCode;
}
@Override
public String getStatusText() {
return (this.statusCode instanceof HttpStatus status ? status.getReasonPhrase() : "");
}
@Override
public void close() {
try {
getBody().close();
}
catch (IOException ignored) {
}
}
}
| MockClientHttpResponse |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/AnyKeyJavaClass.java | {
"start": 905,
"end": 983
} | interface ____ {
/**
* The Java Class
*/
Class<?> value();
}
| AnyKeyJavaClass |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/inheritance/SingleTableInheritanceLazyAssociationTest.java | {
"start": 5200,
"end": 5558
} | class ____ {
@Id
private final String userId;
@Version
private int version;
protected User() {
this.userId = null;
}
public User(String id) {
this.userId = id;
}
public String getId() {
return this.userId;
}
public abstract Address getAddress();
}
@Entity(name = "UserA")
@DiscriminatorValue("USER_A")
public static | User |
java | netty__netty | buffer/src/test/java/io/netty/buffer/ByteBufAllocatorTest.java | {
"start": 776,
"end": 7233
} | class ____ {
protected abstract int defaultMaxCapacity();
protected abstract int defaultMaxComponents();
protected abstract ByteBufAllocator newAllocator(boolean preferDirect);
@Test
public void testBuffer() {
testBuffer(true);
testBuffer(false);
}
private void testBuffer(boolean preferDirect) {
ByteBufAllocator allocator = newAllocator(preferDirect);
ByteBuf buffer = allocator.buffer(1);
try {
assertBuffer(buffer, isDirectExpected(preferDirect), 1, defaultMaxCapacity());
} finally {
buffer.release();
}
}
@Test
public void testBufferWithCapacity() {
testBufferWithCapacity(true, 8);
testBufferWithCapacity(false, 8);
}
private void testBufferWithCapacity(boolean preferDirect, int maxCapacity) {
ByteBufAllocator allocator = newAllocator(preferDirect);
ByteBuf buffer = allocator.buffer(1, maxCapacity);
try {
assertBuffer(buffer, isDirectExpected(preferDirect), 1, maxCapacity);
} finally {
buffer.release();
}
}
protected abstract boolean isDirectExpected(boolean preferDirect);
@Test
public void testHeapBuffer() {
testHeapBuffer(true);
testHeapBuffer(false);
}
private void testHeapBuffer(boolean preferDirect) {
ByteBufAllocator allocator = newAllocator(preferDirect);
ByteBuf buffer = allocator.heapBuffer(1);
try {
assertBuffer(buffer, false, 1, defaultMaxCapacity());
} finally {
buffer.release();
}
}
@Test
public void testHeapBufferMaxCapacity() {
testHeapBuffer(true, 8);
testHeapBuffer(false, 8);
}
private void testHeapBuffer(boolean preferDirect, int maxCapacity) {
ByteBufAllocator allocator = newAllocator(preferDirect);
ByteBuf buffer = allocator.heapBuffer(1, maxCapacity);
try {
assertBuffer(buffer, false, 1, maxCapacity);
} finally {
buffer.release();
}
}
@Test
public void testDirectBuffer() {
testDirectBuffer(true);
testDirectBuffer(false);
}
private void testDirectBuffer(boolean preferDirect) {
ByteBufAllocator allocator = newAllocator(preferDirect);
ByteBuf buffer = allocator.directBuffer(1);
try {
assertBuffer(buffer, true, 1, defaultMaxCapacity());
} finally {
buffer.release();
}
}
@Test
public void testDirectBufferMaxCapacity() {
testDirectBuffer(true, 8);
testDirectBuffer(false, 8);
}
private void testDirectBuffer(boolean preferDirect, int maxCapacity) {
ByteBufAllocator allocator = newAllocator(preferDirect);
ByteBuf buffer = allocator.directBuffer(1, maxCapacity);
try {
assertBuffer(buffer, true, 1, maxCapacity);
} finally {
buffer.release();
}
}
@Test
public void testCompositeBuffer() {
testCompositeBuffer(true);
testCompositeBuffer(false);
}
private void testCompositeBuffer(boolean preferDirect) {
ByteBufAllocator allocator = newAllocator(preferDirect);
CompositeByteBuf buffer = allocator.compositeBuffer();
try {
assertCompositeByteBuf(buffer, defaultMaxComponents());
} finally {
buffer.release();
}
}
@Test
public void testCompositeBufferWithCapacity() {
testCompositeHeapBufferWithCapacity(true, 8);
testCompositeHeapBufferWithCapacity(false, 8);
}
@Test
public void testCompositeHeapBuffer() {
testCompositeHeapBuffer(true);
testCompositeHeapBuffer(false);
}
private void testCompositeHeapBuffer(boolean preferDirect) {
ByteBufAllocator allocator = newAllocator(preferDirect);
CompositeByteBuf buffer = allocator.compositeHeapBuffer();
try {
assertCompositeByteBuf(buffer, defaultMaxComponents());
} finally {
buffer.release();
}
}
@Test
public void testCompositeHeapBufferWithCapacity() {
testCompositeHeapBufferWithCapacity(true, 8);
testCompositeHeapBufferWithCapacity(false, 8);
}
private void testCompositeHeapBufferWithCapacity(boolean preferDirect, int maxNumComponents) {
ByteBufAllocator allocator = newAllocator(preferDirect);
CompositeByteBuf buffer = allocator.compositeHeapBuffer(maxNumComponents);
try {
assertCompositeByteBuf(buffer, maxNumComponents);
} finally {
buffer.release();
}
}
@Test
public void testCompositeDirectBuffer() {
testCompositeDirectBuffer(true);
testCompositeDirectBuffer(false);
}
private void testCompositeDirectBuffer(boolean preferDirect) {
ByteBufAllocator allocator = newAllocator(preferDirect);
CompositeByteBuf buffer = allocator.compositeDirectBuffer();
try {
assertCompositeByteBuf(buffer, defaultMaxComponents());
} finally {
buffer.release();
}
}
@Test
public void testCompositeDirectBufferWithCapacity() {
testCompositeDirectBufferWithCapacity(true, 8);
testCompositeDirectBufferWithCapacity(false, 8);
}
private void testCompositeDirectBufferWithCapacity(boolean preferDirect, int maxNumComponents) {
ByteBufAllocator allocator = newAllocator(preferDirect);
CompositeByteBuf buffer = allocator.compositeDirectBuffer(maxNumComponents);
try {
assertCompositeByteBuf(buffer, maxNumComponents);
} finally {
buffer.release();
}
}
private static void assertBuffer(
ByteBuf buffer, boolean expectedDirect, int expectedCapacity, int expectedMaxCapacity) {
assertEquals(expectedDirect, buffer.isDirect());
assertEquals(expectedCapacity, buffer.capacity());
assertEquals(expectedMaxCapacity, buffer.maxCapacity());
}
private void assertCompositeByteBuf(
CompositeByteBuf buffer, int expectedMaxNumComponents) {
assertEquals(0, buffer.numComponents());
assertEquals(expectedMaxNumComponents, buffer.maxNumComponents());
assertBuffer(buffer, false, 0, defaultMaxCapacity());
}
}
| ByteBufAllocatorTest |
java | micronaut-projects__micronaut-core | http-server-netty/src/test/groovy/io/micronaut/docs/http/server/exception/OutOfStockException.java | {
"start": 674,
"end": 743
} | class ____ extends RuntimeException {
}
//end::clazz[] | OutOfStockException |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/ConcatPipeFunction.java | {
"start": 1193,
"end": 2683
} | class ____ extends AbstractSqmSelfRenderingFunctionDescriptor {
private final PatternRenderer clobPatternRenderer;
public ConcatPipeFunction(String clobPattern, TypeConfiguration typeConfiguration) {
super(
"concat",
StandardArgumentsValidators.min( 1 ),
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.STRING )
),
StandardFunctionArgumentTypeResolvers.impliedOrInvariant( typeConfiguration, STRING )
);
this.clobPatternRenderer = new PatternRenderer( clobPattern );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
String separator = "(";
for ( int i = 0; i < sqlAstArguments.size(); i++ ) {
final Expression expression = (Expression) sqlAstArguments.get( i );
final JdbcType jdbcType = expression.getExpressionType().getSingleJdbcMapping().getJdbcType();
sqlAppender.appendSql( separator );
switch ( jdbcType.getDdlTypeCode() ) {
case SqlTypes.CLOB:
case SqlTypes.NCLOB:
clobPatternRenderer.render( sqlAppender, Collections.singletonList( expression ), walker );
break;
default:
expression.accept( walker );
break;
}
separator = "||";
}
sqlAppender.appendSql( ')' );
}
@Override
public String getSignature(String name) {
return "(STRING string0[, STRING string1[, ...]])";
}
}
| ConcatPipeFunction |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/SpannerDialectTableExporter.java | {
"start": 830,
"end": 3651
} | class ____ implements Exporter<Table> {
private final SpannerDialect spannerDialect;
private final String createTableTemplate;
/**
* Constructor.
*
* @param spannerDialect a Cloud Spanner dialect.
*/
public SpannerDialectTableExporter(SpannerDialect spannerDialect) {
this.spannerDialect = spannerDialect;
this.createTableTemplate =
this.spannerDialect.getCreateTableString() + " {0} ({1}) PRIMARY KEY ({2})";
}
@Override
public String[] getSqlCreateStrings(Table table, Metadata metadata, SqlStringGenerationContext context) {
Collection<Column> keyColumns;
if ( table.hasPrimaryKey() ) {
// a typical table that corresponds to an entity type
keyColumns = table.getPrimaryKey().getColumns();
}
else if ( !table.getForeignKeyCollection().isEmpty() ) {
// a table with no PK's but has FK's; often corresponds to element collection properties
keyColumns = table.getColumns();
}
else {
// the case corresponding to a sequence-table that will only have 1 row.
keyColumns = Collections.emptyList();
}
return getTableString( table, metadata, keyColumns, context );
}
private String[] getTableString(Table table, Metadata metadata, Iterable<Column> keyColumns, SqlStringGenerationContext context) {
String primaryKeyColNames = StreamSupport.stream( keyColumns.spliterator(), false )
.map( Column::getName )
.collect( Collectors.joining( "," ) );
StringJoiner colsAndTypes = new StringJoiner( "," );
for ( Column column : table.getColumns() ) {
final String sqlType = column.getSqlType( metadata );
final String columnDeclaration =
column.getName()
+ " " + sqlType
+ ( column.isNullable() ? this.spannerDialect.getNullColumnString( sqlType ) : " not null" );
colsAndTypes.add( columnDeclaration );
}
ArrayList<String> statements = new ArrayList<>();
statements.add(
MessageFormat.format(
this.createTableTemplate,
context.format( table.getQualifiedTableName() ),
colsAndTypes.toString(),
primaryKeyColNames
)
);
return statements.toArray(EMPTY_STRING_ARRAY);
}
@Override
public String[] getSqlDropStrings(Table table, Metadata metadata, SqlStringGenerationContext context) {
/* Cloud Spanner requires examining the metadata to find all indexes and interleaved tables.
* These must be dropped before the given table can be dropped.
* The current implementation does not support interleaved tables.
*/
ArrayList<String> dropStrings = new ArrayList<>();
for ( Index index : table.getIndexes().values() ) {
dropStrings.add( "drop index " + index.getName() );
}
dropStrings.add( this.spannerDialect.getDropTableString( context.format( table.getQualifiedTableName() ) ) );
return dropStrings.toArray( new String[0] );
}
}
| SpannerDialectTableExporter |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/expressions/parser/exception/ExpressionParsingException.java | {
"start": 837,
"end": 985
} | class ____ extends RuntimeException {
public ExpressionParsingException(String message) {
super(message);
}
}
| ExpressionParsingException |
java | google__guava | android/guava/src/com/google/common/math/DoubleMath.java | {
"start": 1976,
"end": 19724
} | class ____ {
/*
* This method returns a value y such that rounding y DOWN (towards zero) gives the same result as
* rounding x according to the specified mode.
*/
@GwtIncompatible // #isMathematicalInteger, com.google.common.math.DoubleUtils
static double roundIntermediate(double x, RoundingMode mode) {
if (!isFinite(x)) {
throw new ArithmeticException("input is infinite or NaN");
}
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(isMathematicalInteger(x));
return x;
case FLOOR:
if (x >= 0.0 || isMathematicalInteger(x)) {
return x;
} else {
return (long) x - 1;
}
case CEILING:
if (x <= 0.0 || isMathematicalInteger(x)) {
return x;
} else {
return (long) x + 1;
}
case DOWN:
return x;
case UP:
if (isMathematicalInteger(x)) {
return x;
} else {
return (long) x + (x > 0 ? 1 : -1);
}
case HALF_EVEN:
return rint(x);
case HALF_UP:
{
double z = rint(x);
if (abs(x - z) == 0.5) {
return x + copySign(0.5, x);
} else {
return z;
}
}
case HALF_DOWN:
{
double z = rint(x);
if (abs(x - z) == 0.5) {
return x;
} else {
return z;
}
}
}
throw new AssertionError();
}
/**
* Returns the {@code int} value that is equal to {@code x} rounded with the specified rounding
* mode, if possible.
*
* @throws ArithmeticException if
* <ul>
* <li>{@code x} is infinite or NaN
* <li>{@code x}, after being rounded to a mathematical integer using the specified rounding
* mode, is either less than {@code Integer.MIN_VALUE} or greater than {@code
* Integer.MAX_VALUE}
* <li>{@code x} is not a mathematical integer and {@code mode} is {@link
* RoundingMode#UNNECESSARY}
* </ul>
*/
@GwtIncompatible // #roundIntermediate
// Whenever both tests are cheap and functional, it's faster to use &, | instead of &&, ||
@SuppressWarnings("ShortCircuitBoolean")
public static int roundToInt(double x, RoundingMode mode) {
double z = roundIntermediate(x, mode);
checkInRangeForRoundingInputs(
z > MIN_INT_AS_DOUBLE - 1.0 & z < MAX_INT_AS_DOUBLE + 1.0, x, mode);
return (int) z;
}
private static final double MIN_INT_AS_DOUBLE = -0x1p31;
private static final double MAX_INT_AS_DOUBLE = 0x1p31 - 1.0;
/**
* Returns the {@code long} value that is equal to {@code x} rounded with the specified rounding
* mode, if possible.
*
* @throws ArithmeticException if
* <ul>
* <li>{@code x} is infinite or NaN
* <li>{@code x}, after being rounded to a mathematical integer using the specified rounding
* mode, is either less than {@code Long.MIN_VALUE} or greater than {@code
* Long.MAX_VALUE}
* <li>{@code x} is not a mathematical integer and {@code mode} is {@link
* RoundingMode#UNNECESSARY}
* </ul>
*/
@GwtIncompatible // #roundIntermediate
// Whenever both tests are cheap and functional, it's faster to use &, | instead of &&, ||
@SuppressWarnings("ShortCircuitBoolean")
public static long roundToLong(double x, RoundingMode mode) {
double z = roundIntermediate(x, mode);
checkInRangeForRoundingInputs(
MIN_LONG_AS_DOUBLE - z < 1.0 & z < MAX_LONG_AS_DOUBLE_PLUS_ONE, x, mode);
return (long) z;
}
private static final double MIN_LONG_AS_DOUBLE = -0x1p63;
/*
* We cannot store Long.MAX_VALUE as a double without losing precision. Instead, we store
* Long.MAX_VALUE + 1 == -Long.MIN_VALUE, and then offset all comparisons by 1.
*/
private static final double MAX_LONG_AS_DOUBLE_PLUS_ONE = 0x1p63;
/**
* Returns the {@code BigInteger} value that is equal to {@code x} rounded with the specified
* rounding mode, if possible.
*
* @throws ArithmeticException if
* <ul>
* <li>{@code x} is infinite or NaN
* <li>{@code x} is not a mathematical integer and {@code mode} is {@link
* RoundingMode#UNNECESSARY}
* </ul>
*/
// #roundIntermediate, java.lang.Math.getExponent, com.google.common.math.DoubleUtils
@GwtIncompatible
// Whenever both tests are cheap and functional, it's faster to use &, | instead of &&, ||
@SuppressWarnings("ShortCircuitBoolean")
public static BigInteger roundToBigInteger(double x, RoundingMode mode) {
x = roundIntermediate(x, mode);
if (MIN_LONG_AS_DOUBLE - x < 1.0 & x < MAX_LONG_AS_DOUBLE_PLUS_ONE) {
return BigInteger.valueOf((long) x);
}
int exponent = getExponent(x);
long significand = getSignificand(x);
BigInteger result = BigInteger.valueOf(significand).shiftLeft(exponent - SIGNIFICAND_BITS);
return (x < 0) ? result.negate() : result;
}
/**
* Returns {@code true} if {@code x} is exactly equal to {@code 2^k} for some finite integer
* {@code k}.
*/
@GwtIncompatible // com.google.common.math.DoubleUtils
public static boolean isPowerOfTwo(double x) {
if (x > 0.0 && isFinite(x)) {
long significand = getSignificand(x);
return (significand & (significand - 1)) == 0;
}
return false;
}
/**
* Returns the base 2 logarithm of a double value.
*
* <p>Special cases:
*
* <ul>
* <li>If {@code x} is NaN or less than zero, the result is NaN.
* <li>If {@code x} is positive infinity, the result is positive infinity.
* <li>If {@code x} is positive or negative zero, the result is negative infinity.
* </ul>
*
* <p>The computed result is within 1 ulp of the exact result.
*
* <p>If the result of this method will be immediately rounded to an {@code int}, {@link
* #log2(double, RoundingMode)} is faster.
*/
public static double log2(double x) {
return log(x) / LN_2; // surprisingly within 1 ulp according to tests
}
/**
* Returns the base 2 logarithm of a double value, rounded with the specified rounding mode to an
* {@code int}.
*
* <p>Regardless of the rounding mode, this is faster than {@code (int) log2(x)}.
*
* @throws IllegalArgumentException if {@code x <= 0.0}, {@code x} is NaN, or {@code x} is
* infinite
*/
@GwtIncompatible // java.lang.Math.getExponent, com.google.common.math.DoubleUtils
// Whenever both tests are cheap and functional, it's faster to use &, | instead of &&, ||
@SuppressWarnings({"fallthrough", "ShortCircuitBoolean"})
public static int log2(double x, RoundingMode mode) {
checkArgument(x > 0.0 && isFinite(x), "x must be positive and finite");
int exponent = getExponent(x);
if (!isNormal(x)) {
return log2(x * IMPLICIT_BIT, mode) - SIGNIFICAND_BITS;
// Do the calculation on a normal value.
}
// x is positive, finite, and normal
boolean increment;
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(isPowerOfTwo(x));
// fall through
case FLOOR:
increment = false;
break;
case CEILING:
increment = !isPowerOfTwo(x);
break;
case DOWN:
increment = exponent < 0 & !isPowerOfTwo(x);
break;
case UP:
increment = exponent >= 0 & !isPowerOfTwo(x);
break;
case HALF_DOWN:
case HALF_EVEN:
case HALF_UP:
double xScaled = scaleNormalize(x);
// sqrt(2) is irrational, and the spec is relative to the "exact numerical result,"
// so log2(x) is never exactly exponent + 0.5.
increment = (xScaled * xScaled) > 2.0;
break;
default:
throw new AssertionError();
}
return increment ? exponent + 1 : exponent;
}
private static final double LN_2 = log(2);
/**
* Returns {@code true} if {@code x} represents a mathematical integer.
*
* <p>This is equivalent to, but not necessarily implemented as, the expression {@code
* !Double.isNaN(x) && !Double.isInfinite(x) && x == Math.rint(x)}.
*/
@GwtIncompatible // java.lang.Math.getExponent, com.google.common.math.DoubleUtils
public static boolean isMathematicalInteger(double x) {
return isFinite(x)
&& (x == 0.0
|| SIGNIFICAND_BITS - Long.numberOfTrailingZeros(getSignificand(x)) <= getExponent(x));
}
/**
* Returns {@code n!}, that is, the product of the first {@code n} positive integers, {@code 1} if
* {@code n == 0}, or {@code n!}, or {@link Double#POSITIVE_INFINITY} if {@code n! >
* Double.MAX_VALUE}.
*
* <p>The result is within 1 ulp of the true value.
*
* @throws IllegalArgumentException if {@code n < 0}
*/
public static double factorial(int n) {
checkNonNegative("n", n);
if (n > MAX_FACTORIAL) {
return Double.POSITIVE_INFINITY;
} else {
// Multiplying the last (n & 0xf) values into their own accumulator gives a more accurate
// result than multiplying by everySixteenthFactorial[n >> 4] directly.
double accum = 1.0;
for (int i = 1 + (n & ~0xf); i <= n; i++) {
accum *= i;
}
return accum * everySixteenthFactorial[n >> 4];
}
}
@VisibleForTesting static final int MAX_FACTORIAL = 170;
@VisibleForTesting
static final double[] everySixteenthFactorial = {
0x1.0p0,
0x1.30777758p44,
0x1.956ad0aae33a4p117,
0x1.ee69a78d72cb6p202,
0x1.fe478ee34844ap295,
0x1.c619094edabffp394,
0x1.3638dd7bd6347p498,
0x1.7cac197cfe503p605,
0x1.1e5dfc140e1e5p716,
0x1.8ce85fadb707ep829,
0x1.95d5f3d928edep945
};
/**
* Returns {@code true} if {@code a} and {@code b} are within {@code tolerance} of each other.
*
* <p>Technically speaking, this is equivalent to {@code Math.abs(a - b) <= tolerance ||
* Double.valueOf(a).equals(Double.valueOf(b))}.
*
* <p>Notable special cases include:
*
* <ul>
* <li>All NaNs are fuzzily equal.
* <li>If {@code a == b}, then {@code a} and {@code b} are always fuzzily equal.
* <li>Positive and negative zero are always fuzzily equal.
* <li>If {@code tolerance} is zero, and neither {@code a} nor {@code b} is NaN, then {@code a}
* and {@code b} are fuzzily equal if and only if {@code a == b}.
* <li>With {@link Double#POSITIVE_INFINITY} tolerance, all non-NaN values are fuzzily equal.
* <li>With finite tolerance, {@code Double.POSITIVE_INFINITY} and {@code
* Double.NEGATIVE_INFINITY} are fuzzily equal only to themselves.
* </ul>
*
* <p>This is reflexive and symmetric, but <em>not</em> transitive, so it is <em>not</em> an
* equivalence relation and <em>not</em> suitable for use in {@link Object#equals}
* implementations.
*
* @throws IllegalArgumentException if {@code tolerance} is {@code < 0} or NaN
* @since 13.0
*/
public static boolean fuzzyEquals(double a, double b, double tolerance) {
MathPreconditions.checkNonNegative("tolerance", tolerance);
return Math.copySign(a - b, 1.0) <= tolerance
// copySign(x, 1.0) is a branch-free version of abs(x), but with different NaN semantics
|| (a == b) // needed to ensure that infinities equal themselves
|| (Double.isNaN(a) && Double.isNaN(b));
}
/**
* Compares {@code a} and {@code b} "fuzzily," with a tolerance for nearly-equal values.
*
* <p>This method is equivalent to {@code fuzzyEquals(a, b, tolerance) ? 0 : Double.compare(a,
* b)}. In particular, like {@link Double#compare(double, double)}, it treats all NaN values as
* equal and greater than all other values (including {@link Double#POSITIVE_INFINITY}).
*
* <p>This is <em>not</em> a total ordering and is <em>not</em> suitable for use in {@link
* Comparable#compareTo} implementations. In particular, it is not transitive.
*
* @throws IllegalArgumentException if {@code tolerance} is {@code < 0} or NaN
* @since 13.0
*/
public static int fuzzyCompare(double a, double b, double tolerance) {
if (fuzzyEquals(a, b, tolerance)) {
return 0;
} else if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else {
return Boolean.compare(Double.isNaN(a), Double.isNaN(b));
}
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values
* @throws IllegalArgumentException if {@code values} is empty or contains any non-finite value
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
// com.google.common.math.DoubleUtils
@GwtIncompatible
public static double mean(double... values) {
checkArgument(values.length > 0, "Cannot take mean of 0 values");
long count = 1;
double mean = checkFinite(values[0]);
for (int index = 1; index < values.length; ++index) {
checkFinite(values[index]);
count++;
// Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)
mean += (values[index] - mean) / count;
}
return mean;
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values
* @throws IllegalArgumentException if {@code values} is empty
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
public static double mean(int... values) {
checkArgument(values.length > 0, "Cannot take mean of 0 values");
// The upper bound on the length of an array and the bounds on the int values mean that, in
// this case only, we can compute the sum as a long without risking overflow or loss of
// precision. So we do that, as it's slightly quicker than the Knuth algorithm.
long sum = 0;
for (int index = 0; index < values.length; ++index) {
sum += values[index];
}
return (double) sum / values.length;
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values, which will be converted to {@code double} values
* (this may cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15))
* @throws IllegalArgumentException if {@code values} is empty
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
public static double mean(long... values) {
checkArgument(values.length > 0, "Cannot take mean of 0 values");
long count = 1;
double mean = values[0];
for (int index = 1; index < values.length; ++index) {
count++;
// Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)
mean += (values[index] - mean) / count;
}
return mean;
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values, which will be converted to {@code double} values
* (this may cause loss of precision)
* @throws IllegalArgumentException if {@code values} is empty or contains any non-finite value
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
// com.google.common.math.DoubleUtils
@GwtIncompatible
public static double mean(Iterable<? extends Number> values) {
return mean(values.iterator());
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values, which will be converted to {@code double} values
* (this may cause loss of precision)
* @throws IllegalArgumentException if {@code values} is empty or contains any non-finite value
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
// com.google.common.math.DoubleUtils
@GwtIncompatible
public static double mean(Iterator<? extends Number> values) {
checkArgument(values.hasNext(), "Cannot take mean of 0 values");
long count = 1;
double mean = checkFinite(values.next().doubleValue());
while (values.hasNext()) {
double value = checkFinite(values.next().doubleValue());
count++;
// Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)
mean += (value - mean) / count;
}
return mean;
}
@GwtIncompatible // com.google.common.math.DoubleUtils
@CanIgnoreReturnValue
private static double checkFinite(double argument) {
checkArgument(isFinite(argument));
return argument;
}
private DoubleMath() {}
}
| DoubleMath |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/strategy/InputConsumableDecider.java | {
"start": 1169,
"end": 2243
} | interface ____ {
/**
* Determining whether the input of an execution vertex is consumable.
*
* @param executionVertex to be determined whether it's input is consumable.
* @param verticesToSchedule vertices that are not yet scheduled but already decided to be
* scheduled.
* @param consumableStatusCache a cache for {@link ConsumedPartitionGroup} consumable status.
* This is to avoid repetitive computation.
*/
boolean isInputConsumable(
SchedulingExecutionVertex executionVertex,
Set<ExecutionVertexID> verticesToSchedule,
Map<ConsumedPartitionGroup, Boolean> consumableStatusCache);
/**
* Determining whether the consumed partition group is consumable based on finished producers.
*
* @param consumedPartitionGroup to be determined whether it is consumable.
*/
boolean isConsumableBasedOnFinishedProducers(
final ConsumedPartitionGroup consumedPartitionGroup);
/** Factory for {@link InputConsumableDecider}. */
| InputConsumableDecider |
java | apache__camel | dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/download/MainHttpServerFactory.java | {
"start": 1287,
"end": 3803
} | class ____ {
public static MainHttpServer setupHttpServer(CamelContext camelContext, boolean silent) {
// if we only use management then there is no main server already
MainHttpServer server = camelContext.hasService(MainHttpServer.class);
ManagementHttpServer managementHttpServer = camelContext.hasService(ManagementHttpServer.class);
// but if none has already been created, and we are using platform-http, then we need an embedded default http server
if (server == null && managementHttpServer == null) {
// set up a default http server on configured port if not already done
HttpServerConfigurationProperties config = new HttpServerConfigurationProperties(null);
String port = CamelJBangSettingsHelper.readSettings("camel.server.port");
if (port != null) {
config.setPort(CamelContextHelper.parseInt(camelContext, port));
} else {
CamelJBangSettingsHelper.writeSettingsIfNotExists("camel.server.port",
String.valueOf(config.getPort()));
}
if (!silent) {
try {
// enable http server if not silent
org.apache.camel.main.MainHttpServerFactory factory = resolveMainHttpServerFactory(camelContext);
Service httpServer = factory.newHttpServer(camelContext, config);
camelContext.addService(httpServer, true, true);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
return server;
}
private static org.apache.camel.main.MainHttpServerFactory resolveMainHttpServerFactory(CamelContext camelContext)
throws Exception {
// lookup in service registry first
org.apache.camel.main.MainHttpServerFactory answer
= camelContext.getRegistry().findSingleByType(org.apache.camel.main.MainHttpServerFactory.class);
if (answer == null) {
answer = camelContext.getCamelContextExtension().getBootstrapFactoryFinder()
.newInstance(MainConstants.PLATFORM_HTTP_SERVER, org.apache.camel.main.MainHttpServerFactory.class)
.orElseThrow(() -> new IllegalArgumentException(
"Cannot find MainHttpServerFactory on classpath. Add camel-platform-http-main to classpath."));
}
return answer;
}
}
| MainHttpServerFactory |
java | apache__flink | flink-python/src/main/java/org/apache/flink/table/runtime/operators/python/aggregate/arrow/batch/AbstractBatchArrowPythonAggregateFunctionOperator.java | {
"start": 1734,
"end": 4617
} | class ____
extends AbstractArrowPythonAggregateFunctionOperator {
private static final long serialVersionUID = 1L;
private final GeneratedProjection groupKeyGeneratedProjection;
private final GeneratedProjection groupSetGeneratedProjection;
/** Last group key value. */
transient BinaryRowData lastGroupKey;
/** Last group set value. */
transient BinaryRowData lastGroupSet;
/** The Projection which projects the group key fields from the input row. */
transient Projection<RowData, BinaryRowData> groupKeyProjection;
/**
* The Projection which projects the group set fields (group key and aux group key) from the
* input row.
*/
transient Projection<RowData, BinaryRowData> groupSetProjection;
AbstractBatchArrowPythonAggregateFunctionOperator(
Configuration config,
PythonFunctionInfo[] pandasAggFunctions,
RowType inputType,
RowType udfInputType,
RowType udfOutputType,
GeneratedProjection inputGeneratedProjection,
GeneratedProjection groupKeyGeneratedProjection,
GeneratedProjection groupSetGeneratedProjection) {
super(
config,
pandasAggFunctions,
inputType,
udfInputType,
udfOutputType,
inputGeneratedProjection);
this.groupKeyGeneratedProjection = Preconditions.checkNotNull(groupKeyGeneratedProjection);
this.groupSetGeneratedProjection = Preconditions.checkNotNull(groupSetGeneratedProjection);
}
@SuppressWarnings("unchecked")
@Override
public void open() throws Exception {
super.open();
groupKeyProjection =
groupKeyGeneratedProjection.newInstance(
Thread.currentThread().getContextClassLoader());
groupSetProjection =
groupSetGeneratedProjection.newInstance(
Thread.currentThread().getContextClassLoader());
lastGroupKey = null;
lastGroupSet = null;
}
@Override
public void endInput() throws Exception {
invokeCurrentBatch();
super.endInput();
}
@Override
public void finish() throws Exception {
invokeCurrentBatch();
super.finish();
}
protected abstract void invokeCurrentBatch() throws Exception;
boolean isNewKey(BinaryRowData currentKey) {
return lastGroupKey == null
|| (lastGroupKey.getSizeInBytes() != currentKey.getSizeInBytes())
|| !(BinaryRowDataUtil.byteArrayEquals(
currentKey.getSegments()[0].getHeapMemory(),
lastGroupKey.getSegments()[0].getHeapMemory(),
currentKey.getSizeInBytes()));
}
}
| AbstractBatchArrowPythonAggregateFunctionOperator |
java | mapstruct__mapstruct | processor/src/test/resources/fixtures/org/mapstruct/ap/test/nestedbeans/mixed/FishTankMapperWithDocumentImpl.java | {
"start": 1099,
"end": 3437
} | class ____ implements FishTankMapperWithDocument {
@Override
public FishTankWithNestedDocumentDto map(FishTank source) {
if ( source == null ) {
return null;
}
FishTankWithNestedDocumentDto fishTankWithNestedDocumentDto = new FishTankWithNestedDocumentDto();
fishTankWithNestedDocumentDto.setFish( fishToFishDto( source.getFish() ) );
fishTankWithNestedDocumentDto.setQuality( waterQualityToWaterQualityWithDocumentDto( source.getQuality() ) );
fishTankWithNestedDocumentDto.setName( source.getName() );
return fishTankWithNestedDocumentDto;
}
protected FishDto fishToFishDto(Fish fish) {
if ( fish == null ) {
return null;
}
FishDto fishDto = new FishDto();
fishDto.setKind( fish.getType() );
fishDto.setName( "Jaws" );
return fishDto;
}
protected WaterQualityOrganisationDto waterQualityReportToWaterQualityOrganisationDto(WaterQualityReport waterQualityReport) {
if ( waterQualityReport == null ) {
return null;
}
WaterQualityOrganisationDto waterQualityOrganisationDto = new WaterQualityOrganisationDto();
waterQualityOrganisationDto.setName( "NoIdeaInc" );
return waterQualityOrganisationDto;
}
protected WaterQualityReportDto waterQualityReportToWaterQualityReportDto(WaterQualityReport waterQualityReport) {
if ( waterQualityReport == null ) {
return null;
}
WaterQualityReportDto waterQualityReportDto = new WaterQualityReportDto();
waterQualityReportDto.setVerdict( waterQualityReport.getVerdict() );
waterQualityReportDto.setOrganisation( waterQualityReportToWaterQualityOrganisationDto( waterQualityReport ) );
return waterQualityReportDto;
}
protected WaterQualityWithDocumentDto waterQualityToWaterQualityWithDocumentDto(WaterQuality waterQuality) {
if ( waterQuality == null ) {
return null;
}
WaterQualityWithDocumentDto waterQualityWithDocumentDto = new WaterQualityWithDocumentDto();
waterQualityWithDocumentDto.setDocument( waterQualityReportToWaterQualityReportDto( waterQuality.getReport() ) );
return waterQualityWithDocumentDto;
}
}
| FishTankMapperWithDocumentImpl |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/search/SearchReply.java | {
"start": 827,
"end": 4440
} | class ____<K, V> {
private long count;
private final List<SearchResult<K, V>> results;
private Long cursorId;
private final List<V> warnings = new ArrayList<>();
/**
* Creates a new empty SearchReply instance.
*/
public SearchReply() {
this.count = 0;
this.results = new ArrayList<>();
this.cursorId = null;
}
/**
* Creates a new SearchReply instance with the specified count and results.
*
* @param count the total number of matching documents
* @param results the list of search result documents
*/
SearchReply(long count, List<SearchResult<K, V>> results) {
this.count = count;
this.results = new ArrayList<>(results);
this.cursorId = null;
}
/**
* Gets the total number of matching documents.
* <p>
* This represents the total count of documents that match the search query, which may be larger than the number of results
* returned if LIMIT was used.
*
* @return the total number of matching documents
*/
public long getCount() {
return count;
}
/**
* Sets the total number of matching documents.
*
* @param count the total number of matching documents
*/
void setCount(long count) {
this.count = count;
}
/**
* Gets the list of search result documents.
* <p>
* Each result contains the document ID and optionally the document fields, score, payload, and sort keys depending on the
* search arguments used.
*
* @return an unmodifiable list of search result documents
*/
public List<SearchResult<K, V>> getResults() {
return Collections.unmodifiableList(results);
}
/**
* Adds a search result document to the results list.
*
* @param result the search result document to add
*/
public void addResult(SearchResult<K, V> result) {
this.results.add(result);
}
/**
* Gets the number of search result documents returned.
* <p>
* This may be different from {@link #getCount()} if LIMIT was used in the search.
*
* @return the number of search result documents returned
*/
public int size() {
return results.size();
}
/**
* Checks if the search results are empty.
*
* @return true if no search result documents were returned, false otherwise
*/
public boolean isEmpty() {
return results.isEmpty();
}
/**
* Gets the cursor ID for paginated results.
* <p>
* This is only available when using cursor-based pagination with FT.AGGREGATE WITHCURSOR. A cursor ID of 0 indicates that
* there are no more results to fetch.
*
* @return the cursor ID, or null if cursor-based pagination is not being used
*/
public Long getCursorId() {
return cursorId;
}
/**
* @return a {@link List} of all the warnings generated during the execution of this search
*/
public List<V> getWarnings() {
return this.warnings;
}
/**
* Sets the cursor ID for paginated results.
*
* @param cursorId the cursor ID
*/
void setCursorId(Long cursorId) {
this.cursorId = cursorId;
}
/**
* Add a new warning to the list of warnings
*
* @param v the warning to add
*/
void addWarning(V v) {
this.warnings.add(v);
}
/**
* Represents a single search result document.
*
* @param <K> Key type.
* @param <V> Value type.
*/
public static | SearchReply |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/aggregate/DefaultAggregateController.java | {
"start": 973,
"end": 2146
} | class ____ implements AggregateController {
private AggregateProcessor processor;
@Override
public void onStart(AggregateProcessor processor) {
this.processor = processor;
}
@Override
public void onStop(AggregateProcessor processor) {
this.processor = null;
}
@Override
public int forceCompletionOfGroup(String key) {
if (processor != null) {
return processor.forceCompletionOfGroup(key);
} else {
return 0;
}
}
@Override
public int forceCompletionOfAllGroups() {
if (processor != null) {
return processor.forceCompletionOfAllGroups();
} else {
return 0;
}
}
@Override
public int forceDiscardingOfGroup(String key) {
if (processor != null) {
return processor.forceDiscardingOfGroup(key);
} else {
return 0;
}
}
@Override
public int forceDiscardingOfAllGroups() {
if (processor != null) {
return processor.forceDiscardingOfAllGroups();
} else {
return 0;
}
}
}
| DefaultAggregateController |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-project-interpolation/src/main/java/org/apache/maven/plugin/coreit/PropertyInterpolationVerifierMojo.java | {
"start": 1398,
"end": 2583
} | class ____ extends AbstractMojo {
/**
* The current Maven project.
*/
@Parameter(defaultValue = "${project}")
private MavenProject project;
/**
* The properties.
*/
@Parameter(property = "clsldr.pluginClassLoaderOutput")
private Properties properties;
public void execute() throws MojoExecutionException, MojoFailureException {
Model model = project.getModel();
if (properties == null) {
return;
}
Enumeration e = properties.propertyNames();
while (e.hasMoreElements()) {
String name = (String) e.nextElement();
String value = properties.getProperty(name);
if (!value.equals(model.getProperties().getProperty(name))) {
throw new MojoExecutionException("Properties do not match: Name = " + name + ", Value = " + value);
}
if (value.contains("${")) {
throw new MojoExecutionException("Unresolved value: Name = " + name + ", Value = " + value);
}
getLog().info("Property match: Name = " + name + ", Value = " + value);
}
}
}
| PropertyInterpolationVerifierMojo |
java | elastic__elasticsearch | test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/OS.java | {
"start": 1180,
"end": 2523
} | class ____<T> {
private final Map<OS, Supplier<? extends T>> conditions = new EnumMap<>(OS.class);
public Conditional<T> onWindows(Supplier<? extends T> supplier) {
conditions.put(WINDOWS, supplier);
return this;
}
public Conditional<T> onLinux(Supplier<? extends T> supplier) {
conditions.put(LINUX, supplier);
return this;
}
public Conditional<T> onMac(Supplier<? extends T> supplier) {
conditions.put(MAC, supplier);
return this;
}
public Conditional<T> onUnix(Supplier<? extends T> supplier) {
conditions.put(MAC, supplier);
conditions.put(LINUX, supplier);
return this;
}
T supply() {
Set<OS> missingOS = EnumSet.allOf(OS.class);
missingOS.removeAll(conditions.keySet());
if (missingOS.isEmpty() == false) {
throw new IllegalArgumentException("No condition specified for " + missingOS);
}
return conditions.get(OS.current()).get();
}
}
public static <T> T conditional(Consumer<Conditional<T>> config) {
Conditional<T> conditional = new Conditional<>();
config.accept(conditional);
return conditional.supply();
}
}
| Conditional |
java | apache__spark | sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java | {
"start": 19060,
"end": 20071
} | class ____ implements ParquetVectorUpdater {
@Override
public void readValues(
int total,
int offset,
WritableColumnVector values,
VectorizedValuesReader valuesReader) {
valuesReader.readUnsignedIntegers(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipIntegers(total);
}
@Override
public void readValue(
int offset,
WritableColumnVector values,
VectorizedValuesReader valuesReader) {
values.putLong(offset, Integer.toUnsignedLong(valuesReader.readInteger()));
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putLong(offset, Integer.toUnsignedLong(
dictionary.decodeToInt(dictionaryIds.getDictId(offset))));
}
}
private static | UnsignedIntegerUpdater |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/share/persister/NoOpStatePersister.java | {
"start": 1068,
"end": 6275
} | class ____ implements Persister {
public NoOpStatePersister() {
}
@Override
public CompletableFuture<InitializeShareGroupStateResult> initializeState(InitializeShareGroupStateParameters request) {
GroupTopicPartitionData<PartitionStateData> reqData = request.groupTopicPartitionData();
List<TopicData<PartitionErrorData>> resultArgs = new ArrayList<>();
for (TopicData<PartitionStateData> topicData : reqData.topicsData()) {
resultArgs.add(new TopicData<>(topicData.topicId(), topicData.partitions().stream()
.map(partStateData -> PartitionFactory.newPartitionErrorData(partStateData.partition(), PartitionFactory.DEFAULT_ERROR_CODE, PartitionFactory.DEFAULT_ERR_MESSAGE))
.collect(Collectors.toList())));
}
return CompletableFuture.completedFuture(new InitializeShareGroupStateResult.Builder().setTopicsData(resultArgs).build());
}
@Override
public CompletableFuture<ReadShareGroupStateResult> readState(ReadShareGroupStateParameters request) {
GroupTopicPartitionData<PartitionIdLeaderEpochData> reqData = request.groupTopicPartitionData();
List<TopicData<PartitionAllData>> resultArgs = new ArrayList<>();
// we will fetch topic and partition info from the request and
// return valid but default response (keep partition id and topic from request but initialize other
// values as default).
for (TopicData<PartitionIdLeaderEpochData> topicData : reqData.topicsData()) {
resultArgs.add(new TopicData<>(topicData.topicId(), topicData.partitions().stream().
map(partitionIdData -> PartitionFactory.newPartitionAllData(
partitionIdData.partition(), PartitionFactory.DEFAULT_STATE_EPOCH, PartitionFactory.UNINITIALIZED_START_OFFSET, PartitionFactory.DEFAULT_ERROR_CODE, PartitionFactory.DEFAULT_ERR_MESSAGE, List.of()))
.collect(Collectors.toList())));
}
return CompletableFuture.completedFuture(new ReadShareGroupStateResult.Builder().setTopicsData(resultArgs).build());
}
@Override
public CompletableFuture<WriteShareGroupStateResult> writeState(WriteShareGroupStateParameters request) {
GroupTopicPartitionData<PartitionStateBatchData> reqData = request.groupTopicPartitionData();
List<TopicData<PartitionErrorData>> resultArgs = new ArrayList<>();
for (TopicData<PartitionStateBatchData> topicData : reqData.topicsData()) {
resultArgs.add(new TopicData<>(topicData.topicId(), topicData.partitions().stream()
.map(batch -> PartitionFactory.newPartitionErrorData(batch.partition(), PartitionFactory.DEFAULT_ERROR_CODE, PartitionFactory.DEFAULT_ERR_MESSAGE))
.collect(Collectors.toList())));
}
return CompletableFuture.completedFuture(new WriteShareGroupStateResult.Builder().setTopicsData(resultArgs).build());
}
@Override
public CompletableFuture<DeleteShareGroupStateResult> deleteState(DeleteShareGroupStateParameters request) {
GroupTopicPartitionData<PartitionIdData> reqData = request.groupTopicPartitionData();
List<TopicData<PartitionErrorData>> resultArgs = new ArrayList<>();
for (TopicData<PartitionIdData> topicData : reqData.topicsData()) {
resultArgs.add(new TopicData<>(topicData.topicId(), topicData.partitions().stream()
.map(batch -> PartitionFactory.newPartitionErrorData(batch.partition(), PartitionFactory.DEFAULT_ERROR_CODE, PartitionFactory.DEFAULT_ERR_MESSAGE))
.collect(Collectors.toList())));
}
return CompletableFuture.completedFuture(new DeleteShareGroupStateResult.Builder().setTopicsData(resultArgs).build());
}
@Override
public CompletableFuture<ReadShareGroupStateSummaryResult> readSummary(ReadShareGroupStateSummaryParameters request) {
GroupTopicPartitionData<PartitionIdLeaderEpochData> reqData = request.groupTopicPartitionData();
List<TopicData<PartitionStateSummaryData>> resultArgs = new ArrayList<>();
// we will fetch topic and partition info from the request and
// return valid but default response (keep partition id and topic from request but initialize other
// values as default).
for (TopicData<PartitionIdLeaderEpochData> topicData : reqData.topicsData()) {
resultArgs.add(new TopicData<>(topicData.topicId(), topicData.partitions().stream().
map(partitionIdData -> PartitionFactory.newPartitionStateSummaryData(
partitionIdData.partition(), PartitionFactory.DEFAULT_STATE_EPOCH, PartitionFactory.UNINITIALIZED_START_OFFSET,
PartitionFactory.UNINITIALIZED_DELIVERY_COMPLETE_COUNT, PartitionFactory.DEFAULT_LEADER_EPOCH,
PartitionFactory.DEFAULT_ERROR_CODE, PartitionFactory.DEFAULT_ERR_MESSAGE))
.collect(Collectors.toList())));
}
return CompletableFuture.completedFuture(new ReadShareGroupStateSummaryResult.Builder().setTopicsData(resultArgs).build());
}
@Override
public void stop() {
//noop
}
}
| NoOpStatePersister |
java | elastic__elasticsearch | x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java | {
"start": 892,
"end": 1539
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(DELETE, "/_ccr/auto_follow/{name}"));
}
@Override
public String getName() {
return "ccr_delete_auto_follow_pattern_action";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
final var request = new Request(getMasterNodeTimeout(restRequest), TimeValue.THIRTY_SECONDS, restRequest.param("name"));
return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel));
}
}
| RestDeleteAutoFollowPatternAction |
java | resilience4j__resilience4j | resilience4j-vavr/src/main/java/io/github/resilience4j/ratelimiter/VavrRateLimiter.java | {
"start": 981,
"end": 12704
} | interface ____ {
/**
* Creates a supplier which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param supplier the original supplier
* @param <T> the type of results supplied supplier
* @return a supplier which is restricted by a RateLimiter.
*/
static <T> CheckedFunction0<T> decorateCheckedSupplier(RateLimiter rateLimiter, CheckedFunction0<T> supplier) {
return decorateCheckedSupplier(rateLimiter, 1, supplier);
}
/**
* Creates a supplier which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param permits number of permits that this call requires
* @param supplier the original supplier
* @param <T> the type of results supplied supplier
* @return a supplier which is restricted by a RateLimiter.
*/
static <T> CheckedFunction0<T> decorateCheckedSupplier(RateLimiter rateLimiter, int permits,
CheckedFunction0<T> supplier) {
return () -> {
waitForPermission(rateLimiter, permits);
try {
T result = supplier.apply();
rateLimiter.onResult(result);
return result;
} catch (Exception exception) {
rateLimiter.onError(exception);
throw exception;
}
};
}
/**
* Creates a runnable which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param runnable the original runnable
* @return a runnable which is restricted by a RateLimiter.
*/
static CheckedRunnable decorateCheckedRunnable(RateLimiter rateLimiter, CheckedRunnable runnable) {
return decorateCheckedRunnable(rateLimiter, 1, runnable);
}
/**
* Creates a runnable which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param permits number of permits that this call requires
* @param runnable the original runnable
* @return a runnable which is restricted by a RateLimiter.
*/
static CheckedRunnable decorateCheckedRunnable(RateLimiter rateLimiter, int permits, CheckedRunnable runnable) {
return () -> {
waitForPermission(rateLimiter, permits);
try {
runnable.run();
rateLimiter.onSuccess();
} catch (Exception exception) {
rateLimiter.onError(exception);
throw exception;
}
};
}
/**
* Creates a function which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param function the original function
* @param <T> the type of function argument
* @param <R> the type of function results
* @return a function which is restricted by a RateLimiter.
*/
static <T, R> CheckedFunction1<T, R> decorateCheckedFunction(RateLimiter rateLimiter,
CheckedFunction1<T, R> function) {
return decorateCheckedFunction(rateLimiter, 1, function);
}
/**
* Creates a function which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param permits number of permits that this call requires
* @param function the original function
* @param <T> the type of function argument
* @param <R> the type of function results
* @return a function which is restricted by a RateLimiter.
*/
static <T, R> CheckedFunction1<T, R> decorateCheckedFunction(RateLimiter rateLimiter,
int permits, CheckedFunction1<T, R> function) {
return (T t) -> decorateCheckedSupplier(rateLimiter, permits, () -> function.apply(t))
.apply();
}
/**
* Creates a function which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param permitsCalculator calculates the number of permits required by this call based on the
* functions argument
* @param function the original function
* @param <T> the type of function argument
* @param <R> the type of function results
* @return a function which is restricted by a RateLimiter.
*/
static <T, R> CheckedFunction1<T, R> decorateCheckedFunction(RateLimiter rateLimiter,
Function<T, Integer> permitsCalculator, CheckedFunction1<T, R> function) {
return (T t) -> decorateCheckedFunction(rateLimiter, permitsCalculator.apply(t), function)
.apply(t);
}
/**
* Creates a supplier which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param supplier the original supplier
* @param <T> the type of results supplied supplier
* @return a supplier which is restricted by a RateLimiter.
*/
static <T> Supplier<Try<T>> decorateTrySupplier(RateLimiter rateLimiter, Supplier<Try<T>> supplier) {
return decorateTrySupplier(rateLimiter, 1, supplier);
}
/**
* Creates a supplier which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param permits number of permits that this call requires
* @param supplier the original supplier
* @param <T> the type of results supplied supplier
* @return a supplier which is restricted by a RateLimiter.
*/
static <T> Supplier<Try<T>> decorateTrySupplier(RateLimiter rateLimiter, int permits, Supplier<Try<T>> supplier) {
return () -> {
try {
waitForPermission(rateLimiter, permits);
try {
Try<T> result = supplier.get();
if (result.isSuccess()) {
rateLimiter.onResult(result.get());
} else {
rateLimiter.onError(result.getCause());
}
return result;
} catch (Exception exception) {
rateLimiter.onError(exception);
throw exception;
}
} catch (RequestNotPermitted requestNotPermitted) {
return Try.failure(requestNotPermitted);
}
};
}
/**
* Creates a supplier which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param supplier the original supplier
* @param <T> the type of results supplied supplier
* @return a supplier which is restricted by a RateLimiter.
*/
static <T> Supplier<Either<Exception, T>> decorateEitherSupplier(RateLimiter rateLimiter,
Supplier<Either<? extends Exception, T>> supplier) {
return decorateEitherSupplier(rateLimiter, 1, supplier);
}
/**
* Creates a supplier which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param permits number of permits that this call requires
* @param supplier the original supplier
* @param <T> the type of results supplied supplier
* @return a supplier which is restricted by a RateLimiter.
*/
static <T> Supplier<Either<Exception, T>> decorateEitherSupplier(RateLimiter rateLimiter,
int permits, Supplier<Either<? extends Exception, T>> supplier) {
return () -> {
try {
waitForPermission(rateLimiter, permits);
try {
Either<? extends Exception, T> result = supplier.get();
if (result.isRight()) {
rateLimiter.onResult(result.get());
} else {
rateLimiter.onError(result.getLeft());
}
return Either.narrow(result);
} catch (Exception exception) {
rateLimiter.onError(exception);
throw exception;
}
} catch (RequestNotPermitted requestNotPermitted) {
return Either.left(requestNotPermitted);
}
};
}
/**
* Decorates and executes the decorated Supplier.
*
* @param supplier the original Supplier
* @param <T> the type of results supplied by this supplier
* @return the result of the decorated Supplier.
*/
static <T> Try<T> executeTrySupplier(RateLimiter rateLimiter,Supplier<Try<T>> supplier) {
return executeTrySupplier(rateLimiter,1, supplier);
}
/**
* Decorates and executes the decorated Supplier.
*
* @param permits number of permits that this call requires
* @param supplier the original Supplier
* @param <T> the type of results supplied by this supplier
* @return the result of the decorated Supplier.
*/
static <T> Try<T> executeTrySupplier(RateLimiter rateLimiter, int permits, Supplier<Try<T>> supplier) {
return decorateTrySupplier(rateLimiter, permits, supplier).get();
}
/**
* Decorates and executes the decorated Supplier.
*
* @param supplier the original Supplier
* @param <T> the type of results supplied by this supplier
* @return the result of the decorated Supplier.
*/
static <T> Either<Exception, T> executeEitherSupplier(RateLimiter rateLimiter,
Supplier<Either<? extends Exception, T>> supplier) {
return executeEitherSupplier(rateLimiter,1, supplier);
}
/**
* Decorates and executes the decorated Supplier.
*
* @param permits number of permits that this call requires
* @param supplier the original Supplier
* @param <T> the type of results supplied by this supplier
* @return the result of the decorated Supplier.
*/
static <T> Either<Exception, T> executeEitherSupplier(RateLimiter rateLimiter, int permits,
Supplier<Either<? extends Exception, T>> supplier) {
return decorateEitherSupplier(rateLimiter, permits, supplier).get();
}
/**
* Decorates and executes the decorated Supplier.
*
* @param checkedSupplier the original Supplier
* @param <T> the type of results supplied by this supplier
* @return the result of the decorated Supplier.
* @throws Throwable if something goes wrong applying this function to the given arguments
*/
static <T> T executeCheckedSupplier(RateLimiter rateLimiter, CheckedFunction0<T> checkedSupplier) throws Throwable {
return executeCheckedSupplier(rateLimiter,1, checkedSupplier);
}
/**
* Decorates and executes the decorated Supplier.
*
* @param permits number of permits that this call requires
* @param checkedSupplier the original Supplier
* @param <T> the type of results supplied by this supplier
* @return the result of the decorated Supplier.
* @throws Throwable if something goes wrong applying this function to the given arguments
*/
static <T> T executeCheckedSupplier(RateLimiter rateLimiter, int permits, CheckedFunction0<T> checkedSupplier)
throws Throwable {
return decorateCheckedSupplier(rateLimiter, permits, checkedSupplier).apply();
}
}
| VavrRateLimiter |
java | spring-projects__spring-boot | module/spring-boot-graphql/src/test/java/org/springframework/boot/graphql/autoconfigure/data/GraphQlQueryByExampleAutoConfigurationTests.java | {
"start": 3171,
"end": 3441
} | class ____ {
@Bean
MockRepository mockRepository() {
MockRepository mockRepository = mock(MockRepository.class);
given(mockRepository.findBy(any(), any())).willReturn(Optional.of(book));
return mockRepository;
}
}
@GraphQlRepository
| MockRepositoryConfig |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/cglib/core/DefaultGeneratorStrategy.java | {
"start": 711,
"end": 1358
} | class ____ implements GeneratorStrategy {
public static final DefaultGeneratorStrategy INSTANCE = new DefaultGeneratorStrategy();
@Override
public byte[] generate(ClassGenerator cg) throws Exception {
DebuggingClassWriter cw = getClassVisitor();
transform(cg).generateClass(cw);
return transform(cw.toByteArray());
}
protected DebuggingClassWriter getClassVisitor() throws Exception {
return new DebuggingClassWriter(ClassWriter.COMPUTE_FRAMES);
}
protected final ClassWriter getClassWriter() {
// Cause compile / runtime errors for people who implemented the old
// | DefaultGeneratorStrategy |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng8653AfterAndEachPhasesWithConcurrentBuilderTest.java | {
"start": 1114,
"end": 2283
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify the dependency management of the consumer POM is computed correctly
*/
@Test
void testIt() throws Exception {
Path basedir = extractResources("/mng-8653").getAbsoluteFile().toPath();
Verifier verifier = newVerifier(basedir.toString());
verifier.addCliArguments("compile", "-b", "concurrent", "-T8");
verifier.execute();
verifier.verifyErrorFreeLog();
List<String> lines = verifier.loadLogLines();
List<String> hallo = lines.stream().filter(l -> l.contains("Hallo")).toList();
// Verify parent's before:all is first
assertTrue(
hallo.get(0).contains("'before:all' phase from 'parent'"),
"First line should be parent's before:all but was: " + hallo.get(0));
// Verify parent's after:all is last
assertTrue(
hallo.get(hallo.size() - 1).contains("'after:all' phase from 'parent'"),
"Last line should be parent's after:all but was: " + hallo.get(hallo.size() - 1));
}
}
| MavenITmng8653AfterAndEachPhasesWithConcurrentBuilderTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskExecutor.java | {
"start": 115752,
"end": 118584
} | class ____
implements RegistrationConnectionListener<
TaskExecutorToResourceManagerConnection,
TaskExecutorRegistrationSuccess,
TaskExecutorRegistrationRejection> {
@Override
public void onRegistrationSuccess(
TaskExecutorToResourceManagerConnection connection,
TaskExecutorRegistrationSuccess success) {
final ResourceID resourceManagerId = success.getResourceManagerId();
final InstanceID taskExecutorRegistrationId = success.getRegistrationId();
final ClusterInformation clusterInformation = success.getClusterInformation();
final ResourceManagerGateway resourceManagerGateway = connection.getTargetGateway();
byte[] tokens = success.getInitialTokens();
if (tokens != null) {
try {
log.info("Receive initial delegation tokens from resource manager");
delegationTokenReceiverRepository.onNewTokensObtained(tokens);
} catch (Throwable t) {
log.error("Could not update delegation tokens.", t);
ExceptionUtils.rethrowIfFatalError(t);
}
}
runAsync(
() -> {
// filter out outdated connections
//noinspection ObjectEquality
if (resourceManagerConnection == connection) {
try {
establishResourceManagerConnection(
resourceManagerGateway,
resourceManagerId,
taskExecutorRegistrationId,
clusterInformation);
} catch (Throwable t) {
log.error(
"Establishing Resource Manager connection in Task Executor failed",
t);
}
}
});
}
@Override
public void onRegistrationFailure(Throwable failure) {
onFatalError(failure);
}
@Override
public void onRegistrationRejection(
String targetAddress, TaskExecutorRegistrationRejection rejection) {
onFatalError(
new FlinkException(
String.format(
"The TaskExecutor's registration at the ResourceManager %s has been rejected: %s",
targetAddress, rejection)));
}
}
private final | ResourceManagerRegistrationListener |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/converter/xml/AbstractJaxb2HttpMessageConverter.java | {
"start": 1735,
"end": 2676
} | class ____ create the marshaller for
* @return the {@code Marshaller}
* @throws HttpMessageConversionException in case of JAXB errors
*/
protected final Marshaller createMarshaller(Class<?> clazz) {
try {
JAXBContext jaxbContext = getJaxbContext(clazz);
Marshaller marshaller = jaxbContext.createMarshaller();
customizeMarshaller(marshaller);
return marshaller;
}
catch (JAXBException ex) {
throw new HttpMessageConversionException(
"Could not create Marshaller for class [" + clazz + "]: " + ex.getMessage(), ex);
}
}
/**
* Customize the {@link Marshaller} created by this
* message converter before using it to write the object to the output.
* @param marshaller the marshaller to customize
* @since 4.0.3
* @see #createMarshaller(Class)
*/
protected void customizeMarshaller(Marshaller marshaller) {
}
/**
* Create a new {@link Unmarshaller} for the given class.
* @param clazz the | to |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/JavaSerializerUpgradeTest.java | {
"start": 1377,
"end": 2322
} | class ____ extends TypeSerializerUpgradeTestBase<Serializable, Serializable> {
private static final String SPEC_NAME = "java-serializer";
public Collection<TestSpecification<?, ?>> createTestSpecifications(FlinkVersion flinkVersion)
throws Exception {
ArrayList<TestSpecification<?, ?>> testSpecifications = new ArrayList<>();
testSpecifications.add(
new TestSpecification<>(
SPEC_NAME,
flinkVersion,
JavaSerializerSetup.class,
JavaSerializerVerifier.class));
return testSpecifications;
}
// ----------------------------------------------------------------------------------------------
// Specification for "java-serializer"
// ----------------------------------------------------------------------------------------------
/**
* This | JavaSerializerUpgradeTest |
java | quarkusio__quarkus | extensions/spring-scheduled/deployment/src/test/java/io/quarkus/spring/scheduled/deployment/SpringScheduledProcessorTest.java | {
"start": 5370,
"end": 5905
} | class ____ {
@Scheduled(fixedRate = 1000, initialDelay = 1000)
void checkEverySecondWithDelay() {
}
@Scheduled(fixedRateString = "1000", initialDelayString = "1000")
void checkEverySecondWithDelayString() {
}
@Scheduled(fixedRateString = "1000", initialDelayString = "invalid format")
void invalidFormatInitialDelay() {
}
@Scheduled(fixedRateString = "invalid format")
void invalidFormatFixedRate() {
}
}
}
| BeanWithScheduledMethods |
java | apache__camel | components/camel-netty/src/test/java/org/apache/camel/component/netty/NettySSLTest.java | {
"start": 1372,
"end": 3811
} | class ____ extends BaseNettyTest {
@BindToRegistry("ksf")
public File loadKeystoreKsf() {
return new File("src/test/resources/keystore.jks");
}
@BindToRegistry("tsf")
public File loadKeystoreTsf() {
return new File("src/test/resources/keystore.jks");
}
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testSSLInOutWithNettyConsumer() throws Exception {
context.addRoutes(new RouteBuilder() {
public void configure() {
// needClientAuth=true so we can get the client certificate
// details
from("netty:tcp://127.0.0.1:{{port}}?sync=true&ssl=true&passphrase=changeit&keyStoreResource=#ksf&trustStoreResource=#tsf&needClientAuth=true")
.process(new Processor() {
public void process(Exchange exchange) throws Exception {
SSLSession session
= exchange.getIn().getHeader(NettyConstants.NETTY_SSL_SESSION, SSLSession.class);
if (session != null) {
X509Certificate cert = (X509Certificate) session.getPeerCertificates()[0];
Principal principal = cert.getSubjectDN();
log.info("Client Cert SubjectDN: {}", principal.getName());
exchange.getMessage().setBody(
"When You Go Home, Tell Them Of Us And Say, For Your Tomorrow, We Gave Our Today.");
} else {
exchange.getMessage().setBody("Cannot start conversion without SSLSession");
}
}
});
}
});
context.start();
String response = template.requestBody(
"netty:tcp://localhost:{{port}}?sync=true&ssl=true&passphrase=changeit&keyStoreResource=#ksf&trustStoreResource=#tsf",
"Epitaph in Kohima, India marking the WWII Battle of Kohima and Imphal, Burma Campaign - Attributed to John Maxwell Edmonds",
String.class);
assertEquals("When You Go Home, Tell Them Of Us And Say, For Your Tomorrow, We Gave Our Today.", response);
}
}
| NettySSLTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java | {
"start": 227995,
"end": 229073
} | class ____ extends OperatorExpressionContext {
public PrimaryExpressionContext primaryExpression() {
return getRuleContext(PrimaryExpressionContext.class,0);
}
@SuppressWarnings("this-escape")
public OperatorExpressionDefaultContext(OperatorExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterOperatorExpressionDefault(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitOperatorExpressionDefault(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitOperatorExpressionDefault(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static | OperatorExpressionDefaultContext |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/parallel/ParallelSortedJoin.java | {
"start": 8010,
"end": 8975
} | class ____<T>
extends AtomicReference<Subscription>
implements FlowableSubscriber<List<T>> {
private static final long serialVersionUID = 6751017204873808094L;
final SortedJoinSubscription<T> parent;
final int index;
SortedJoinInnerSubscriber(SortedJoinSubscription<T> parent, int index) {
this.parent = parent;
this.index = index;
}
@Override
public void onSubscribe(Subscription s) {
SubscriptionHelper.setOnce(this, s, Long.MAX_VALUE);
}
@Override
public void onNext(List<T> t) {
parent.innerNext(t, index);
}
@Override
public void onError(Throwable t) {
parent.innerError(t);
}
@Override
public void onComplete() {
// ignored
}
void cancel() {
SubscriptionHelper.cancel(this);
}
}
}
| SortedJoinInnerSubscriber |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/test/java/org/apache/camel/test/main/junit5/annotation/ReplaceBeanFromMethodTest.java | {
"start": 1965,
"end": 2616
} | class
____.addConfiguration(MyConfiguration.class);
}
/**
* Replace the default bean whose name is <i>myGreetings</i> and type is {@link Greetings} with this custom
* implementation used for the test only.
*/
@ReplaceInRegistry
Greetings myGreetings() {
return new CustomGreetings(name);
}
@Test
void shouldReplaceTheBeanWithACustomBean() throws Exception {
mock.expectedBodiesReceived("Hi Will!");
String result = template.requestBody((Object) null, String.class);
mock.assertIsSatisfied();
assertEquals("Hi Will!", result);
}
@Nested
| configuration |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevUIProcessor.java | {
"start": 1195,
"end": 8705
} | class ____ {
@BuildStep(onlyIf = IsLocalDevelopment.class)
public CardPageBuildItem pages(ArcBeanInfoBuildItem arcBeanInfoBuildItem, ArcConfig config) {
DevBeanInfos beanInfos = arcBeanInfoBuildItem.getBeanInfos();
CardPageBuildItem pageBuildItem = new CardPageBuildItem();
pageBuildItem.setLogo("cdi_logo.png", "cdi_logo.png");
pageBuildItem.addLibraryVersion("jakarta.enterprise", "jakarta.enterprise.cdi-api", "Jakarta CDI",
"https://jakarta.ee/specifications/cdi/");
List<DevBeanInfo> beans = beanInfos.getBeans();
if (!beans.isEmpty()) {
pageBuildItem.addPage(Page.webComponentPageBuilder()
.icon("font-awesome-solid:egg")
.componentLink("qwc-arc-beans.js")
.staticLabel(String.valueOf(beans.size())));
pageBuildItem.addBuildTimeData(BEANS, toDevBeanWithInterceptorInfo(beans, beanInfos));
pageBuildItem.addBuildTimeData(BEAN_IDS_WITH_DEPENDENCY_GRAPHS, beanInfos.getDependencyGraphs().keySet());
pageBuildItem.addBuildTimeData(DEPENDENCY_GRAPHS, beanInfos.getDependencyGraphs());
}
List<DevObserverInfo> observers = beanInfos.getObservers();
if (!observers.isEmpty()) {
pageBuildItem.addPage(Page.webComponentPageBuilder()
.icon("font-awesome-solid:eye")
.componentLink("qwc-arc-observers.js")
.staticLabel(String.valueOf(observers.size())));
pageBuildItem.addBuildTimeData(OBSERVERS, observers);
}
List<DevInterceptorInfo> interceptors = beanInfos.getInterceptors();
if (!interceptors.isEmpty()) {
pageBuildItem.addPage(Page.webComponentPageBuilder()
.icon("font-awesome-solid:traffic-light")
.componentLink("qwc-arc-interceptors.js")
.staticLabel(String.valueOf(interceptors.size())));
pageBuildItem.addBuildTimeData(INTERCEPTORS, interceptors);
}
List<DevDecoratorInfo> decorators = beanInfos.getDecorators();
if (!decorators.isEmpty()) {
pageBuildItem.addPage(Page.webComponentPageBuilder()
.icon("font-awesome-solid:traffic-light")
.componentLink("qwc-arc-decorators.js")
.staticLabel(String.valueOf(decorators.size())));
pageBuildItem.addBuildTimeData(DECORATORS, decorators);
}
if (config.devMode().monitoringEnabled()) {
pageBuildItem.addPage(Page.webComponentPageBuilder()
.icon("font-awesome-solid:fire")
.componentLink("qwc-arc-fired-events.js"));
pageBuildItem.addPage(Page.webComponentPageBuilder()
.icon("font-awesome-solid:diagram-project")
.componentLink("qwc-arc-invocation-trees.js"));
}
int removedComponents = beanInfos.getRemovedComponents();
if (removedComponents > 0) {
pageBuildItem.addPage(Page.webComponentPageBuilder()
.icon("font-awesome-solid:trash-can")
.componentLink("qwc-arc-removed-components.js")
.staticLabel(String.valueOf(removedComponents)));
pageBuildItem.addBuildTimeData(REMOVED_BEANS, beanInfos.getRemovedBeans());
pageBuildItem.addBuildTimeData(REMOVED_DECORATORS, beanInfos.getRemovedDecorators());
pageBuildItem.addBuildTimeData(REMOVED_INTERCEPTORS, beanInfos.getRemovedInterceptors());
}
return pageBuildItem;
}
@BuildStep(onlyIf = IsLocalDevelopment.class)
JsonRPCProvidersBuildItem createJsonRPCService() {
return new JsonRPCProvidersBuildItem(ArcJsonRPCService.class);
}
@BuildStep(onlyIf = IsLocalDevelopment.class)
void registerMonitoringComponents(ArcConfig config, BuildProducer<AdditionalBeanBuildItem> beans,
BuildProducer<AnnotationsTransformerBuildItem> annotationTransformers,
CustomScopeAnnotationsBuildItem customScopes, List<BeanDefiningAnnotationBuildItem> beanDefiningAnnotations) {
if (!config.devMode().monitoringEnabled()) {
return;
}
if (!config.transformUnproxyableClasses()) {
throw new IllegalStateException(
"Dev UI problem: monitoring of CDI business method invocations not possible\n\t- quarkus.arc.transform-unproxyable-classes was set to false and therefore it would not be possible to apply interceptors to unproxyable bean classes\n\t- please disable the monitoring feature via quarkus.arc.dev-mode.monitoring-enabled=false or enable unproxyable classes transformation");
}
// Register beans
beans.produce(AdditionalBeanBuildItem.builder().setUnremovable()
.addBeanClasses(EventsMonitor.class, InvocationTree.class, InvocationsMonitor.class,
InvocationInterceptor.class,
Monitored.class)
.build());
// Add @Monitored to all beans
Set<DotName> skipNames = Set.of(DotName.createSimple(InvocationTree.class),
DotName.createSimple(InvocationsMonitor.class), DotName.createSimple(EventsMonitor.class));
annotationTransformers.produce(new AnnotationsTransformerBuildItem(AnnotationsTransformer
.appliedToClass()
.whenClass(c -> (customScopes.isScopeDeclaredOn(c)
|| isAdditionalBeanDefiningAnnotationOn(c, beanDefiningAnnotations))
&& !skipClass(c, skipNames))
.thenTransform(t -> t.add(Monitored.class))));
}
private boolean skipClass(ClassInfo beanClass, Set<DotName> skipNames) {
if (skipNames.contains(beanClass.name())) {
return true;
}
if (beanClass.name().packagePrefix().startsWith("io.quarkus.devui.runtime")) {
// Skip monitoring for internal devui components
return true;
}
return false;
}
private List<DevBeanWithInterceptorInfo> toDevBeanWithInterceptorInfo(List<DevBeanInfo> beans, DevBeanInfos devBeanInfos) {
List<DevBeanWithInterceptorInfo> l = new ArrayList<>();
for (DevBeanInfo dbi : beans) {
l.add(new DevBeanWithInterceptorInfo(dbi, devBeanInfos));
}
return l;
}
private boolean isAdditionalBeanDefiningAnnotationOn(ClassInfo beanClass,
List<BeanDefiningAnnotationBuildItem> beanDefiningAnnotations) {
for (BeanDefiningAnnotationBuildItem beanDefiningAnnotation : beanDefiningAnnotations) {
if (beanClass.hasDeclaredAnnotation(beanDefiningAnnotation.getName())) {
return true;
}
}
return false;
}
private static final String BEAN_IDS_WITH_DEPENDENCY_GRAPHS = "beanIdsWithDependencyGraphs";
private static final String DEPENDENCY_GRAPHS = "dependencyGraphs";
private static final String BEANS = "beans";
private static final String OBSERVERS = "observers";
private static final String INTERCEPTORS = "interceptors";
private static final String DECORATORS = "decorators";
private static final String REMOVED_BEANS = "removedBeans";
private static final String REMOVED_DECORATORS = "removedDecorators";
private static final String REMOVED_INTERCEPTORS = "removedInterceptors";
}
| ArcDevUIProcessor |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/tasks/SourceStreamTask.java | {
"start": 3366,
"end": 3750
} | class ____<
OUT, SRC extends SourceFunction<OUT>, OP extends StreamSource<OUT, SRC>>
extends StreamTask<OUT, OP> {
private final LegacySourceFunctionThread sourceThread;
private final Object lock;
private volatile boolean externallyInducedCheckpoints;
private final AtomicBoolean stopped = new AtomicBoolean(false);
private | SourceStreamTask |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/i18n/MessageBundleExpressionValidationTest.java | {
"start": 2495,
"end": 2554
} | class ____ {
static int level = 5;
}
}
| MyGlobals |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/SecurityWithMethodGenericsTest.java | {
"start": 2831,
"end": 3064
} | class ____ extends BaseResource<String> {
@Path("allow")
@GET
public String allow() {
return "allow";
}
}
@Provider
@Consumes("text/plain")
public static | AuthenticatedResource |
java | elastic__elasticsearch | plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistExtension.java | {
"start": 1107,
"end": 2411
} | class ____ implements PainlessExtension {
@Override
public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
Map<String, WhitelistAnnotationParser> parsers = new HashMap<>(WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS);
parsers.put(ExamplePainlessAnnotation.NAME, ExampleWhitelistAnnotationParser.INSTANCE);
Whitelist classWhitelist =
WhitelistLoader.loadFromResourceFiles(ExampleWhitelistExtension.class, parsers, "example_whitelist.txt");
ExampleWhitelistedInstance ewi = new ExampleWhitelistedInstance(1);
WhitelistInstanceBinding addValue = new WhitelistInstanceBinding("example addValue", ewi,
"addValue", "int", Collections.singletonList("int"), Collections.emptyList());
WhitelistInstanceBinding getValue = new WhitelistInstanceBinding("example getValue", ewi,
"getValue", "int", Collections.emptyList(), Collections.emptyList());
Whitelist instanceWhitelist = new Whitelist(ewi.getClass().getClassLoader(), Collections.emptyList(),
Collections.emptyList(), Collections.emptyList(), Arrays.asList(addValue, getValue));
return Collections.singletonMap(FieldScript.CONTEXT, Arrays.asList(classWhitelist, instanceWhitelist));
}
}
| ExampleWhitelistExtension |
java | apache__camel | components/camel-jacksonxml/src/test/java/org/apache/camel/component/jacksonxml/ListJacksonUnmarshalDTest.java | {
"start": 905,
"end": 1295
} | class ____ extends JacksonMarshalUnmarshalListTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:backPojo").unmarshal(new ListJacksonXMLDataFormat(TestPojo.class)).to("mock:reversePojo");
}
};
}
}
| ListJacksonUnmarshalDTest |
java | apache__camel | components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/service/WordpressCrudService.java | {
"start": 1248,
"end": 1614
} | interface ____<T, S extends SearchCriteria> extends WordpressService {
T retrieve(Integer entityID, Context context);
T retrieve(Integer entityID);
T create(T entity);
T delete(Integer entityID);
DeletedModel<T> forceDelete(Integer entityID);
List<T> list(S searchCriteria);
T update(Integer entityID, T entity);
}
| WordpressCrudService |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/StatFilterReadBytesLengthTest.java | {
"start": 476,
"end": 3051
} | class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setFilters("stat");
dataSource.setTestOnBorrow(false);
dataSource.getProxyFilters().add(new FilterAdapter() {
@Override
public byte[] resultSet_getBytes(FilterChain chain, ResultSetProxy result, int columnIndex)
throws SQLException {
return new byte[6];
}
@Override
public byte[] resultSet_getBytes(FilterChain chain, ResultSetProxy result, String columnIndex)
throws SQLException {
return new byte[7];
}
});
dataSource.init();
}
protected void tearDown() throws Exception {
JdbcUtils.close(dataSource);
}
public void test_stat() throws Exception {
Connection conn = dataSource.getConnection();
String sql = "select 'x'";
PreparedStatement stmt = conn.prepareStatement("select 'x'");
JdbcSqlStat sqlStat = dataSource.getDataSourceStat().getSqlStat(sql);
assertEquals(0, sqlStat.getReadStringLength());
assertEquals(0, sqlStat.getReadBytesLength());
ResultSet rs = stmt.executeQuery();
rs.next();
rs.getBytes(1);
rs.close();
stmt.close();
conn.close();
assertEquals(0, sqlStat.getReadStringLength());
assertEquals(6, sqlStat.getReadBytesLength());
sqlStat.reset();
assertEquals(0, sqlStat.getReadStringLength());
assertEquals(0, sqlStat.getReadBytesLength());
}
public void test_stat_1() throws Exception {
Connection conn = dataSource.getConnection();
String sql = "select 'x'";
PreparedStatement stmt = conn.prepareStatement("select 'x'");
JdbcSqlStat sqlStat = dataSource.getDataSourceStat().getSqlStat(sql);
assertEquals(0, sqlStat.getReadStringLength());
assertEquals(0, sqlStat.getReadBytesLength());
ResultSet rs = stmt.executeQuery();
rs.next();
rs.getBytes("1");
rs.close();
stmt.close();
conn.close();
assertEquals(0, sqlStat.getReadStringLength());
assertEquals(7, sqlStat.getReadBytesLength());
sqlStat.reset();
assertEquals(0, sqlStat.getReadStringLength());
assertEquals(0, sqlStat.getReadBytesLength());
}
}
| StatFilterReadBytesLengthTest |
java | apache__flink | flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/FencedPekkoRpcActor.java | {
"start": 1660,
"end": 5675
} | class ____<
F extends Serializable, T extends FencedRpcEndpoint<F> & RpcGateway>
extends PekkoRpcActor<T> {
public FencedPekkoRpcActor(
T rpcEndpoint,
CompletableFuture<Boolean> terminationFuture,
int version,
final long maximumFramesize,
final boolean forceSerialization,
ClassLoader flinkClassLoader,
final Map<String, String> loggingContext) {
super(
rpcEndpoint,
terminationFuture,
version,
maximumFramesize,
forceSerialization,
flinkClassLoader,
loggingContext);
}
@Override
protected void handleRpcMessage(Object message) {
if (message instanceof FencedMessage) {
final F expectedFencingToken = rpcEndpoint.getFencingToken();
if (expectedFencingToken == null) {
if (log.isDebugEnabled()) {
log.debug(
"Fencing token not set: Ignoring message {} because the fencing token is null.",
message);
}
sendErrorIfSender(
new FencingTokenException(
String.format(
"Fencing token not set: Ignoring message %s sent to %s because the fencing token is null.",
message, rpcEndpoint.getAddress())));
} else {
@SuppressWarnings("unchecked")
FencedMessage<F, ?> fencedMessage = ((FencedMessage<F, ?>) message);
F fencingToken = fencedMessage.getFencingToken();
if (Objects.equals(expectedFencingToken, fencingToken)) {
super.handleRpcMessage(fencedMessage.getPayload());
} else {
if (log.isDebugEnabled()) {
log.debug(
"Fencing token mismatch: Ignoring message {} because the fencing token {} did "
+ "not match the expected fencing token {}.",
message,
fencingToken,
expectedFencingToken);
}
sendErrorIfSender(
new FencingTokenException(
"Fencing token mismatch: Ignoring message "
+ message
+ " because the fencing token "
+ fencingToken
+ " did not match the expected fencing token "
+ expectedFencingToken
+ '.'));
}
}
} else {
if (log.isDebugEnabled()) {
log.debug(
"Unknown message type: Ignoring message {} because it is not of type {}.",
message,
FencedMessage.class.getSimpleName());
}
sendErrorIfSender(
new UnknownMessageException(
"Unknown message type: Ignoring message "
+ message
+ " of type "
+ message.getClass().getSimpleName()
+ " because it is not of type "
+ FencedMessage.class.getSimpleName()
+ "."));
}
}
@Override
protected Object envelopeSelfMessage(Object message) {
final F fencingToken = rpcEndpoint.getFencingToken();
return new LocalFencedMessage<>(fencingToken, message);
}
}
| FencedPekkoRpcActor |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java | {
"start": 7353,
"end": 20628
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(OfflineImageViewerPB.class);
private static final int NUM_DIRS = 3;
private static final int FILES_PER_DIR = 4;
private static final String TEST_RENEWER = "JobTracker";
private static File originalFsimage = null;
private static int filesECCount = 0;
private static String addedErasureCodingPolicyName = null;
private static final long FILE_NODE_ID_1 = 16388;
private static final long FILE_NODE_ID_2 = 16389;
private static final long FILE_NODE_ID_3 = 16394;
private static final long DIR_NODE_ID = 16391;
private static final long SAMPLE_TIMESTAMP = 946684800000L;
private static TimeZone defaultTimeZone = null;
// namespace as written to dfs, to be compared with viewer's output
final static HashMap<String, FileStatus> writtenFiles = Maps.newHashMap();
static int dirCount = 0;
private static File tempDir;
// Create a populated namespace for later testing. Save its contents to a
// data structure and store its fsimage location.
  // We only want to generate the fsimage file once and use it for
  // multiple tests.
  /**
   * Builds one fsimage exercising every feature the offline image viewer
   * must handle: ACLs, XAttrs (including empty and non-UTF8 values),
   * snapshots and snapshot diffs, delegation tokens, erasure coding,
   * directory names that need XML escaping, and parallel image
   * sub-sections. Expected entries are recorded in {@code writtenFiles}
   * for later comparison by the individual tests.
   *
   * @throws IOException if the mini cluster or a filesystem op fails
   */
  @SuppressWarnings("checkstyle:MethodLength")
  @BeforeAll
  public static void createOriginalFSImage() throws IOException {
    // Pin the JVM to UTC so timestamps in viewer output are stable;
    // the saved zone is restored in deleteOriginalFSImage().
    defaultTimeZone = TimeZone.getDefault();
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    File[] nnDirs = MiniDFSCluster.getNameNodeDirectory(
        MiniDFSCluster.getBaseDirectory(), 0, 0);
    tempDir = nnDirs[0];
    MiniDFSCluster cluster = null;
    try {
      final ErasureCodingPolicy ecPolicy = SystemErasureCodingPolicies
          .getByID(SystemErasureCodingPolicies.XOR_2_1_POLICY_ID);
      Configuration conf = new Configuration();
      // Short token lifetimes so delegation-token ops land in the image.
      conf.setLong(
          DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_KEY, 10000);
      conf.setLong(
          DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_RENEW_INTERVAL_KEY, 5000);
      conf.setBoolean(
          DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
      conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL,
          "RULE:[2:$1@$0](JobTracker@.*FOO.COM)s/@.*//" + "DEFAULT");
      // fsimage with sub-section conf
      conf.set(DFSConfigKeys.DFS_IMAGE_PARALLEL_LOAD_KEY, "true");
      conf.set(DFSConfigKeys.DFS_IMAGE_PARALLEL_INODE_THRESHOLD_KEY, "1");
      conf.set(DFSConfigKeys.DFS_IMAGE_PARALLEL_TARGET_SECTIONS_KEY, "4");
      conf.set(DFSConfigKeys.DFS_IMAGE_PARALLEL_THREADS_KEY, "4");
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      cluster.waitActive();
      DistributedFileSystem hdfs = cluster.getFileSystem();
      hdfs.enableErasureCodingPolicy(ecPolicy.getName());
      // Register a custom EC policy in addition to the built-in XOR one.
      Map<String, String> options = ImmutableMap.of("k1", "v1", "k2", "v2");
      ECSchema schema = new ECSchema(ErasureCodeConstants.RS_CODEC_NAME,
          10, 4, options);
      ErasureCodingPolicy policy = new ErasureCodingPolicy(schema, 1024);
      AddErasureCodingPolicyResponse[] responses =
          hdfs.addErasureCodingPolicies(new ErasureCodingPolicy[]{policy});
      addedErasureCodingPolicyName = responses[0].getPolicy().getName();
      hdfs.enableErasureCodingPolicy(addedErasureCodingPolicyName);
      // Create a reasonable namespace
      for (int i = 0; i < NUM_DIRS; i++, dirCount++) {
        Path dir = new Path("/dir" + i);
        hdfs.mkdirs(dir);
        writtenFiles.put(dir.toString(), pathToFileEntry(hdfs, dir.toString()));
        for (int j = 0; j < FILES_PER_DIR; j++) {
          Path file = new Path(dir, "file" + j);
          FSDataOutputStream o = hdfs.create(file);
          o.write(23);
          o.close();
          writtenFiles.put(file.toString(),
              pathToFileEntry(hdfs, file.toString()));
        }
      }
      // Create an empty directory
      Path emptydir = new Path("/emptydir");
      hdfs.mkdirs(emptydir);
      dirCount++;
      writtenFiles.put(emptydir.toString(), hdfs.getFileStatus(emptydir));
      //Create directories whose name should be escaped in XML
      Path invalidXMLDir = new Path("/dirContainingInvalidXMLChar\u0000here");
      hdfs.mkdirs(invalidXMLDir);
      dirCount++;
      Path entityRefXMLDir = new Path("/dirContainingEntityRef&here");
      hdfs.mkdirs(entityRefXMLDir);
      dirCount++;
      writtenFiles.put(entityRefXMLDir.toString(),
          hdfs.getFileStatus(entityRefXMLDir));
      //Create directories with new line characters
      Path newLFDir = new Path("/dirContainingNewLineChar"
          + StringUtils.LF + "here");
      hdfs.mkdirs(newLFDir);
      dirCount++;
      // Keyed by the escaped form the delimited writer is expected to emit.
      writtenFiles.put("\"/dirContainingNewLineChar%x0Ahere\"",
          hdfs.getFileStatus(newLFDir));
      Path newCRLFDir = new Path("/dirContainingNewLineChar"
          + PBImageDelimitedTextWriter.CRLF + "here");
      hdfs.mkdirs(newCRLFDir);
      dirCount++;
      writtenFiles.put("\"/dirContainingNewLineChar%x0D%x0Ahere\"",
          hdfs.getFileStatus(newCRLFDir));
      //Create a directory with sticky bits
      Path stickyBitDir = new Path("/stickyBit");
      hdfs.mkdirs(stickyBitDir);
      hdfs.setPermission(stickyBitDir, new FsPermission(FsAction.ALL,
          FsAction.ALL, FsAction.ALL, true));
      dirCount++;
      writtenFiles.put(stickyBitDir.toString(),
          hdfs.getFileStatus(stickyBitDir));
      // Get delegation tokens so we log the delegation token op
      Token<?>[] delegationTokens = hdfs
          .addDelegationTokens(TEST_RENEWER, null);
      for (Token<?> t : delegationTokens) {
        LOG.debug("got token " + t);
      }
      // Create INodeReference
      final Path src = new Path("/src");
      hdfs.mkdirs(src);
      dirCount++;
      writtenFiles.put(src.toString(), hdfs.getFileStatus(src));
      // Create snapshot and snapshotDiff.
      final Path orig = new Path("/src/orig");
      hdfs.mkdirs(orig);
      final Path file1 = new Path("/src/file");
      FSDataOutputStream o = hdfs.create(file1);
      o.write(23);
      o.write(45);
      o.close();
      hdfs.allowSnapshot(src);
      hdfs.createSnapshot(src, "snapshot");
      final Path dst = new Path("/dst");
      // Rename a directory in the snapshot directory to add snapshotCopy
      // field to the dirDiff entry.
      hdfs.rename(orig, dst);
      dirCount++;
      writtenFiles.put(dst.toString(), hdfs.getFileStatus(dst));
      // Truncate a file in the snapshot directory to add snapshotCopy and
      // blocks fields to the fileDiff entry.
      hdfs.truncate(file1, 1);
      writtenFiles.put(file1.toString(), hdfs.getFileStatus(file1));
      // HDFS-14148: Create a second snapshot-enabled directory. This can cause
      // TestOfflineImageViewer#testReverseXmlRoundTrip to fail before the patch
      final Path snapshotDir2 = new Path("/snapshotDir2");
      hdfs.mkdirs(snapshotDir2);
      // Simply enable snapshot on it, no need to create one
      hdfs.allowSnapshot(snapshotDir2);
      dirCount++;
      writtenFiles.put(snapshotDir2.toString(),
          hdfs.getFileStatus(snapshotDir2));
      // Set XAttrs so the fsimage contains XAttr ops
      final Path xattr = new Path("/xattr");
      hdfs.mkdirs(xattr);
      dirCount++;
      hdfs.setXAttr(xattr, "user.a1", new byte[]{ 0x31, 0x32, 0x33 });
      hdfs.setXAttr(xattr, "user.a2", new byte[]{ 0x37, 0x38, 0x39 });
      // OIV should be able to handle empty value XAttrs
      hdfs.setXAttr(xattr, "user.a3", null);
      // OIV should be able to handle XAttr values that can't be expressed
      // as UTF8
      hdfs.setXAttr(xattr, "user.a4", new byte[]{ -0x3d, 0x28 });
      writtenFiles.put(xattr.toString(), hdfs.getFileStatus(xattr));
      // Set ACLs
      hdfs.setAcl(
          xattr,
          Lists.newArrayList(aclEntry(ACCESS, USER, ALL),
              aclEntry(ACCESS, USER, "foo", ALL),
              aclEntry(ACCESS, GROUP, READ_EXECUTE),
              aclEntry(ACCESS, GROUP, "bar", READ_EXECUTE),
              aclEntry(ACCESS, OTHER, EXECUTE)));
      // Create an Erasure Coded dir
      Path ecDir = new Path("/ec");
      hdfs.mkdirs(ecDir);
      dirCount++;
      hdfs.getClient().setErasureCodingPolicy(ecDir.toString(),
          ecPolicy.getName());
      writtenFiles.put(ecDir.toString(), hdfs.getFileStatus(ecDir));
      // Create an empty Erasure Coded file
      Path emptyECFile = new Path(ecDir, "EmptyECFile.txt");
      hdfs.create(emptyECFile).close();
      writtenFiles.put(emptyECFile.toString(),
          pathToFileEntry(hdfs, emptyECFile.toString()));
      filesECCount++;
      // Create a small Erasure Coded file
      Path smallECFile = new Path(ecDir, "SmallECFile.txt");
      FSDataOutputStream out = hdfs.create(smallECFile);
      Random r = new Random();
      byte[] bytes = new byte[1024 * 10];
      r.nextBytes(bytes);
      // NOTE(review): 'out' is never closed before saveNamespace() —
      // presumably intentional so the image contains a file that is still
      // under construction; confirm before "fixing".
      out.write(bytes);
      writtenFiles.put(smallECFile.toString(),
          pathToFileEntry(hdfs, smallECFile.toString()));
      filesECCount++;
      // Write results to the fsimage file
      hdfs.setSafeMode(SafeModeAction.ENTER, false);
      hdfs.saveNamespace();
      hdfs.setSafeMode(SafeModeAction.LEAVE, false);
      // Determine location of fsimage file
      originalFsimage = FSImageTestUtil.findLatestImageFile(FSImageTestUtil
          .getFSImage(cluster.getNameNode()).getStorage().getStorageDir(0));
      if (originalFsimage == null) {
        throw new RuntimeException("Didn't generate or can't find fsimage");
      }
      LOG.debug("original FS image file is " + originalFsimage);
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }
@AfterAll
public static void deleteOriginalFSImage() throws IOException {
FileUtils.deleteQuietly(tempDir);
if (originalFsimage != null && originalFsimage.exists()) {
originalFsimage.delete();
}
if (defaultTimeZone != null) {
TimeZone.setDefault(defaultTimeZone);
}
}
// Convenience method to generate a file status from file system for
// later comparison
private static FileStatus pathToFileEntry(FileSystem hdfs, String file)
throws IOException {
return hdfs.getFileStatus(new Path(file));
}
@Test
public void testTruncatedFSImage() throws IOException {
assertThrows(IOException.class, () -> {
File truncatedFile = new File(tempDir, "truncatedFsImage");
PrintStream output = new PrintStream(NullOutputStream.INSTANCE);
copyPartOfFile(originalFsimage, truncatedFile);
try (RandomAccessFile r = new RandomAccessFile(truncatedFile, "r")) {
new FileDistributionCalculator(new Configuration(), 0, 0, false, output)
.visit(r);
}
});
}
private void copyPartOfFile(File src, File dest) throws IOException {
FileInputStream in = null;
FileOutputStream out = null;
final int MAX_BYTES = 700;
try {
in = new FileInputStream(src);
out = new FileOutputStream(dest);
in.getChannel().transferTo(0, MAX_BYTES, out.getChannel());
out.close();
out = null;
} finally {
IOUtils.closeStream(in);
IOUtils.closeStream(out);
}
}
@Test
public void testFileDistributionCalculator() throws IOException {
try (ByteArrayOutputStream output = new ByteArrayOutputStream();
PrintStream o = new PrintStream(output);
RandomAccessFile r = new RandomAccessFile(originalFsimage, "r")) {
new FileDistributionCalculator(new Configuration(), 0, 0, false, o)
.visit(r);
o.close();
String outputString = output.toString();
Pattern p = Pattern.compile("totalFiles = (\\d+)\n");
Matcher matcher = p.matcher(outputString);
assertTrue(matcher.find() && matcher.groupCount() == 1);
int totalFiles = Integer.parseInt(matcher.group(1));
assertEquals(NUM_DIRS * FILES_PER_DIR + filesECCount + 1, totalFiles);
p = Pattern.compile("totalDirectories = (\\d+)\n");
matcher = p.matcher(outputString);
assertTrue(matcher.find() && matcher.groupCount() == 1);
int totalDirs = Integer.parseInt(matcher.group(1));
// totalDirs includes root directory
assertEquals(dirCount + 1, totalDirs);
FileStatus maxFile = Collections.max(writtenFiles.values(),
new Comparator<FileStatus>() {
@Override
public int compare(FileStatus first, FileStatus second) {
return first.getLen() < second.getLen() ?
-1 :
((first.getLen() == second.getLen()) ? 0 : 1);
}
});
p = Pattern.compile("maxFileSize = (\\d+)\n");
matcher = p.matcher(output.toString("UTF-8"));
assertTrue(matcher.find() && matcher.groupCount() == 1);
assertEquals(maxFile.getLen(), Long.parseLong(matcher.group(1)));
}
}
@Test
public void testFileDistributionCalculatorWithOptions() throws Exception {
int status = OfflineImageViewerPB.run(new String[] {"-i",
originalFsimage.getAbsolutePath(), "-o", "-", "-p", "FileDistribution",
"-maxSize", "512", "-step", "8"});
assertEquals(0, status);
}
/**
* SAX handler to verify EC Files and their policies.
*/
| TestOfflineImageViewer |
java | quarkusio__quarkus | extensions/funqy/funqy-google-cloud-functions/runtime/src/main/java/io/quarkus/funqy/gcp/functions/event/StorageEvent.java | {
"start": 1345,
"end": 1456
} | class ____ {
public String encryptionAlgorithm;
public String keySha256;
}
}
| CustomerEncryption |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.