language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/junit/jupiter/nested/TransactionalNestedTests.java
|
{
"start": 3826,
"end": 4111
}
|
class ____ {
@Test
void transactional(@Autowired DataSource dataSource) {
assertThatTransaction().isActive();
assertThat(dataSource).isNotNull();
assertRollback();
}
@Nested
@NestedTestConfiguration(INHERIT)
@Commit
|
DoubleNestedWithOverriddenConfigTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java
|
{
"start": 62293,
"end": 62502
}
|
class ____ {
}
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties({ PrefixProperties.class, AnotherPrefixProperties.class })
static
|
PrefixPropertiesDeclaredAsAnnotationValueConfiguration
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/components/EmbeddableWithCollectionTest.java
|
{
"start": 1451,
"end": 4348
}
|
class ____ {
private Long headerId;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1
headerId = scope.fromTransaction( entityManager -> {
Header h1 = new Header( "h1" );
h1.addItem( new Item( "h1-item0", h1 ) );
h1.addItem( new Item( "h1-item1", h1 ) );
entityManager.persist( h1 );
return h1.getId();
} );
// Revision 2
scope.inTransaction( entityManager -> {
final Header header = entityManager.find( Header.class, headerId );
header.addItem( new Item( "h1-item2", header ) );
entityManager.merge( header );
} );
// Revision 3
scope.inTransaction( entityManager -> {
final Header header = entityManager.find( Header.class, headerId );
header.removeItem( header.getEmbeddableWithCollection().getItems().get( 0 ) );
entityManager.merge( header );
} );
// Revision 4
scope.inTransaction( entityManager -> {
final Header header = entityManager.find( Header.class, headerId );
header.setEmbeddableWithCollection( null );
entityManager.merge( header );
} );
}
@Test
public void testRevisionCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1, 2, 3, 4 ), auditReader.getRevisions( Header.class, headerId ) );
} );
}
@Test
public void testRevisionHistory(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
final Header rev1 = auditReader.find( Header.class, headerId, 1 );
assertEquals( 2, rev1.getEmbeddableWithCollection().getItems().size() );
assertEquals( "h1-item0", rev1.getEmbeddableWithCollection().getItems().get( 0 ).getName() );
assertEquals( "h1-item1", rev1.getEmbeddableWithCollection().getItems().get( 1 ).getName() );
final Header rev2 = auditReader.find( Header.class, headerId, 2 );
assertEquals( 3, rev2.getEmbeddableWithCollection().getItems().size() );
assertEquals( "h1-item0", rev2.getEmbeddableWithCollection().getItems().get( 0 ).getName() );
assertEquals( "h1-item1", rev2.getEmbeddableWithCollection().getItems().get( 1 ).getName() );
assertEquals( "h1-item2", rev2.getEmbeddableWithCollection().getItems().get( 2 ).getName() );
final Header rev3 = auditReader.find( Header.class, headerId, 3 );
assertEquals( 2, rev3.getEmbeddableWithCollection().getItems().size() );
assertEquals( "h1-item1", rev3.getEmbeddableWithCollection().getItems().get( 0 ).getName() );
assertEquals( "h1-item2", rev3.getEmbeddableWithCollection().getItems().get( 1 ).getName() );
final Header rev4 = auditReader.find( Header.class, headerId, 4 );
assertEquals( 0, rev4.getEmbeddableWithCollection().getItems().size() );
} );
}
@Entity(name = "Header")
@Table(name = "ENVERS_HEADER")
@Audited
public static
|
EmbeddableWithCollectionTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContains_at_Index_Test.java
|
{
"start": 1813,
"end": 3899
}
|
class ____ extends BooleanArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContains(someInfo(), null, true, someIndex()))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_is_empty() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContains(someInfo(), emptyArray(), true,
someIndex()))
.withMessage(actualIsEmpty());
}
@Test
void should_throw_error_if_Index_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertContains(someInfo(), actual, true, null))
.withMessage("Index should not be null");
}
@Test
void should_throw_error_if_Index_is_out_of_bounds() {
assertThatExceptionOfType(IndexOutOfBoundsException.class).isThrownBy(() -> arrays.assertContains(someInfo(),
actual, true,
atIndex(6)))
.withMessageContaining("Index should be between <0> and <1> (inclusive) but was:%n <6>".formatted());
}
@Test
void should_fail_if_actual_does_not_contain_value_at_index() {
AssertionInfo info = someInfo();
boolean value = true;
Index index = atIndex(1);
Throwable error = catchThrowable(() -> arrays.assertContains(info, actual, value, index));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainAtIndex(actual, value, index, false));
}
@Test
void should_pass_if_actual_contains_value_at_index() {
arrays.assertContains(someInfo(), actual, false, atIndex(1));
}
}
|
BooleanArrays_assertContains_at_Index_Test
|
java
|
apache__kafka
|
connect/api/src/main/java/org/apache/kafka/connect/health/ConnectorState.java
|
{
"start": 937,
"end": 1731
}
|
class ____ extends AbstractState {
/**
* Provides an instance of the ConnectorState.
*
* @param state - the status of connector, may not be {@code null} or empty
* @param workerId - the workerId associated with the connector, may not be {@code null} or empty
* @param traceMessage - any error message associated with the connector, may be {@code null} or empty
*/
public ConnectorState(String state, String workerId, String traceMessage) {
super(state, workerId, traceMessage);
}
@Override
public String toString() {
return "ConnectorState{"
+ "state='" + state() + '\''
+ ", traceMessage='" + traceMessage() + '\''
+ ", workerId='" + workerId() + '\''
+ '}';
}
}
|
ConnectorState
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecExchange.java
|
{
"start": 1353,
"end": 1404
}
|
class ____ exec Exchange.
*
* <p>TODO Remove this
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java
|
{
"start": 16655,
"end": 17276
}
|
class ____ implements ExtensiblePlugin.ExtensionLoader {
private final MachineLearningExtension extension;
MlTestExtensionLoader(MachineLearningExtension extension) {
this.extension = extension;
}
@Override
@SuppressWarnings("unchecked")
public <T> List<T> loadExtensions(Class<T> extensionPointType) {
if (extensionPointType.isAssignableFrom(MachineLearningExtension.class)) {
return List.of((T) extension);
} else {
return List.of();
}
}
}
public static
|
MlTestExtensionLoader
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
|
{
"start": 3060,
"end": 9269
}
|
class ____ extends InputSplit implements Writable {
private long end = 0;
private long start = 0;
/**
* Default Constructor
*/
public DBInputSplit() {
}
/**
* Convenience Constructor
* @param start the index of the first row to select
* @param end the index of the last row to select
*/
public DBInputSplit(long start, long end) {
this.start = start;
this.end = end;
}
/** {@inheritDoc} */
public String[] getLocations() throws IOException {
// TODO Add a layer to enable SQL "sharding" and support locality
return new String[] {};
}
/**
* @return The index of the first row to select
*/
public long getStart() {
return start;
}
/**
* @return The index of the last row to select
*/
public long getEnd() {
return end;
}
/**
* @return The total row count in this split
*/
public long getLength() throws IOException {
return end - start;
}
/** {@inheritDoc} */
public void readFields(DataInput input) throws IOException {
start = input.readLong();
end = input.readLong();
}
/** {@inheritDoc} */
public void write(DataOutput output) throws IOException {
output.writeLong(start);
output.writeLong(end);
}
}
protected String conditions;
protected Connection connection;
protected String tableName;
protected String[] fieldNames;
protected DBConfiguration dbConf;
/** {@inheritDoc} */
public void setConf(Configuration conf) {
dbConf = new DBConfiguration(conf);
try {
this.connection = createConnection();
DatabaseMetaData dbMeta = connection.getMetaData();
this.dbProductName =
StringUtils.toUpperCase(dbMeta.getDatabaseProductName());
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
tableName = dbConf.getInputTableName();
fieldNames = dbConf.getInputFieldNames();
conditions = dbConf.getInputConditions();
}
public Configuration getConf() {
return dbConf.getConf();
}
public DBConfiguration getDBConf() {
return dbConf;
}
public Connection getConnection() {
// TODO Remove this code that handles backward compatibility.
if (this.connection == null) {
this.connection = createConnection();
}
return this.connection;
}
public Connection createConnection() {
try {
Connection newConnection = dbConf.getConnection();
newConnection.setAutoCommit(false);
newConnection.setTransactionIsolation(
Connection.TRANSACTION_SERIALIZABLE);
return newConnection;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public String getDBProductName() {
return dbProductName;
}
protected RecordReader<LongWritable, T> createDBRecordReader(DBInputSplit split,
Configuration conf) throws IOException {
@SuppressWarnings("unchecked")
Class<T> inputClass = (Class<T>) (dbConf.getInputClass());
try {
// use database product name to determine appropriate record reader.
if (dbProductName.startsWith("ORACLE")) {
// use Oracle-specific db reader.
return new OracleDBRecordReader<T>(split, inputClass,
conf, createConnection(), getDBConf(), conditions, fieldNames,
tableName);
} else if (dbProductName.startsWith("MYSQL")) {
// use MySQL-specific db reader.
return new MySQLDBRecordReader<T>(split, inputClass,
conf, createConnection(), getDBConf(), conditions, fieldNames,
tableName);
} else {
// Generic reader.
return new DBRecordReader<T>(split, inputClass,
conf, createConnection(), getDBConf(), conditions, fieldNames,
tableName);
}
} catch (SQLException ex) {
throw new IOException(ex.getMessage());
}
}
/** {@inheritDoc} */
public RecordReader<LongWritable, T> createRecordReader(InputSplit split,
TaskAttemptContext context) throws IOException, InterruptedException {
return createDBRecordReader((DBInputSplit) split, context.getConfiguration());
}
/** {@inheritDoc} */
public List<InputSplit> getSplits(JobContext job) throws IOException {
ResultSet results = null;
Statement statement = null;
try {
statement = connection.createStatement();
results = statement.executeQuery(getCountQuery());
results.next();
long count = results.getLong(1);
int chunks = job.getConfiguration().getInt(MRJobConfig.NUM_MAPS, 1);
long chunkSize = (count / chunks);
results.close();
statement.close();
List<InputSplit> splits = new ArrayList<InputSplit>();
// Split the rows into n-number of chunks and adjust the last chunk
// accordingly
for (int i = 0; i < chunks; i++) {
DBInputSplit split;
if ((i + 1) == chunks)
split = new DBInputSplit(i * chunkSize, count);
else
split = new DBInputSplit(i * chunkSize, (i * chunkSize)
+ chunkSize);
splits.add(split);
}
connection.commit();
return splits;
} catch (SQLException e) {
throw new IOException("Got SQLException", e);
} finally {
try {
if (results != null) { results.close(); }
} catch (SQLException e1) {}
try {
if (statement != null) { statement.close(); }
} catch (SQLException e1) {}
closeConnection();
}
}
/** Returns the query for getting the total number of rows,
* subclasses can override this for custom behaviour.*/
protected String getCountQuery() {
if(dbConf.getInputCountQuery() != null) {
return dbConf.getInputCountQuery();
}
StringBuilder query = new StringBuilder();
query.append("SELECT COUNT(*) FROM " + tableName);
if (conditions != null && conditions.length() > 0)
query.append(" WHERE " + conditions);
return query.toString();
}
/**
* Initializes the map-part of the job with the appropriate input settings.
*
* @param job The map-reduce job
* @param inputClass the
|
DBInputSplit
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3642DynamicResourcesTest.java
|
{
"start": 1240,
"end": 3146
}
|
class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testitMNG3642() throws Exception {
// The testdir is computed from the location of this
// file.
File testDir = extractResources("/mng-3642");
Verifier verifier;
/*
* We must first make sure that any artifact created
* by this test has been removed from the local
* repository. Failing to do this could cause
* unstable test results. Fortunately, the verifier
* makes it easy to do this.
*/
verifier = newVerifier(testDir.getAbsolutePath());
/*
* The Command Line Options (CLI) are passed to the
* verifier as a list. This is handy for things like
* redefining the local repository if needed. In
* this case, we use the -N flag so that Maven won't
* recurse. We are only installing the parent pom to
* the local repo here.
*/
verifier.addCliArgument("process-test-resources");
verifier.execute();
/*
* This is the simplest way to check a build
* succeeded. It is also the simplest way to create
* an IT test: make the build pass when the test
* should pass, and make the build fail when the
* test should fail. There are other methods
* supported by the verifier. They can be seen here:
* http://maven.apache.org/shared/maven-verifier/apidocs/index.html
*/
verifier.verifyErrorFreeLog();
File first = new File(testDir, "target/test-classes/one.txt");
assertTrue(first.exists(), "First resource file was not present: " + first);
File second = new File(testDir, "target/test-classes/two.txt");
assertTrue(second.exists(), "Second resource file was not present: " + second);
}
}
|
MavenITmng3642DynamicResourcesTest
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/plugin/PluginAdd.java
|
{
"start": 1328,
"end": 4520
}
|
class ____ extends PluginBaseCommand {
@CommandLine.Parameters(description = "The Camel plugin to add.",
paramLabel = "<name>")
String name;
@CommandLine.Option(names = { "--command" },
description = "The command that the plugin uses.")
String command;
@CommandLine.Option(names = { "--description" },
description = "A short description of the plugin.")
String description;
@CommandLine.Option(names = { "--artifactId" },
description = "Maven artifactId.")
String artifactId;
@CommandLine.Option(names = { "--groupId" },
defaultValue = "org.apache.camel",
description = "Maven groupId.")
String groupId = "org.apache.camel";
@CommandLine.Option(names = { "--version" },
defaultValue = "${camel-version}",
description = "Maven artifact version.")
String version;
@CommandLine.Option(names = { "--first-version" },
defaultValue = "${camel-version}",
description = "First version of this plugin.")
String firstVersion;
@CommandLine.Option(names = { "--gav" },
description = "Maven group and artifact coordinates.")
String gav;
public PluginAdd(CamelJBangMain main) {
super(main);
}
@Override
public Integer doCall() throws Exception {
JsonObject pluginConfig = loadConfig();
JsonObject plugins = pluginConfig.getMap("plugins");
Optional<PluginType> camelPlugin = PluginType.findByName(name);
if (camelPlugin.isPresent()) {
if (command == null) {
command = camelPlugin.get().getCommand();
}
if (description == null) {
description = camelPlugin.get().getDescription();
}
if (firstVersion == null) {
firstVersion = camelPlugin.get().getFirstVersion();
}
}
if (command == null) {
// use plugin name as command
command = name;
}
if (firstVersion == null) {
// fallback to version specified
firstVersion = version;
}
JsonObject plugin = new JsonObject();
plugin.put("name", name);
plugin.put("command", command);
if (firstVersion != null) {
plugin.put("firstVersion", firstVersion);
}
plugin.put("description",
description != null ? description : "Plugin %s called with command %s".formatted(name, command));
if (gav == null && (groupId != null && artifactId != null)) {
if (version == null) {
CamelCatalog catalog = new DefaultCamelCatalog();
version = catalog.getCatalogVersion();
}
gav = "%s:%s:%s".formatted(groupId, artifactId, version);
}
if (gav != null) {
plugin.put("dependency", gav);
}
plugins.put(name, plugin);
saveConfig(pluginConfig);
return 0;
}
}
|
PluginAdd
|
java
|
apache__kafka
|
generator/src/main/java/org/apache/kafka/message/MessageGenerator.java
|
{
"start": 1813,
"end": 8904
}
|
class ____ {
static final String JSON_SUFFIX = ".json";
static final String JSON_GLOB = "*" + JSON_SUFFIX;
static final String JAVA_SUFFIX = ".java";
static final String API_MESSAGE_TYPE_JAVA = "ApiMessageType.java";
static final String API_SCOPE_JAVA = "ApiScope.java";
static final String COORDINATOR_RECORD_TYPE_JAVA = "CoordinatorRecordType.java";
static final String COORDINATOR_RECORD_JSON_CONVERTERS_JAVA = "CoordinatorRecordJsonConverters.java";
static final String METADATA_RECORD_TYPE_JAVA = "MetadataRecordType.java";
static final String METADATA_JSON_CONVERTERS_JAVA = "MetadataJsonConverters.java";
static final String API_MESSAGE_CLASS = "org.apache.kafka.common.protocol.ApiMessage";
static final String MESSAGE_CLASS = "org.apache.kafka.common.protocol.Message";
static final String MESSAGE_UTIL_CLASS = "org.apache.kafka.common.protocol.MessageUtil";
static final String READABLE_CLASS = "org.apache.kafka.common.protocol.Readable";
static final String WRITABLE_CLASS = "org.apache.kafka.common.protocol.Writable";
static final String ARRAYS_CLASS = "java.util.Arrays";
static final String OBJECTS_CLASS = "java.util.Objects";
static final String LIST_CLASS = "java.util.List";
static final String ARRAYLIST_CLASS = "java.util.ArrayList";
static final String IMPLICIT_LINKED_HASH_COLLECTION_CLASS =
"org.apache.kafka.common.utils.ImplicitLinkedHashCollection";
static final String IMPLICIT_LINKED_HASH_MULTI_COLLECTION_CLASS =
"org.apache.kafka.common.utils.ImplicitLinkedHashMultiCollection";
static final String UNSUPPORTED_VERSION_EXCEPTION_CLASS =
"org.apache.kafka.common.errors.UnsupportedVersionException";
static final String ITERATOR_CLASS = "java.util.Iterator";
static final String ENUM_SET_CLASS = "java.util.EnumSet";
static final String TYPE_CLASS = "org.apache.kafka.common.protocol.types.Type";
static final String FIELD_CLASS = "org.apache.kafka.common.protocol.types.Field";
static final String SCHEMA_CLASS = "org.apache.kafka.common.protocol.types.Schema";
static final String NULLABLE_SCHEMA_CLASS = "org.apache.kafka.common.protocol.types.NullableSchema";
static final String ARRAYOF_CLASS = "org.apache.kafka.common.protocol.types.ArrayOf";
static final String COMPACT_ARRAYOF_CLASS = "org.apache.kafka.common.protocol.types.CompactArrayOf";
static final String BYTES_CLASS = "org.apache.kafka.common.utils.Bytes";
static final String UUID_CLASS = "org.apache.kafka.common.Uuid";
static final String BASE_RECORDS_CLASS = "org.apache.kafka.common.record.BaseRecords";
static final String MEMORY_RECORDS_CLASS = "org.apache.kafka.common.record.MemoryRecords";
static final String REQUEST_SUFFIX = "Request";
static final String RESPONSE_SUFFIX = "Response";
static final String BYTE_UTILS_CLASS = "org.apache.kafka.common.utils.ByteUtils";
static final String STANDARD_CHARSETS = "java.nio.charset.StandardCharsets";
static final String TAGGED_FIELDS_SECTION_CLASS = "org.apache.kafka.common.protocol.types.Field.TaggedFieldsSection";
static final String OBJECT_SERIALIZATION_CACHE_CLASS = "org.apache.kafka.common.protocol.ObjectSerializationCache";
static final String MESSAGE_SIZE_ACCUMULATOR_CLASS = "org.apache.kafka.common.protocol.MessageSizeAccumulator";
static final String RAW_TAGGED_FIELD_CLASS = "org.apache.kafka.common.protocol.types.RawTaggedField";
static final String RAW_TAGGED_FIELD_WRITER_CLASS = "org.apache.kafka.common.protocol.types.RawTaggedFieldWriter";
static final String TREE_MAP_CLASS = "java.util.TreeMap";
static final String BYTE_BUFFER_CLASS = "java.nio.ByteBuffer";
static final String NAVIGABLE_MAP_CLASS = "java.util.NavigableMap";
static final String MAP_ENTRY_CLASS = "java.util.Map.Entry";
static final String JSON_NODE_CLASS = "com.fasterxml.jackson.databind.JsonNode";
static final String OBJECT_NODE_CLASS = "com.fasterxml.jackson.databind.node.ObjectNode";
static final String JSON_NODE_FACTORY_CLASS = "com.fasterxml.jackson.databind.node.JsonNodeFactory";
static final String BOOLEAN_NODE_CLASS = "com.fasterxml.jackson.databind.node.BooleanNode";
static final String SHORT_NODE_CLASS = "com.fasterxml.jackson.databind.node.ShortNode";
static final String INT_NODE_CLASS = "com.fasterxml.jackson.databind.node.IntNode";
static final String LONG_NODE_CLASS = "com.fasterxml.jackson.databind.node.LongNode";
static final String TEXT_NODE_CLASS = "com.fasterxml.jackson.databind.node.TextNode";
static final String BINARY_NODE_CLASS = "com.fasterxml.jackson.databind.node.BinaryNode";
static final String NULL_NODE_CLASS = "com.fasterxml.jackson.databind.node.NullNode";
static final String ARRAY_NODE_CLASS = "com.fasterxml.jackson.databind.node.ArrayNode";
static final String DOUBLE_NODE_CLASS = "com.fasterxml.jackson.databind.node.DoubleNode";
static final long UNSIGNED_INT_MAX = 4294967295L;
static final int UNSIGNED_SHORT_MAX = 65535;
/**
* The Jackson serializer we use for JSON objects.
*/
public static final ObjectMapper JSON_SERDE;
static {
JSON_SERDE = new ObjectMapper();
JSON_SERDE.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
JSON_SERDE.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
JSON_SERDE.configure(DeserializationFeature.FAIL_ON_TRAILING_TOKENS, true);
JSON_SERDE.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
JSON_SERDE.setDefaultPropertyInclusion(JsonInclude.Include.NON_EMPTY);
JSON_SERDE.registerModule(new Jdk8Module());
}
private static List<TypeClassGenerator> createTypeClassGenerators(String packageName,
List<String> types) {
if (types == null) return Collections.emptyList();
List<TypeClassGenerator> generators = new ArrayList<>();
for (String type : types) {
switch (type) {
case "ApiMessageTypeGenerator":
generators.add(new ApiMessageTypeGenerator(packageName));
break;
case "MetadataRecordTypeGenerator":
generators.add(new MetadataRecordTypeGenerator(packageName));
break;
case "MetadataJsonConvertersGenerator":
generators.add(new MetadataJsonConvertersGenerator(packageName));
break;
case "CoordinatorRecordTypeGenerator":
generators.add(new CoordinatorRecordTypeGenerator(packageName));
break;
case "CoordinatorRecordJsonConvertersGenerator":
generators.add(new CoordinatorRecordJsonConvertersGenerator(packageName));
break;
default:
throw new RuntimeException("Unknown type
|
MessageGenerator
|
java
|
spring-projects__spring-boot
|
module/spring-boot-health/src/test/java/org/springframework/boot/health/contributor/ReactiveHealthIndicatorTests.java
|
{
"start": 892,
"end": 1862
}
|
class ____ {
private final ReactiveHealthIndicator indicator = () -> Mono.just(Health.up().withDetail("spring", "boot").build());
@Test
void asHealthContributor() {
HealthIndicator adapted = this.indicator.asHealthContributor();
Health health = adapted.health(true);
assertThat(health).isNotNull();
assertThat(health.getDetails()).containsEntry("spring", "boot");
}
@Test
void getHealthWhenIncludeDetailsIsTrueReturnsHealthWithDetails() {
Health health = this.indicator.health(true).block();
assertThat(health).isNotNull();
assertThat(health.getStatus()).isEqualTo(Status.UP);
assertThat(health.getDetails()).containsEntry("spring", "boot");
}
@Test
void getHealthWhenIncludeDetailsIsFalseReturnsHealthWithoutDetails() {
Health health = this.indicator.health(false).block();
assertThat(health).isNotNull();
assertThat(health.getStatus()).isEqualTo(Status.UP);
assertThat(health.getDetails()).isEmpty();
}
}
|
ReactiveHealthIndicatorTests
|
java
|
quarkusio__quarkus
|
extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcConfig.java
|
{
"start": 1508,
"end": 2226
}
|
interface ____ {
/**
* Maximum number of cache entries.
* Set it to a positive value if the cache has to be enabled.
*/
@WithDefault("0")
int maxSize();
/**
* Maximum amount of time a given cache entry is valid for.
*/
@WithDefault("3M")
Duration timeToLive();
/**
* Clean up timer interval.
* If this property is set then a timer will check and remove the stale entries periodically.
*/
Optional<Duration> cleanUpTimerInterval();
}
static OidcTenantConfig getDefaultTenant(OidcConfig config) {
return config.namedTenants().get(DEFAULT_TENANT_KEY);
}
}
|
TokenCache
|
java
|
apache__camel
|
components/camel-snmp/src/main/java/org/apache/camel/component/snmp/SnmpProducer.java
|
{
"start": 1657,
"end": 6932
}
|
class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(SnmpProducer.class);
private SnmpEndpoint endpoint;
private Address targetAddress;
private USM usm;
private Target target;
private SnmpActionType actionType;
private PDU pdu;
public SnmpProducer(SnmpEndpoint endpoint, SnmpActionType actionType) {
super(endpoint);
this.endpoint = endpoint;
this.actionType = actionType;
}
@Override
protected void doStart() throws Exception {
super.doStart();
this.targetAddress = GenericAddress.parse(this.endpoint.getServerAddress());
LOG.debug("targetAddress: {}", targetAddress);
this.usm = SnmpHelper.createAndSetUSM(endpoint);
this.pdu = SnmpHelper.createPDU(endpoint);
this.target = SnmpHelper.createTarget(endpoint);
// in here,only POLL do set the oids
if (this.actionType == SnmpActionType.POLL) {
for (OID oid : this.endpoint.getOids()) {
this.pdu.add(new VariableBinding(oid));
}
}
this.pdu.setErrorIndex(0);
this.pdu.setErrorStatus(0);
if (endpoint.getSnmpVersion() > SnmpConstants.version1) {
this.pdu.setMaxRepetitions(0);
}
// support POLL and GET_NEXT
if (this.actionType == SnmpActionType.GET_NEXT) {
this.pdu.setType(PDU.GETNEXT);
} else {
this.pdu.setType(PDU.GET);
}
}
@Override
protected void doStop() throws Exception {
super.doStop();
try {
if (this.usm != null) {
SecurityModels.getInstance().removeSecurityModel(new Integer32(this.usm.getID()));
}
} finally {
this.targetAddress = null;
this.usm = null;
this.target = null;
this.pdu = null;
}
}
@Override
public void process(final Exchange exchange) throws Exception {
// load connection data only if the endpoint is enabled
Snmp snmp = null;
TransportMapping<? extends Address> transport = null;
try {
LOG.debug("Starting SNMP producer on {}", this.endpoint.getServerAddress());
// either tcp or udp
if ("tcp".equals(this.endpoint.getProtocol())) {
transport = new DefaultTcpTransportMapping();
} else if ("udp".equals(this.endpoint.getProtocol())) {
transport = new DefaultUdpTransportMapping();
} else {
throw new IllegalArgumentException("Unknown protocol: " + this.endpoint.getProtocol());
}
snmp = new Snmp(transport);
LOG.debug("Snmp: i am sending");
snmp.listen();
if (this.actionType == SnmpActionType.GET_NEXT) {
// snmp walk
List<SnmpMessage> smLst = new ArrayList<>();
for (OID oid : this.endpoint.getOids()) {
this.pdu.clear();
this.pdu.add(new VariableBinding(oid));
boolean matched = true;
while (matched) {
ResponseEvent responseEvent = snmp.send(this.pdu, this.target);
if (responseEvent == null || responseEvent.getResponse() == null) {
break;
}
PDU response = responseEvent.getResponse();
String nextOid = null;
List<? extends VariableBinding> variableBindings = response.getVariableBindings();
for (int i = 0; i < variableBindings.size(); i++) {
VariableBinding variableBinding = variableBindings.get(i);
nextOid = variableBinding.getOid().toDottedString();
if (!nextOid.startsWith(oid.toDottedString())) {
matched = false;
break;
}
}
if (!matched) {
break;
}
this.pdu.clear();
pdu.add(new VariableBinding(new OID(nextOid)));
smLst.add(new SnmpMessage(getEndpoint().getCamelContext(), response));
}
}
exchange.getIn().setBody(smLst);
} else {
// snmp get
ResponseEvent responseEvent = snmp.send(this.pdu, this.target);
LOG.debug("Snmp: sended");
if (responseEvent.getResponse() != null) {
exchange.getIn().setBody(new SnmpMessage(getEndpoint().getCamelContext(), responseEvent.getResponse()));
} else {
throw new TimeoutException("SNMP Producer Timeout");
}
}
} finally {
try {
transport.close();
} catch (Exception e) {
}
try {
snmp.close();
} catch (Exception e) {
}
}
} //end process
}
|
SnmpProducer
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/sql/AsyncVectorSearchITCase.java
|
{
"start": 1974,
"end": 11556
}
|
class ____ extends StreamingWithStateTestBase {
public AsyncVectorSearchITCase(StateBackendMode state) {
super(state);
}
private final List<Row> data =
Arrays.asList(
Row.of(1L, new Float[] {5f, 12f, 13f}),
Row.of(2L, new Float[] {11f, 60f, 61f}),
Row.of(3L, new Float[] {8f, 15f, 17f}));
private final List<Row> nullableData =
Arrays.asList(Row.of(1L, new Float[] {5f, 12f, 13f}), Row.of(4L, null));
@BeforeEach
public void before() {
super.before();
createTable("src", data);
createTable("nullableSrc", nullableData);
createTable("vector", data);
}
@TestTemplate
void testSimple() {
List<Row> actual =
CollectionUtil.iteratorToList(
tEnv().executeSql(
"SELECT * FROM src, LATERAL TABLE(VECTOR_SEARCH(TABLE vector, DESCRIPTOR(`vector`), src.vector, 2))")
.collect());
assertThatList(actual)
.containsExactlyInAnyOrder(
Row.of(
1L,
new Float[] {5.0f, 12.0f, 13.0f},
1L,
new Float[] {5.0f, 12.0f, 13.0f},
1.0),
Row.of(
1L,
new Float[] {5.0f, 12.0f, 13.0f},
3L,
new Float[] {8f, 15f, 17f},
0.9977375565610862),
Row.of(
2L,
new Float[] {11f, 60f, 61f},
2L,
new Float[] {11f, 60f, 61f},
1.0),
Row.of(
2L,
new Float[] {11f, 60f, 61f},
1L,
new Float[] {5.0f, 12.0f, 13.0f},
0.9886506935687265),
Row.of(
3L,
new Float[] {8f, 15f, 17f},
3L,
new Float[] {8f, 15f, 17f},
1.0000000000000002),
Row.of(
3L,
new Float[] {8f, 15f, 17f},
1L,
new Float[] {5.0f, 12.0f, 13.0f},
0.9977375565610862));
}
@TestTemplate
void testLeftLateralJoin() {
List<Row> actual =
CollectionUtil.iteratorToList(
tEnv().executeSql(
"SELECT * FROM nullableSrc LEFT JOIN LATERAL TABLE(VECTOR_SEARCH(TABLE vector, DESCRIPTOR(`vector`), nullableSrc.vector, 2)) ON TRUE")
.collect());
assertThatList(actual)
.containsExactlyInAnyOrder(
Row.of(
1L,
new Float[] {5.0f, 12.0f, 13.0f},
1L,
new Float[] {5.0f, 12.0f, 13.0f},
1.0),
Row.of(
1L,
new Float[] {5.0f, 12.0f, 13.0f},
3L,
new Float[] {8f, 15f, 17f},
0.9977375565610862),
Row.of(4L, null, null, null, null));
}
@TestTemplate
void testTimeout() {
tEnv().getConfig()
.set(
ExecutionConfigOptions.TABLE_EXEC_ASYNC_VECTOR_SEARCH_TIMEOUT,
Duration.ofMillis(100));
assertThatThrownBy(
() ->
CollectionUtil.iteratorToList(
tEnv().executeSql(
"SELECT * FROM nullableSrc LEFT JOIN LATERAL TABLE(VECTOR_SEARCH(TABLE vector, DESCRIPTOR(`vector`), nullableSrc.vector, 2)) ON TRUE")
.collect()))
.satisfies(
FlinkAssertions.anyCauseMatches(
TimeoutException.class, "Async function call has timed out."));
}
@TestTemplate
void testConstantValue() {
List<Row> actual =
CollectionUtil.iteratorToList(
tEnv().executeSql(
"SELECT * FROM TABLE(VECTOR_SEARCH(TABLE vector, DESCRIPTOR(`vector`), ARRAY[5, 12, 13], 2))")
.collect());
assertThat(actual)
.containsExactlyInAnyOrder(
Row.of(1L, new Float[] {5.0f, 12.0f, 13.0f}, 1.0),
Row.of(3L, new Float[] {8f, 15f, 17f}, 0.9977375565610862));
}
@TestTemplate
void testVectorSearchWithCalc() {
// A projection (calc) between the index table and VECTOR_SEARCH is not
// supported: the subquery rewriting the table must be rejected at planning.
assertThatThrownBy(
() ->
tEnv().executeSql(
"SELECT * FROM nullableSrc\n "
+ "LEFT JOIN LATERAL TABLE(VECTOR_SEARCH((SELECT id+1, vector FROM vector), DESCRIPTOR(`vector`), nullableSrc.vector, 2)) ON TRUE"))
.satisfies(
FlinkAssertions.anyCauseMatches(
UnsupportedOperationException.class,
"Don't support calc on VECTOR_SEARCH node now."));
}
@TestTemplate
void testRuntimeConfig() {
// Per-call runtime config (MAP['timeout', '100ms'] as the 5th argument)
// overrides the timeout, producing the same timeout failure as testTimeout.
assertThatThrownBy(
() ->
CollectionUtil.iteratorToList(
tEnv().executeSql(
"SELECT * FROM nullableSrc LEFT JOIN LATERAL TABLE(VECTOR_SEARCH(TABLE vector, DESCRIPTOR(`vector`), nullableSrc.vector, 2, MAP['timeout', '100ms'])) ON TRUE")
.collect()))
.satisfies(
FlinkAssertions.anyCauseMatches(
TimeoutException.class, "Async function call has timed out."));
}
@Parameters(name = "backend = {0}, objectReuse = {1}, asyncOutputMode = {2}")
public static Collection<Object[]> parameters() {
    // The original enumerated all 8 combinations by hand. They are exactly the
    // cross product of {HEAP, ROCKSDB} x {true, false} x {ALLOW_UNORDERED,
    // ORDERED}, so build it with loops instead — same contents and same order
    // (backend outermost, output mode innermost). Fully-qualified ArrayList to
    // avoid touching the file's import block.
    Collection<Object[]> parameters = new java.util.ArrayList<>();
    for (Object backend :
            new Object[] {
                StreamingWithStateTestBase.HEAP_BACKEND(),
                StreamingWithStateTestBase.ROCKSDB_BACKEND()
            }) {
        for (boolean objectReuse : new boolean[] {true, false}) {
            for (ExecutionConfigOptions.AsyncOutputMode outputMode :
                    new ExecutionConfigOptions.AsyncOutputMode[] {
                        ExecutionConfigOptions.AsyncOutputMode.ALLOW_UNORDERED,
                        ExecutionConfigOptions.AsyncOutputMode.ORDERED
                    }) {
                parameters.add(new Object[] {backend, objectReuse, outputMode});
            }
        }
    }
    return parameters;
}
/**
 * Registers {@code data} with the test values factory and creates a table
 * {@code tableName} with schema (id BIGINT, vector ARRAY&lt;FLOAT&gt;) on the
 * 'values' connector, with vector search enabled and asynchronous lookups
 * that simulate a latency of 1000 ms (exceeded by the 100 ms timeouts used in
 * the timeout tests).
 */
private void createTable(String tableName, List<Row> data) {
String dataId = TestValuesTableFactory.registerData(data);
tEnv().executeSql(
String.format(
"CREATE TABLE `%s`(\n"
+ " id BIGINT,\n"
+ " vector ARRAY<FLOAT>\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'enable-vector-search' = 'true',\n"
+ " 'data-id' = '%s',\n"
+ " 'async' = 'true',\n"
+ " 'latency' = '1000'"
+ ")",
tableName, dataId));
}
}
|
AsyncVectorSearchITCase
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/tuple/ImmutableTripleTest.java
|
{
"start": 1541,
"end": 8944
}
|
class ____ extends AbstractLangTest {
// Unit tests for ImmutableTriple: accessors, factories, equality/hashCode,
// serialization, toString, and use as a map key.
@Test
void testBasic() {
final ImmutableTriple<Integer, String, Boolean> triple = new ImmutableTriple<>(0, "foo", Boolean.TRUE);
assertEquals(0, triple.left.intValue());
assertEquals(0, triple.getLeft().intValue());
assertEquals("foo", triple.middle);
assertEquals("foo", triple.getMiddle());
assertEquals(Boolean.TRUE, triple.right);
assertEquals(Boolean.TRUE, triple.getRight());
final ImmutableTriple<Object, String, Integer> triple2 = new ImmutableTriple<>(null, "bar", 42);
assertNull(triple2.left);
assertNull(triple2.getLeft());
assertEquals("bar", triple2.middle);
assertEquals("bar", triple2.getMiddle());
assertEquals(Integer.valueOf(42), triple2.right);
assertEquals(Integer.valueOf(42), triple2.getRight());
}
@Test
void testEmptyArrayGenerics() {
final ImmutableTriple<Integer, String, Boolean>[] empty = ImmutableTriple.emptyArray();
assertEquals(0, empty.length);
}
@Test
void testEmptyArrayLength() {
@SuppressWarnings("unchecked")
final ImmutableTriple<Integer, String, Boolean>[] empty = (ImmutableTriple<Integer, String, Boolean>[]) ImmutableTriple.EMPTY_ARRAY;
assertEquals(0, empty.length);
}
@Test
void testEquals() {
assertEquals(ImmutableTriple.of(null, "foo", 42), ImmutableTriple.of(null, "foo", 42));
assertNotEquals(ImmutableTriple.of("foo", 0, Boolean.TRUE), ImmutableTriple.of("foo", null, null));
assertNotEquals(ImmutableTriple.of("foo", "bar", "baz"), ImmutableTriple.of("xyz", "bar", "blo"));
final ImmutableTriple<String, String, String> p = ImmutableTriple.of("foo", "bar", "baz");
assertEquals(p, p);
assertNotEquals(p, new Object());
}
@Test
void testHashCode() {
// Equal triples (including null components) must hash identically.
assertEquals(ImmutableTriple.of(null, "foo", Boolean.TRUE).hashCode(), ImmutableTriple.of(null, "foo", Boolean.TRUE).hashCode());
}
@Test
void testNullTripleEquals() {
assertEquals(ImmutableTriple.nullTriple(), ImmutableTriple.nullTriple());
}
@Test
void testNullTripleLeft() {
assertNull(ImmutableTriple.nullTriple().getLeft());
}
@Test
void testNullTripleMiddle() {
assertNull(ImmutableTriple.nullTriple().getMiddle());
}
@Test
void testNullTripleRight() {
assertNull(ImmutableTriple.nullTriple().getRight());
}
@Test
void testNullTripleSame() {
// nullTriple() is expected to return a shared singleton instance.
assertSame(ImmutableTriple.nullTriple(), ImmutableTriple.nullTriple());
}
@Test
void testNullTripleTyped() {
// No compiler warnings
// How do we assert that?
final ImmutableTriple<String, String, String> triple = ImmutableTriple.nullTriple();
assertNotNull(triple);
}
@Test
void testOf() {
assertSame(ImmutableTriple.nullTriple(), ImmutableTriple.of(null, null, null));
assertEquals(0, ImmutableTriple.of(0, null, null).getLeft());
assertEquals(0, ImmutableTriple.of(null, 0, null).getMiddle());
assertEquals(0, ImmutableTriple.of(null, null, 0).getRight());
final ImmutableTriple<Integer, String, Boolean> triple = ImmutableTriple.of(0, "foo", Boolean.FALSE);
assertEquals(0, triple.left.intValue());
assertEquals(0, triple.getLeft().intValue());
assertEquals("foo", triple.middle);
assertEquals("foo", triple.getMiddle());
assertEquals(Boolean.FALSE, triple.right);
assertEquals(Boolean.FALSE, triple.getRight());
final ImmutableTriple<Object, String, Boolean> triple2 = ImmutableTriple.of(null, "bar", Boolean.TRUE);
assertNull(triple2.left);
assertNull(triple2.getLeft());
assertEquals("bar", triple2.middle);
assertEquals("bar", triple2.getMiddle());
assertEquals(Boolean.TRUE, triple2.right);
assertEquals(Boolean.TRUE, triple2.getRight());
}
@Test
void testOfNonNull() {
assertNullPointerException(() -> ImmutableTriple.ofNonNull(null, null, null));
assertNullPointerException(() -> ImmutableTriple.ofNonNull(null, null, "z"));
assertNullPointerException(() -> ImmutableTriple.ofNonNull(null, "y", "z"));
assertNullPointerException(() -> ImmutableTriple.ofNonNull("x", null, null));
assertNullPointerException(() -> ImmutableTriple.ofNonNull("x", "y", null));
final ImmutableTriple<String, String, String> pair = ImmutableTriple.ofNonNull("x", "y", "z");
assertEquals("x", pair.left);
assertEquals("y", pair.middle);
assertEquals("z", pair.right);
}
@Test
void testSerialization() throws Exception {
final ImmutableTriple<Integer, String, Boolean> origTriple = ImmutableTriple.of(0, "foo", Boolean.TRUE);
final ImmutableTriple<Integer, String, Boolean> deserializedTriple = SerializationUtils.roundtrip(origTriple);
assertEquals(origTriple, deserializedTriple);
assertEquals(origTriple.hashCode(), deserializedTriple.hashCode());
}
@Test
void testToString() {
assertEquals("(null,null,null)", ImmutableTriple.of(null, null, null).toString());
assertEquals("(null,two,null)", ImmutableTriple.of(null, "two", null).toString());
assertEquals("(one,null,null)", ImmutableTriple.of("one", null, null).toString());
assertEquals("(one,two,null)", ImmutableTriple.of("one", "two", null).toString());
assertEquals("(null,two,three)", ImmutableTriple.of(null, "two", "three").toString());
assertEquals("(one,null,three)", ImmutableTriple.of("one", null, "three").toString());
// Fixed copy-paste bug: this assertion previously went through
// MutableTriple.of(...), so ImmutableTriple's own toString was never
// exercised for the all-non-null case.
assertEquals("(one,two,three)", ImmutableTriple.of("one", "two", "three").toString());
}
@Test
void testUseAsKeyOfHashMap() {
final HashMap<ImmutableTriple<Object, Object, Object>, String> map = new HashMap<>();
final Object o1 = new Object();
final Object o2 = new Object();
final Object o3 = new Object();
final ImmutableTriple<Object, Object, Object> key1 = ImmutableTriple.of(o1, o2, o3);
final String value1 = "a1";
map.put(key1, value1);
assertEquals(value1, map.get(key1));
// An equal-but-distinct triple must retrieve the same entry.
assertEquals(value1, map.get(ImmutableTriple.of(o1, o2, o3)));
}
@Test
void testUseAsKeyOfTreeMap() {
final TreeMap<ImmutableTriple<Integer, Integer, Integer>, String> map = new TreeMap<>();
map.put(ImmutableTriple.of(0, 1, 2), "012");
map.put(ImmutableTriple.of(0, 1, 1), "011");
map.put(ImmutableTriple.of(0, 0, 1), "001");
final ArrayList<ImmutableTriple<Integer, Integer, Integer>> expected = new ArrayList<>();
expected.add(ImmutableTriple.of(0, 0, 1));
expected.add(ImmutableTriple.of(0, 1, 1));
expected.add(ImmutableTriple.of(0, 1, 2));
final Iterator<Entry<ImmutableTriple<Integer, Integer, Integer>, String>> it = map.entrySet().iterator();
for (final ImmutableTriple<Integer, Integer, Integer> item : expected) {
final Entry<ImmutableTriple<Integer, Integer, Integer>, String> entry = it.next();
assertEquals(item, entry.getKey());
assertEquals(item.getLeft() + "" + item.getMiddle() + "" + item.getRight(), entry.getValue());
}
}
}
|
ImmutableTripleTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ClassNewInstanceTest.java
|
{
"start": 3210,
"end": 3645
}
|
class ____ {
void f() {
try {
getClass().newInstance();
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
""")
.addOutputLines(
"out/Test.java",
"""
|
Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java
|
{
"start": 427,
"end": 1769
}
|
class ____ extends ElasticsearchInternalModel {
    // Ensure that inference endpoints based on ELSER don't go past its truncation window of 512 tokens
    public static final int ELSER_MAX_WINDOW_SIZE = 300;

    public ElserInternalModel(
        String inferenceEntityId,
        TaskType taskType,
        String service,
        ElserInternalServiceSettings serviceSettings,
        ElserMlNodeTaskSettings taskSettings,
        ChunkingSettings chunkingSettings
    ) {
        super(inferenceEntityId, taskType, service, serviceSettings, taskSettings, chunkingSettings);
        // Reject any chunking configuration whose chunks could exceed ELSER's window.
        if (chunkingSettings != null
            && chunkingSettings.maxChunkSize() != null
            && chunkingSettings.maxChunkSize() > ELSER_MAX_WINDOW_SIZE) {
            throw new IllegalArgumentException(
                "ELSER based models do not support chunk sizes larger than "
                    + ELSER_MAX_WINDOW_SIZE
                    + ". Requested chunk size: "
                    + chunkingSettings.maxChunkSize()
            );
        }
    }

    /** Narrows the inherited service settings to the ELSER-specific type. */
    @Override
    public ElserInternalServiceSettings getServiceSettings() {
        return (ElserInternalServiceSettings) super.getServiceSettings();
    }

    /** Narrows the inherited task settings to the ELSER-specific type. */
    @Override
    public ElserMlNodeTaskSettings getTaskSettings() {
        return (ElserMlNodeTaskSettings) super.getTaskSettings();
    }
}
|
ElserInternalModel
|
java
|
google__dagger
|
javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveSubcomponentQualifierTest.java
|
{
"start": 14687,
"end": 15318
}
|
class ____ implements BindingGraphPlugin {",
" @Override",
" public void visitGraph(",
" BindingGraph bindingGraph, DiagnosticReporter diagnosticReporter) {",
" bindingGraph.entryPointEdges().stream()",
" .map(DependencyEdge::dependencyRequest)",
" .map(DependencyRequest::key)",
" .forEach(key -> System.out.println(\"ENTRY_POINT_REQUEST: \" + key));",
" }",
"}");
return GradleRunner.create().withArguments("--stacktrace", "build").withProjectDir(projectDir);
}
}
|
TestBindingGraphPlugin
|
java
|
quarkusio__quarkus
|
extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/pathparams/HttpPathParamLimitWithReactiveRoutesTest.java
|
{
"start": 498,
"end": 2937
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
.overrideConfigKey("quarkus.micrometer.binder.http-client.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder.http-server.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder.vertx.enabled", "true")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false")
.withApplicationRoot((jar) -> jar
.addClasses(Util.class,
Resource.class));
@Inject
MeterRegistry registry;
public static final int COUNT = 101;
public static final int ARITY_LIMIT = 100;
@Test
void testWithReactiveRouteOK() throws InterruptedException {
registry.clear();
// Verify OK response
for (int i = 0; i < COUNT; i++) {
RestAssured.get("/rr").then().statusCode(200);
RestAssured.get("/rr/foo-" + i).then().statusCode(200);
}
// Verify metrics
// Matched requests are tagged with the route template, so all COUNT
// distinct /rr/foo-* hits collapse onto the single "/rr/{message}" uri tag.
Util.waitForMeters(registry.find("http.server.requests").timers(), COUNT);
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/rr").timers().iterator().next().count());
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/rr/{message}").timers().iterator().next().count());
// Verify 405 responses
for (int i = 0; i < COUNT; i++) {
RestAssured.delete("/rr").then().statusCode(405);
RestAssured.patch("/rr/foo-" + i).then().statusCode(501); // Not totally sure why reactive routes return a 501, it's not necessarily wrong, just different.
}
Util.waitForMeters(registry.find("http.server.requests").timers(), COUNT * 2);
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/rr").tag("method", "DELETE").timers().iterator().next().count());
// NOTE(review): PATCH requests appear as distinct uri tags here, capped at
// ARITY_LIMIT (COUNT = ARITY_LIMIT + 1 requests were issued) — confirm the
// cap semantics against the path-parameter limiting implementation.
Assertions.assertEquals(ARITY_LIMIT - 2, registry.find("http.server.requests")
.tag("method", "PATCH").timers().size()); // -2 because of the two other uri: /rr and /rr/{message}.
}
@Singleton
public static
|
HttpPathParamLimitWithReactiveRoutesTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java
|
{
"start": 1090,
"end": 1218
}
|
class ____ up and configures an ILM policy and index
* template for the ILM history indices (ilm-history-N-00000M).
*/
public
|
sets
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-jaxrs/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/support/jaxrs/filter/WriterInterceptorAdapter.java
|
{
"start": 1467,
"end": 1817
}
|
class ____ implements RestExtensionAdapter<WriterInterceptor> {
@Override
public boolean accept(Object extension) {
// This adapter only handles JAX-RS WriterInterceptor extensions.
return extension instanceof WriterInterceptor;
}
@Override
public RestFilter adapt(WriterInterceptor extension) {
// Wrap the interceptor so it can be invoked through the RestFilter contract.
return new Filter(extension);
}
private static final
|
WriterInterceptorAdapter
|
java
|
elastic__elasticsearch
|
modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java
|
{
"start": 3686,
"end": 8562
}
|
class ____ extends ESMockAPIBasedRepositoryIntegTestCase {
protected static final String DEFAULT_ACCOUNT_NAME = "account";
protected static final Predicate<String> LIST_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+\\?.+").asMatchPredicate();
protected static final Predicate<String> GET_BLOB_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+/.+").asMatchPredicate();
private static final AtomicInteger MAX_CONNECTION_SETTING = new AtomicInteger(-1);
private static final AtomicInteger EVENT_LOOP_THREAD_COUNT_SETTING = new AtomicInteger(-1);
@Override
protected String repositoryType() {
// Repository implementation under test: the Azure blob-store repository.
return AzureRepository.TYPE;
}
@Override
protected Settings repositorySettings(String repoName) {
// Force small (1 MB) single-part upload threshold and randomize the
// deletion batch size / concurrency so different code paths are exercised
// across runs; a base path is only sometimes configured.
Settings.Builder settingsBuilder = Settings.builder()
.put(super.repositorySettings(repoName))
.put(AzureRepository.Repository.MAX_SINGLE_PART_UPLOAD_SIZE_SETTING.getKey(), ByteSizeValue.of(1, ByteSizeUnit.MB))
.put(AzureRepository.Repository.CONTAINER_SETTING.getKey(), "container")
.put(AzureStorageSettings.ACCOUNT_SETTING.getKey(), "test")
.put(AzureRepository.Repository.DELETION_BATCH_SIZE_SETTING.getKey(), randomIntBetween(5, 256))
.put(AzureRepository.Repository.MAX_CONCURRENT_BATCH_DELETES_SETTING.getKey(), randomIntBetween(1, 10));
if (randomBoolean()) {
settingsBuilder.put(AzureRepository.Repository.BASE_PATH_SETTING.getKey(), randomFrom("test", "test/1"));
}
return settingsBuilder.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
// Test variant of the Azure repository plugin, plus telemetry capture.
return List.of(TestAzureRepositoryPlugin.class, TestTelemetryPlugin.class);
}
@Override
protected Map<String, HttpHandler> createHttpHandlers() {
// Mount the mock Azure blob-store handler under the account root path,
// wrapped so that per-request stats are collected.
return Collections.singletonMap(
"/" + DEFAULT_ACCOUNT_NAME,
new AzureHTTPStatsCollectorHandler(new AzureBlobStoreHttpHandler(DEFAULT_ACCOUNT_NAME, "container"))
);
}
@Override
protected HttpHandler createErroneousHttpHandler(final HttpHandler delegate) {
// Error-injecting wrapper around the delegate, still collecting HTTP stats.
return new AzureHTTPStatsCollectorHandler(new AzureErroneousHttpHandler(delegate, AzureStorageSettings.DEFAULT_MAX_RETRIES));
}
/**
 * Node settings: credentials (randomly an account key or a SAS token), the
 * mock HTTP endpoint, and randomized Azure client connection limits. The
 * connection/event-loop values are chosen once by the first node and shared
 * by all nodes via the static atomics.
 */
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
final String key = Base64.getEncoder().encodeToString(randomAlphaOfLength(14).getBytes(StandardCharsets.UTF_8));
final MockSecureSettings secureSettings = new MockSecureSettings();
String accountName = DEFAULT_ACCOUNT_NAME;
secureSettings.setString(AzureStorageSettings.ACCOUNT_SETTING.getConcreteSettingForNamespace("test").getKey(), accountName);
if (randomBoolean()) {
secureSettings.setString(AzureStorageSettings.KEY_SETTING.getConcreteSettingForNamespace("test").getKey(), key);
} else {
// The SDK expects a valid SAS TOKEN
secureSettings.setString(
AzureStorageSettings.SAS_TOKEN_SETTING.getConcreteSettingForNamespace("test").getKey(),
"se=2021-07-20T13%3A21Z&sp=rwdl&sv=2018-11-09&sr=c&sig=random"
);
}
// see com.azure.storage.blob.BlobUrlParts.parseIpUrl
final String endpoint = "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=" + httpServerUrl() + "/" + accountName;
// The first node configured sets these for all nodes
MAX_CONNECTION_SETTING.compareAndSet(-1, randomIntBetween(10, 30));
EVENT_LOOP_THREAD_COUNT_SETTING.compareAndSet(-1, randomIntBetween(1, 3));
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put(AzureStorageSettings.ENDPOINT_SUFFIX_SETTING.getConcreteSettingForNamespace("test").getKey(), endpoint)
.put(AzureClientProvider.EVENT_LOOP_THREAD_COUNT.getKey(), EVENT_LOOP_THREAD_COUNT_SETTING.get())
.put(AzureClientProvider.MAX_OPEN_CONNECTIONS.getKey(), MAX_CONNECTION_SETTING.get())
.put(AzureClientProvider.MAX_IDLE_TIME.getKey(), TimeValue.timeValueSeconds(randomIntBetween(10, 30)))
.put(AzureClientProvider.OPEN_CONNECTION_TIMEOUT.getKey(), TimeValue.timeValueSeconds(randomIntBetween(10, 30)))
.setSecureSettings(secureSettings)
.build();
}
protected TestTelemetryPlugin getTelemetryPlugin(String dataNodeName) {
// Locate the telemetry plugin instance installed on the given data node;
// fails if the plugin is not present.
return internalCluster().getInstance(PluginsService.class, dataNodeName)
.filterPlugins(TestTelemetryPlugin.class)
.findFirst()
.orElseThrow();
}
/**
* AzureRepositoryPlugin that allows to set low values for the Azure's client retry policy
* and for BlobRequestOptions#getSingleBlobPutThresholdInBytes().
*/
public static
|
AzureBlobStoreRepositoryTests
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
|
{
"start": 1427,
"end": 1600
}
|
interface ____ utilities for processing checksums for
* DFS data transfers.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public
|
and
|
java
|
apache__camel
|
components/camel-lumberjack/src/generated/java/org/apache/camel/component/lumberjack/LumberjackEndpointConfigurer.java
|
{
"start": 737,
"end": 3235
}
|
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    /** Resolves the lookup key, lower-casing it when case-insensitive matching is requested. */
    private static String lookupKey(String name, boolean ignoreCase) {
        return ignoreCase ? name.toLowerCase() : name;
    }

    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        LumberjackEndpoint target = (LumberjackEndpoint) obj;
        String key = lookupKey(name, ignoreCase);
        // Each option matches either its all-lowercase form (case-insensitive mode)
        // or its exact camelCase form.
        if ("bridgeerrorhandler".equals(key) || "bridgeErrorHandler".equals(key)) {
            target.setBridgeErrorHandler(property(camelContext, boolean.class, value));
            return true;
        }
        if ("exceptionhandler".equals(key) || "exceptionHandler".equals(key)) {
            target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value));
            return true;
        }
        if ("exchangepattern".equals(key) || "exchangePattern".equals(key)) {
            target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value));
            return true;
        }
        if ("sslcontextparameters".equals(key) || "sslContextParameters".equals(key)) {
            target.setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value));
            return true;
        }
        return false;
    }

    @Override
    public Class<?> getOptionType(String name, boolean ignoreCase) {
        String key = lookupKey(name, ignoreCase);
        if ("bridgeerrorhandler".equals(key) || "bridgeErrorHandler".equals(key)) {
            return boolean.class;
        }
        if ("exceptionhandler".equals(key) || "exceptionHandler".equals(key)) {
            return org.apache.camel.spi.ExceptionHandler.class;
        }
        if ("exchangepattern".equals(key) || "exchangePattern".equals(key)) {
            return org.apache.camel.ExchangePattern.class;
        }
        if ("sslcontextparameters".equals(key) || "sslContextParameters".equals(key)) {
            return org.apache.camel.support.jsse.SSLContextParameters.class;
        }
        return null;
    }

    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        LumberjackEndpoint target = (LumberjackEndpoint) obj;
        String key = lookupKey(name, ignoreCase);
        if ("bridgeerrorhandler".equals(key) || "bridgeErrorHandler".equals(key)) {
            return target.isBridgeErrorHandler();
        }
        if ("exceptionhandler".equals(key) || "exceptionHandler".equals(key)) {
            return target.getExceptionHandler();
        }
        if ("exchangepattern".equals(key) || "exchangePattern".equals(key)) {
            return target.getExchangePattern();
        }
        if ("sslcontextparameters".equals(key) || "sslContextParameters".equals(key)) {
            return target.getSslContextParameters();
        }
        return null;
    }
}
|
LumberjackEndpointConfigurer
|
java
|
quarkusio__quarkus
|
extensions/smallrye-reactive-messaging-kafka/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/kafka/deployment/ReflectiveClassForValueSerializerPayloadTest.java
|
{
"start": 2788,
"end": 2886
}
|
// Empty marker subclass: fixes the ObjectMapperSerializer payload type to JacksonDto.
class ____ extends ObjectMapperSerializer<JacksonDto> {
}
private static
|
JacksonDtoSerializer
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/function/json/JsonArrayAggregateTest.java
|
{
"start": 974,
"end": 1872
}
|
class ____ {
// The //tag::...[] / //end::...[] comments below are documentation include
// markers (snippets pulled into the user guide) — do not remove or rename.
@Test
public void testSimple(SessionFactoryScope scope) {
// json_arrayagg over a basic attribute.
scope.inSession( em -> {
//tag::hql-json-arrayagg-example[]
em.createQuery( "select json_arrayagg(e.theString) from EntityOfBasics e" ).getResultList();
//end::hql-json-arrayagg-example[]
} );
}
@Test
public void testNull(SessionFactoryScope scope) {
// json_arrayagg with explicit NULL ON NULL handling.
scope.inSession( em -> {
//tag::hql-json-arrayagg-null-example[]
em.createQuery( "select json_arrayagg(e.theString null on null) from EntityOfBasics e" ).getResultList();
//end::hql-json-arrayagg-null-example[]
} );
}
@Test
public void testOrderBy(SessionFactoryScope scope) {
// json_arrayagg with an ORDER BY clause inside the aggregate.
scope.inSession( em -> {
//tag::hql-json-arrayagg-order-by-example[]
em.createQuery( "select json_arrayagg(e.theString order by e.id) from EntityOfBasics e" ).getResultList();
//end::hql-json-arrayagg-order-by-example[]
} );
}
}
|
JsonArrayAggregateTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsFileStatus.java
|
{
"start": 2035,
"end": 14984
}
|
class ____ {
// Fluent builder for HdfsFileStatus instances; build() selects the concrete
// type based on whether locations, isdir or symlink were supplied.
// Changing default values will affect cases where values are not
// specified. Be careful!
private long length = 0L;
private boolean isdir = false;
private int replication = 0;
private long blocksize = 0L;
private long mtime = 0L;
private long atime = 0L;
private FsPermission permission = null;
private EnumSet<Flags> flags = EnumSet.noneOf(Flags.class);
private String owner = null;
private String group = null;
private byte[] symlink = null;
private byte[] path = EMPTY_NAME;
private long fileId = -1L;
private int childrenNum = 0;
private FileEncryptionInfo feInfo = null;
private byte storagePolicy =
HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED;
private ErasureCodingPolicy ecPolicy = null;
private LocatedBlocks locations = null;
/**
 * Set the length of the entity (default = 0).
 * @param length Entity length
 * @return This Builder instance
 */
public Builder length(long length) {
this.length = length;
return this;
}
/**
 * Set the isDir flag for the entity (default = false).
 * @param isdir True if the referent is a directory.
 * @return This Builder instance
 */
public Builder isdir(boolean isdir) {
this.isdir = isdir;
return this;
}
/**
 * Set the replication of this entity (default = 0).
 * @param replication Number of replicas
 * @return This Builder instance
 */
public Builder replication(int replication) {
this.replication = replication;
return this;
}
/**
 * Set the blocksize of this entity (default = 0).
 * @param blocksize Target, default blocksize
 * @return This Builder instance
 */
public Builder blocksize(long blocksize) {
this.blocksize = blocksize;
return this;
}
/**
 * Set the modification time of this entity (default = 0).
 * @param mtime Last modified time
 * @return This Builder instance
 */
public Builder mtime(long mtime) {
this.mtime = mtime;
return this;
}
/**
 * Set the access time of this entity (default = 0).
 * @param atime Last accessed time
 * @return This Builder instance
 */
public Builder atime(long atime) {
this.atime = atime;
return this;
}
/**
 * Set the permission mask of this entity (default = null).
 * @param permission Permission bitmask
 * @return This Builder instance
 */
public Builder perm(FsPermission permission) {
this.permission = permission;
return this;
}
/**
 * Set {@link Flags} for this entity
 * (default = {@link EnumSet#noneOf(Class)}).
 * @param flags Flags
 * @return This builder instance
 */
public Builder flags(EnumSet<Flags> flags) {
this.flags = flags;
return this;
}
/**
 * Set the owner for this entity (default = null).
 * @param owner Owner
 * @return This Builder instance
 */
public Builder owner(String owner) {
this.owner = owner;
return this;
}
/**
 * Set the group for this entity (default = null).
 * @param group Group
 * @return This Builder instance
 */
public Builder group(String group) {
this.group = group;
return this;
}
/**
 * Set symlink bytes for this entity (default = null).
 * @param symlink Symlink bytes (see
 * {@link DFSUtilClient#bytes2String(byte[])})
 * @return This Builder instance
 */
public Builder symlink(byte[] symlink) {
// Defensive copy: the caller may mutate its array after this call.
this.symlink = null == symlink
? null
: Arrays.copyOf(symlink, symlink.length);
return this;
}
/**
 * Set path bytes for this entity (default = {@link #EMPTY_NAME}).
 * @param path Path bytes (see {@link #makeQualified(URI, Path)}).
 * @return This Builder instance
 */
public Builder path(byte[] path) {
// Defensive copy, as for symlink above.
this.path = null == path
? null
: Arrays.copyOf(path, path.length);
return this;
}
/**
 * Set the fileId for this entity (default = -1).
 * @param fileId FileId
 * @return This Builder instance
 */
public Builder fileId(long fileId) {
this.fileId = fileId;
return this;
}
/**
 * Set the number of children for this entity (default = 0).
 * @param childrenNum Number of children
 * @return This Builder instance
 */
public Builder children(int childrenNum) {
this.childrenNum = childrenNum;
return this;
}
/**
 * Set the encryption info for this entity (default = null).
 * @param feInfo Encryption info
 * @return This Builder instance
 */
public Builder feInfo(FileEncryptionInfo feInfo) {
this.feInfo = feInfo;
return this;
}
/**
 * Set the storage policy for this entity
 * (default = {@link HdfsConstants#BLOCK_STORAGE_POLICY_ID_UNSPECIFIED}).
 * @param storagePolicy Storage policy
 * @return This Builder instance
 */
public Builder storagePolicy(byte storagePolicy) {
this.storagePolicy = storagePolicy;
return this;
}
/**
 * Set the erasure coding policy for this entity (default = null).
 * @param ecPolicy Erasure coding policy
 * @return This Builder instance
 */
public Builder ecPolicy(ErasureCodingPolicy ecPolicy) {
this.ecPolicy = ecPolicy;
return this;
}
/**
 * Set the block locations for this entity (default = null).
 * @param locations HDFS locations
 * (see {@link HdfsLocatedFileStatus#makeQualifiedLocated(URI, Path)})
 * @return This Builder instance
 */
public Builder locations(LocatedBlocks locations) {
this.locations = locations;
return this;
}
/**
 * @return An {@link HdfsFileStatus} instance from these parameters.
 */
public HdfsFileStatus build() {
// A plain file (no locations, not a directory, not a symlink) gets the
// lighter named variant; everything else carries the located form.
if (null == locations && !isdir && null == symlink) {
return new HdfsNamedFileStatus(length, isdir, replication, blocksize,
mtime, atime, permission, flags, owner, group, symlink, path,
fileId, childrenNum, feInfo, storagePolicy, ecPolicy);
}
return new HdfsLocatedFileStatus(length, isdir, replication, blocksize,
mtime, atime, permission, flags, owner, group, symlink, path,
fileId, childrenNum, feInfo, storagePolicy, ecPolicy, locations);
}
}
///////////////////
// HDFS-specific //
///////////////////
/**
* Inode ID for this entity, if a file.
* @return inode ID.
*/
long getFileId();
/**
* Get metadata for encryption, if present.
* @return the {@link FileEncryptionInfo} for this stream, or null if not
* encrypted.
*/
FileEncryptionInfo getFileEncryptionInfo();
/**
* Check if the local name is empty.
* @return true if the name is empty
*/
default boolean isEmptyLocalName() {
return getLocalNameInBytes().length == 0;
}
/**
* Get the string representation of the local name.
* @return the local name in string
*/
default String getLocalName() {
return DFSUtilClient.bytes2String(getLocalNameInBytes());
}
/**
* Get the Java UTF8 representation of the local name.
* @return the local name in java UTF8
*/
byte[] getLocalNameInBytes();
/**
 * Get the string representation of the full path name.
 * @param parent the parent path
 * @return the full path in string
 */
default String getFullName(String parent) {
    if (isEmptyLocalName()) {
        return parent;
    }
    // Append a separator only when the parent does not already end with one.
    String prefix = parent.endsWith(Path.SEPARATOR) ? parent : parent + Path.SEPARATOR;
    return prefix + getLocalName();
}
/**
* Get the full path.
* @param parent the parent path
* @return the full path
*/
default Path getFullPath(Path parent) {
if (isEmptyLocalName()) {
return parent;
}
return new Path(parent, getLocalName());
}
/**
* Opaque referant for the symlink, to be resolved at the client.
*/
byte[] getSymlinkInBytes();
/**
* @return number of children for this inode.
*/
int getChildrenNum();
/**
* Get the erasure coding policy if it's set.
* @return the erasure coding policy
*/
ErasureCodingPolicy getErasureCodingPolicy();
/** @return the storage policy id */
byte getStoragePolicy();
/**
* Resolve the short name of the Path given the URI, parent provided. This
* FileStatus reference will not contain a valid Path until it is resolved
* by this method.
* @param defaultUri FileSystem to fully qualify HDFS path.
* @param parent Parent path of this element.
* @return Reference to this instance.
*/
default FileStatus makeQualified(URI defaultUri, Path parent) {
// fully-qualify path
setPath(getFullPath(parent).makeQualified(defaultUri, null));
return (FileStatus) this; // API compatibility
}
////////////////////////////
// FileStatus "overrides" //
////////////////////////////
// The following declarations mirror the FileStatus API so that
// implementations can be used interchangeably with FileStatus.
/**
 * See {@link FileStatus#getPath()}.
 */
Path getPath();
/**
 * See {@link FileStatus#setPath(Path)}.
 */
void setPath(Path p);
/**
 * See {@link FileStatus#getLen()}.
 */
long getLen();
/**
 * See {@link FileStatus#isFile()}.
 */
boolean isFile();
/**
 * See {@link FileStatus#isDirectory()}.
 */
boolean isDirectory();
/**
 * See {@link FileStatus#isDir()}.
 */
boolean isDir();
/**
 * See {@link FileStatus#isSymlink()}.
 */
boolean isSymlink();
/**
 * See {@link FileStatus#getBlockSize()}.
 */
long getBlockSize();
/**
 * See {@link FileStatus#getReplication()}.
 */
short getReplication();
/**
 * See {@link FileStatus#getModificationTime()}.
 */
long getModificationTime();
/**
 * See {@link FileStatus#getAccessTime()}.
 */
long getAccessTime();
/**
 * See {@link FileStatus#getPermission()}.
 */
FsPermission getPermission();
/**
 * See {@link FileStatus#setPermission(FsPermission)}.
 */
void setPermission(FsPermission permission);
/**
 * See {@link FileStatus#getOwner()}.
 */
String getOwner();
/**
 * See {@link FileStatus#setOwner(String)}.
 */
void setOwner(String owner);
/**
 * See {@link FileStatus#getGroup()}.
 */
String getGroup();
/**
 * See {@link FileStatus#setGroup(String)}.
 */
void setGroup(String group);
/**
 * See {@link FileStatus#hasAcl()}.
 */
boolean hasAcl();
/**
 * See {@link FileStatus#isEncrypted()}.
 */
boolean isEncrypted();
/**
 * See {@link FileStatus#isErasureCoded()}.
 */
boolean isErasureCoded();
/**
 * See {@link FileStatus#isSnapshotEnabled()}.
 */
boolean isSnapshotEnabled();
/**
 * See {@link FileStatus#getSymlink()}.
 */
Path getSymlink() throws IOException;
/**
 * See {@link FileStatus#setSymlink(Path sym)}.
 */
void setSymlink(Path sym);
/**
 * See {@link FileStatus#compareTo(FileStatus)}.
 */
int compareTo(FileStatus stat);
// NOTE(review): namespace semantics are not visible in this file;
// presumably identifies the (federated) namespace owning this entry — confirm.
void setNamespace(String namespace);
String getNamespace();
/**
 * Wrap a permission in an {@code FsPermissionExtension} carrying the
 * redundant ACL/encryption/EC flag bits, for compatibility with existing
 * applications. An already-extended permission is passed through untouched.
 */
static FsPermission convert(boolean isdir, boolean symlink,
    FsPermission p, Set<Flags> f) {
  if (p instanceof FsPermissionExtension) {
    // Already an extension: just sanity-check that the embedded bits
    // agree with the flag set, then return it unchanged.
    assert p.getAclBit() == f.contains(HdfsFileStatus.Flags.HAS_ACL);
    assert p.getEncryptedBit() == f.contains(HdfsFileStatus.Flags.HAS_CRYPT);
    assert p.getErasureCodedBit() == f.contains(HdfsFileStatus.Flags.HAS_EC);
    return p;
  }
  FsPermission perm = p;
  if (perm == null) {
    // Pick the type-appropriate default when no permission was supplied.
    perm = isdir ? FsPermission.getDirDefault()
        : symlink ? FsPermission.getDefault()
        : FsPermission.getFileDefault();
  }
  return new FsPermissionExtension(perm,
      f.contains(Flags.HAS_ACL),
      f.contains(Flags.HAS_CRYPT),
      f.contains(Flags.HAS_EC));
}
/**
 * Translate the wire-level {@code Flags} set into the corresponding
 * {@code FileStatus.AttrFlags}; flags without a counterpart are dropped.
 */
static Set<AttrFlags> convert(Set<Flags> flags) {
  if (flags.isEmpty()) {
    // Shared immutable empty set avoids allocating per call.
    return FileStatus.NONE;
  }
  final EnumSet<AttrFlags> attrs = EnumSet.noneOf(AttrFlags.class);
  for (Flags flag : flags) {
    switch (flag) {
    case HAS_ACL:
      attrs.add(AttrFlags.HAS_ACL);
      break;
    case HAS_EC:
      attrs.add(AttrFlags.HAS_EC);
      break;
    case HAS_CRYPT:
      attrs.add(AttrFlags.HAS_CRYPT);
      break;
    case SNAPSHOT_ENABLED:
      attrs.add(AttrFlags.SNAPSHOT_ENABLED);
      break;
    default:
      // No AttrFlags equivalent for this flag.
      break;
    }
  }
  return attrs;
}
}
|
Builder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/QueryFederationQueuePoliciesRequestPBImpl.java
|
{
"start": 1375,
"end": 5387
}
|
class ____
extends QueryFederationQueuePoliciesRequest {
private QueryFederationQueuePoliciesRequestProto proto =
QueryFederationQueuePoliciesRequestProto.getDefaultInstance();
private QueryFederationQueuePoliciesRequestProto.Builder builder = null;
private boolean viaProto = false;
private List<String> queues = null;
public QueryFederationQueuePoliciesRequestPBImpl() {
builder = QueryFederationQueuePoliciesRequestProto.newBuilder();
}
public QueryFederationQueuePoliciesRequestPBImpl(
QueryFederationQueuePoliciesRequestProto proto) {
this.proto = proto;
viaProto = true;
}
@Override
public void setPageSize(int pageSize) {
maybeInitBuilder();
Preconditions.checkNotNull(builder);
builder.setPageSize(pageSize);
}
@Override
public int getPageSize() {
QueryFederationQueuePoliciesRequestProtoOrBuilder p = viaProto ? proto : builder;
boolean hasPageSize = p.hasPageSize();
if (hasPageSize) {
return p.getPageSize();
}
return 0;
}
@Override
public void setCurrentPage(int currentPage) {
maybeInitBuilder();
Preconditions.checkNotNull(builder);
builder.setCurrentPage(currentPage);
}
@Override
public int getCurrentPage() {
QueryFederationQueuePoliciesRequestProtoOrBuilder p = viaProto ? proto : builder;
boolean hasCurrentPage = p.hasCurrentPage();
if (hasCurrentPage) {
return p.getCurrentPage();
}
return 0;
}
@Override
public String getQueue() {
QueryFederationQueuePoliciesRequestProtoOrBuilder p = viaProto ? proto : builder;
boolean hasQueue = p.hasQueue();
if (hasQueue) {
return p.getQueue();
}
return null;
}
@Override
public void setQueue(String queue) {
maybeInitBuilder();
if (queue == null) {
builder.clearQueue();
return;
}
builder.setQueue(queue);
}
@Override
public List<String> getQueues() {
if (this.queues != null) {
return this.queues;
}
initQueues();
return this.queues;
}
@Override
public void setQueues(List<String> pQueues) {
if (pQueues == null || pQueues.isEmpty()) {
maybeInitBuilder();
if (this.queues != null) {
this.queues.clear();
}
return;
}
if (this.queues == null) {
this.queues = new ArrayList<>();
}
this.queues.clear();
this.queues.addAll(pQueues);
}
private void initQueues() {
if (this.queues != null) {
return;
}
QueryFederationQueuePoliciesRequestProtoOrBuilder p = viaProto ? proto : builder;
List<String> list = p.getQueuesList();
this.queues = new ArrayList<>();
this.queues.addAll(list);
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null) {
return false;
}
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
public QueryFederationQueuePoliciesRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void mergeLocalToProto() {
if (viaProto) {
maybeInitBuilder();
}
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void mergeLocalToBuilder() {
if (this.queues != null) {
addQueuesToProto();
}
}
private void addQueuesToProto() {
maybeInitBuilder();
builder.clearQueue();
if (this.queues == null) {
return;
}
builder.addAllQueues(this.queues);
}
private synchronized void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = QueryFederationQueuePoliciesRequestProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}
|
QueryFederationQueuePoliciesRequestPBImpl
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/StreamsConfig.java
|
{
"start": 70240,
"end": 114136
}
|
class ____ {
// This is settable in the main Streams config, but it's a private API for now
public static final String INTERNAL_TASK_ASSIGNOR_CLASS = "internal.task.assignor.class";
// These are not settable in the main Streams config; they are set by the StreamThread to pass internal
// state into the assignor.
public static final String REFERENCE_CONTAINER_PARTITION_ASSIGNOR = "__reference.container.instance__";
// This is settable in the main Streams config, but it's a private API for testing
public static final String ASSIGNMENT_LISTENER = "__assignment.listener__";
// This is settable in the main Streams config, but it's a private API for testing
public static final String INTERNAL_CONSUMER_WRAPPER = "__internal.consumer.wrapper__";
// Private API used to control the emit latency for left/outer join results (https://issues.apache.org/jira/browse/KAFKA-10847)
public static final String EMIT_INTERVAL_MS_KSTREAMS_OUTER_JOIN_SPURIOUS_RESULTS_FIX = "__emit.interval.ms.kstreams.outer.join.spurious.results.fix__";
// Private API used to control the emit latency for windowed aggregation results for ON_WINDOW_CLOSE emit strategy
public static final String EMIT_INTERVAL_MS_KSTREAMS_WINDOWED_AGGREGATION = "__emit.interval.ms.kstreams.windowed.aggregation__";
// Private API used to control the usage of consistency offset vectors
public static final String IQ_CONSISTENCY_OFFSET_VECTOR_ENABLED = "__iq.consistency.offset"
+ ".vector.enabled__";
// Private API used to control the prefix of the auto created topics
public static final String TOPIC_PREFIX_ALTERNATIVE = "__internal.override.topic.prefix__";
// Private API to enable the state updater (i.e. state updating on a dedicated thread)
public static final String STATE_UPDATER_ENABLED = "__state.updater.enabled__";
public static boolean stateUpdaterEnabled(final Map<String, Object> configs) {
return InternalConfig.getBoolean(configs, InternalConfig.STATE_UPDATER_ENABLED, true);
}
// Private API to enable processing threads (i.e. polling is decoupled from processing)
public static final String PROCESSING_THREADS_ENABLED = "__processing.threads.enabled__";
public static boolean processingThreadsEnabled(final Map<String, Object> configs) {
// note: we did disable testing "processing threads"` in SmokeTestDriverIntegrationTest due to
// high failure rate, and the feature being incomplete with no active work
//
// we should re-enable testing this feature in SmokeTestDriverIntegrationTest
// once it is complete (or maybe even earlier when we resumg working on it
return InternalConfig.getBoolean(configs, InternalConfig.PROCESSING_THREADS_ENABLED, false);
}
public static boolean getBoolean(final Map<String, Object> configs, final String key, final boolean defaultValue) {
final Object value = configs.getOrDefault(key, defaultValue);
if (value instanceof Boolean) {
return (boolean) value;
} else if (value instanceof String) {
return Boolean.parseBoolean((String) value);
} else {
log.warn(
"Invalid value ({}) on internal configuration '{}'. Please specify a true/false value.",
value,
key
);
return defaultValue;
}
}
public static long getLong(final Map<String, Object> configs, final String key, final long defaultValue) {
final Object value = configs.getOrDefault(key, defaultValue);
if (value instanceof Number) {
return ((Number) value).longValue();
} else if (value instanceof String) {
return Long.parseLong((String) value);
} else {
log.warn(
"Invalid value ({}) on internal configuration '{}'. Please specify a numeric value.",
value,
key
);
return defaultValue;
}
}
public static String getString(final Map<String, Object> configs, final String key, final String defaultValue) {
final Object value = configs.getOrDefault(key, defaultValue);
if (value instanceof String) {
return (String) value;
} else {
log.warn(
"Invalid value ({}) on internal configuration '{}'. Please specify a String value.",
value,
key
);
return defaultValue;
}
}
}
/**
 * Prefix a property with {@link #CONSUMER_PREFIX}. This is used to isolate {@link ConsumerConfig consumer configs}
 * from other client configs.
 *
 * @param consumerProp the consumer property to be masked
 * @return {@link #CONSUMER_PREFIX} + {@code consumerProp}
 */
@SuppressWarnings("WeakerAccess")
public static String consumerPrefix(final String consumerProp) {
    return CONSUMER_PREFIX + consumerProp;
}

/**
 * Prefix a property with {@link #MAIN_CONSUMER_PREFIX}. This is used to isolate {@link ConsumerConfig main consumer configs}
 * from other client configs.
 *
 * @param consumerProp the consumer property to be masked
 * @return {@link #MAIN_CONSUMER_PREFIX} + {@code consumerProp}
 */
@SuppressWarnings("WeakerAccess")
public static String mainConsumerPrefix(final String consumerProp) {
    return MAIN_CONSUMER_PREFIX + consumerProp;
}

/**
 * Prefix a property with {@link #RESTORE_CONSUMER_PREFIX}. This is used to isolate {@link ConsumerConfig restore consumer configs}
 * from other client configs.
 *
 * @param consumerProp the consumer property to be masked
 * @return {@link #RESTORE_CONSUMER_PREFIX} + {@code consumerProp}
 */
@SuppressWarnings("WeakerAccess")
public static String restoreConsumerPrefix(final String consumerProp) {
    return RESTORE_CONSUMER_PREFIX + consumerProp;
}

/**
 * Prefix a client tag key with {@link #CLIENT_TAG_PREFIX}.
 *
 * @param clientTagKey client tag key
 * @return {@link #CLIENT_TAG_PREFIX} + {@code clientTagKey}
 */
public static String clientTagPrefix(final String clientTagKey) {
    return CLIENT_TAG_PREFIX + clientTagKey;
}

/**
 * Prefix a property with {@link #GLOBAL_CONSUMER_PREFIX}. This is used to isolate {@link ConsumerConfig global consumer configs}
 * from other client configs.
 *
 * @param consumerProp the consumer property to be masked
 * @return {@link #GLOBAL_CONSUMER_PREFIX} + {@code consumerProp}
 */
@SuppressWarnings("WeakerAccess")
public static String globalConsumerPrefix(final String consumerProp) {
    return GLOBAL_CONSUMER_PREFIX + consumerProp;
}

/**
 * Prefix a property with {@link #PRODUCER_PREFIX}. This is used to isolate {@link ProducerConfig producer configs}
 * from other client configs.
 *
 * @param producerProp the producer property to be masked
 * @return {@link #PRODUCER_PREFIX} + {@code producerProp}
 */
@SuppressWarnings("WeakerAccess")
public static String producerPrefix(final String producerProp) {
    return PRODUCER_PREFIX + producerProp;
}

/**
 * Prefix a property with {@link #ADMIN_CLIENT_PREFIX}. This is used to isolate {@link AdminClientConfig admin configs}
 * from other client configs.
 *
 * @param adminClientProp the admin client property to be masked
 * @return {@link #ADMIN_CLIENT_PREFIX} + {@code adminClientProp}
 */
@SuppressWarnings("WeakerAccess")
public static String adminClientPrefix(final String adminClientProp) {
    return ADMIN_CLIENT_PREFIX + adminClientProp;
}

/**
 * Prefix a property with {@link #TOPIC_PREFIX}
 * used to provide default topic configs to be applied when creating internal topics.
 *
 * @param topicProp the topic property to be masked
 * @return {@link #TOPIC_PREFIX} + {@code topicProp}
 */
@SuppressWarnings("WeakerAccess")
public static String topicPrefix(final String topicProp) {
    return TOPIC_PREFIX + topicProp;
}

/**
 * Return a copy of the config definition.
 *
 * @return a copy of the config definition
 */
@SuppressWarnings("unused")
public static ConfigDef configDef() {
    return new ConfigDef(CONFIG);
}
/**
 * Create a new {@code StreamsConfig} using the given properties.
 *
 * @param props properties that specify Kafka Streams and internal consumer/producer configuration
 */
public StreamsConfig(final Map<?, ?> props) {
    this(props, true);
}

@SuppressWarnings("this-escape")
protected StreamsConfig(final Map<?, ?> props,
                        final boolean doLog) {
    super(CONFIG, props, doLog);
    // Cache EOS mode once; the per-client config getters below depend on it.
    eosEnabled = StreamsConfigUtils.eosEnabled(this);
    if (eosEnabled) {
        verifyEOSTransactionTimeoutCompatibility();
    }
    verifyTopologyOptimizationConfigs(getString(TOPOLOGY_OPTIMIZATION_CONFIG));
    verifyClientTelemetryConfigs();
    verifyStreamsProtocolCompatibility(doLog);
}
/**
 * Validate configs that are incompatible with (or ignored by) the streams
 * rebalance protocol. Only runs when {@code doLog} is set, i.e. once per
 * top-level config construction.
 */
private void verifyStreamsProtocolCompatibility(final boolean doLog) {
    if (doLog && isStreamsProtocolEnabled()) {
        // Static membership (group.instance.id) is rejected outright.
        final Map<String, Object> mainConsumerConfigs = getMainConsumerConfigs("dummy", "dummy", -1);
        final String instanceId = (String) mainConsumerConfigs.get(CommonClientConfigs.GROUP_INSTANCE_ID_CONFIG);
        if (instanceId != null && !instanceId.isEmpty()) {
            throw new ConfigException("Streams rebalance protocol does not support static membership. "
                + "Please set group.protocol=classic or remove group.instance.id from the configuration.");
        }
        // Warmup/standby replicas are merely warned about and ignored.
        if (getInt(StreamsConfig.MAX_WARMUP_REPLICAS_CONFIG) != 0) {
            log.warn("Warmup replicas are not supported yet with the streams protocol and will be ignored. "
                + "If you want to use warmup replicas, please set group.protocol=classic.");
        }
        if (getInt(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG) != 0) {
            log.warn("Standby replicas are configured broker-side in the streams group protocol and will be ignored. "
                + "Please use the admin client or kafka-configs.sh to set the streams groups's standby replicas.");
        }
    }
}
/**
 * Under EOS, the producer transaction timeout must be at least as large as
 * the Streams commit interval, otherwise in-flight transactions would
 * always expire between commits.
 */
private void verifyEOSTransactionTimeoutCompatibility() {
    final long commitInterval = getLong(COMMIT_INTERVAL_MS_CONFIG);
    final String transactionTimeoutConfigKey = producerPrefix(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG);
    // Use the user-provided producer.transaction.timeout.ms when present,
    // otherwise fall back to the Streams default.
    final int transactionTimeout =
        originals().containsKey(transactionTimeoutConfigKey) ?
            (int) Objects.requireNonNull(
                parseType(transactionTimeoutConfigKey, originals().get(transactionTimeoutConfigKey), Type.INT),
                "Could not parse config `" + COMMIT_INTERVAL_MS_CONFIG + "` because it's set to `null`") :
            DEFAULT_TRANSACTION_TIMEOUT;
    if (transactionTimeout < commitInterval) {
        throw new IllegalArgumentException(String.format(
            "Transaction timeout %d was set lower than " +
            "streams commit interval %d. This will cause ongoing transaction always timeout due to inactivity " +
            "caused by long commit interval. Consider reconfiguring commit interval to match " +
            "transaction timeout by tuning 'commit.interval.ms' config, or increase the transaction timeout to match " +
            "commit interval by tuning `producer.transaction.timeout.ms` config.",
            transactionTimeout,
            commitInterval
        ));
    }
}
/**
 * When Streams-level metrics push is enabled, the per-client
 * (main.consumer / consumer / admin.client prefixed) overrides of
 * enable.metrics.push must not contradict it.
 */
private void verifyClientTelemetryConfigs() {
    final boolean streamTelemetryEnabled = getBoolean(ENABLE_METRICS_PUSH_CONFIG);
    // Each of these is null when no prefixed override was provided.
    final Boolean mainConsumerMetricsConfig = maybeMetricsPushEnabled(MAIN_CONSUMER_PREFIX);
    final Boolean consumerMetricsConfig = maybeMetricsPushEnabled(CONSUMER_PREFIX);
    final Boolean adminMetricsConfig = maybeMetricsPushEnabled(ADMIN_CLIENT_PREFIX);
    if (streamTelemetryEnabled) {
        checkConsumerAndMainConsumerAndAdminMetricsConfig(adminMetricsConfig, consumerMetricsConfig, mainConsumerMetricsConfig);
        checkMainConsumerAndAdminMetricsConfig(adminMetricsConfig, mainConsumerMetricsConfig, "enabled");
    }
}
/**
 * Validate the consumer-prefixed enable.metrics.push override against the
 * main.consumer/admin.client overrides. A {@code null} argument means the
 * corresponding prefixed override was not provided.
 */
private void checkConsumerAndMainConsumerAndAdminMetricsConfig(final Boolean adminMetricsConfig,
                                                               final Boolean consumerMetricsConfig,
                                                               final Boolean mainConsumerMetricsConfig) {
    // Nothing to validate unless a consumer-prefixed override was given.
    if (consumerMetricsConfig == null) {
        return;
    }
    if (consumerMetricsConfig) {
        // Consumer-level push enabled: the finer-grained overrides must agree.
        checkMainConsumerAndAdminMetricsConfig(adminMetricsConfig, mainConsumerMetricsConfig, "and consumer.enable.metrics are enabled,");
    } else if (mainConsumerMetricsConfig == null && adminMetricsConfig == null) {
        // Consumer-level push explicitly disabled with no finer-grained overrides.
        throw new ConfigException("Kafka Streams metrics push enabled but consumer.enable.metrics is false, the setting needs to be consistent between the two");
    }
}
/**
 * Reject configurations where metrics push is on at the Streams level (per
 * {@code message}) but explicitly disabled for the main consumer and/or the
 * admin client. {@code null} means the corresponding override was not given.
 */
private void checkMainConsumerAndAdminMetricsConfig(final Boolean adminMetricsConfig, final Boolean mainConsumerMetricsConfig, final String message) {
    final boolean mainConsumerDisabled = mainConsumerMetricsConfig != null && !mainConsumerMetricsConfig;
    final boolean adminDisabled = adminMetricsConfig != null && !adminMetricsConfig;
    if (mainConsumerDisabled && adminDisabled) {
        throw new ConfigException("Kafka Streams metrics push " + message + " but main.consumer and admin.client metrics push are disabled, the setting needs to be consistent between the two");
    }
    if (mainConsumerDisabled) {
        throw new ConfigException("Kafka Streams metrics push " + message + " but main.consumer metrics push is disabled, the setting needs to be consistent between the two");
    }
    if (adminDisabled) {
        throw new ConfigException("Kafka Streams metrics push " + message + " but admin.client metrics push is disabled, the setting needs to be consistent between the two");
    }
}
/**
 * Look up the enable.metrics.push override under the given client prefix.
 *
 * @return the configured Boolean, or {@code null} when no override is present
 */
private Boolean maybeMetricsPushEnabled(final String prefix) {
    final Map<String, Object> prefixedConfigs = originalsWithPrefix(prefix);
    if (!prefixedConfigs.containsKey(ENABLE_METRICS_PUSH_CONFIG)) {
        return null;
    }
    return (Boolean) prefixedConfigs.get(ENABLE_METRICS_PUSH_CONFIG);
}
/**
 * Post-process parsed config values: apply reconnect-backoff defaults and,
 * under EOS, tighten the default commit interval.
 */
@Override
protected Map<String, Object> postProcessParsedConfig(final Map<String, Object> parsedValues) {
    final Map<String, Object> configUpdates =
        CommonClientConfigs.postProcessReconnectBackoffConfigs(this, parsedValues);
    // Only override the commit interval when the user did not set one explicitly.
    if (StreamsConfigUtils.eosEnabled(this) && !originals().containsKey(COMMIT_INTERVAL_MS_CONFIG)) {
        log.debug("Using {} default value of {} as exactly once is enabled.",
                COMMIT_INTERVAL_MS_CONFIG, EOS_DEFAULT_COMMIT_INTERVAL_MS);
        configUpdates.put(COMMIT_INTERVAL_MS_CONFIG, EOS_DEFAULT_COMMIT_INTERVAL_MS);
    }
    validateRackAwarenessConfiguration();
    return configUpdates;
}
/**
 * Validate the rack-aware assignment tags: bounded tag count, every
 * configured assignment tag must have a matching client tag, and tag
 * keys/values must respect the length limits.
 */
private void validateRackAwarenessConfiguration() {
    final List<String> rackAwareAssignmentTags = getList(RACK_AWARE_ASSIGNMENT_TAGS_CONFIG);
    final Map<String, String> clientTags = getClientTags();
    if (clientTags.size() > MAX_RACK_AWARE_ASSIGNMENT_TAG_LIST_SIZE) {
        throw new ConfigException("At most " + MAX_RACK_AWARE_ASSIGNMENT_TAG_LIST_SIZE + " client tags " +
            "can be specified using " + CLIENT_TAG_PREFIX + " prefix.");
    }
    // Every tag named in the assignment config must be backed by a client tag.
    for (final String rackAwareAssignmentTag : rackAwareAssignmentTags) {
        if (!clientTags.containsKey(rackAwareAssignmentTag)) {
            throw new ConfigException(RACK_AWARE_ASSIGNMENT_TAGS_CONFIG,
                                      rackAwareAssignmentTags,
                                      "Contains invalid value [" + rackAwareAssignmentTag + "] " +
                                      "which doesn't have corresponding tag set via [" + CLIENT_TAG_PREFIX + "] prefix.");
        }
    }
    // Enforce key/value length limits on all client tags.
    clientTags.forEach((tagKey, tagValue) -> {
        if (tagKey.length() > MAX_RACK_AWARE_ASSIGNMENT_TAG_KEY_LENGTH) {
            throw new ConfigException(CLIENT_TAG_PREFIX,
                                      tagKey,
                                      "Tag key exceeds maximum length of " + MAX_RACK_AWARE_ASSIGNMENT_TAG_KEY_LENGTH + ".");
        }
        if (tagValue.length() > MAX_RACK_AWARE_ASSIGNMENT_TAG_VALUE_LENGTH) {
            throw new ConfigException(CLIENT_TAG_PREFIX,
                                      tagValue,
                                      "Tag value exceeds maximum length of " + MAX_RACK_AWARE_ASSIGNMENT_TAG_VALUE_LENGTH + ".");
        }
    });
}
/**
 * Build the consumer configs shared by main/restore/global consumers:
 * Streams overrides (EOS-dependent), then custom client props, then
 * user-provided consumer-prefixed props, with bootstrap.servers always
 * taken from the Streams config itself.
 */
private Map<String, Object> getCommonConsumerConfigs() {
    final Map<String, Object> clientProvidedProps = getClientPropsWithPrefix(CONSUMER_PREFIX, ConsumerConfig.configNames());
    // group.protocol is managed by Streams, not passed through.
    clientProvidedProps.remove(GROUP_PROTOCOL_CONFIG);
    checkIfUnexpectedUserSpecifiedClientConfig(clientProvidedProps, NON_CONFIGURABLE_CONSUMER_DEFAULT_CONFIGS);
    checkIfUnexpectedUserSpecifiedClientConfig(clientProvidedProps, NON_CONFIGURABLE_CONSUMER_EOS_CONFIGS);
    final Map<String, Object> consumerProps = new HashMap<>(eosEnabled ? CONSUMER_EOS_OVERRIDES : CONSUMER_DEFAULT_OVERRIDES);
    if (StreamsConfigUtils.eosEnabled(this)) {
        consumerProps.put("internal.throw.on.fetch.stable.offset.unsupported", true);
    }
    consumerProps.putAll(getClientCustomProps());
    consumerProps.putAll(clientProvidedProps);
    // bootstrap.servers should be from StreamsConfig
    consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, originals().get(BOOTSTRAP_SERVERS_CONFIG));
    return consumerProps;
}
/**
 * Strip user overrides of client configs that Streams manages itself,
 * logging why each one is dropped. Mutates {@code clientProvidedProps}.
 */
private void checkIfUnexpectedUserSpecifiedClientConfig(final Map<String, Object> clientProvidedProps,
                                                        final String[] nonConfigurableConfigs) {
    // Streams does not allow users to configure certain client configurations (consumer/producer),
    // for example, enable.auto.commit or transactional.id. In cases where user tries to override
    // such non-configurable client configurations, log a warning and remove the user defined value
    // from the Map. Thus, the default values for these client configurations that are suitable for
    // Streams will be used instead.
    final String nonConfigurableConfigMessage = "Unexpected user-specified {} config '{}' found. {} setting ({}) will be ignored and the Streams default setting ({}) will be used.";
    final String eosMessage = "'" + PROCESSING_GUARANTEE_CONFIG + "' is set to \"" + getString(PROCESSING_GUARANTEE_CONFIG) + "\". Hence, user";
    for (final String config: nonConfigurableConfigs) {
        if (clientProvidedProps.containsKey(config)) {
            if (CONSUMER_DEFAULT_OVERRIDES.containsKey(config)) {
                // Unconditional Streams consumer default: drop conflicting value.
                if (!clientProvidedProps.get(config).equals(CONSUMER_DEFAULT_OVERRIDES.get(config))) {
                    log.error(
                        nonConfigurableConfigMessage,
                        "consumer",
                        config,
                        "User",
                        clientProvidedProps.get(config),
                        CONSUMER_DEFAULT_OVERRIDES.get(config)
                    );
                    clientProvidedProps.remove(config);
                }
            } else if (eosEnabled) {
                // EOS-mandated consumer/producer overrides.
                if (CONSUMER_EOS_OVERRIDES.containsKey(config)) {
                    if (!clientProvidedProps.get(config).equals(CONSUMER_EOS_OVERRIDES.get(config))) {
                        log.warn(
                            nonConfigurableConfigMessage,
                            "consumer",
                            config,
                            eosMessage,
                            clientProvidedProps.get(config),
                            CONSUMER_EOS_OVERRIDES.get(config)
                        );
                        clientProvidedProps.remove(config);
                    }
                } else if (PRODUCER_EOS_OVERRIDES.containsKey(config)) {
                    if (!clientProvidedProps.get(config).equals(PRODUCER_EOS_OVERRIDES.get(config))) {
                        log.warn(
                            nonConfigurableConfigMessage,
                            "producer",
                            config,
                            eosMessage,
                            clientProvidedProps.get(config),
                            PRODUCER_EOS_OVERRIDES.get(config)
                        );
                        clientProvidedProps.remove(config);
                    }
                } else if (ProducerConfig.TRANSACTIONAL_ID_CONFIG.equals(config)) {
                    // transactional.id is always generated by Streams under EOS.
                    log.warn(
                        nonConfigurableConfigMessage,
                        "producer",
                        config,
                        eosMessage,
                        clientProvidedProps.get(config),
                        "<appId>-<generatedSuffix>"
                    );
                    clientProvidedProps.remove(config);
                }
            }
        }
    }
    if (eosEnabled) {
        verifyMaxInFlightRequestPerConnection(clientProvidedProps.get(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION));
    }
}
/**
 * Validate a user-supplied max.in.flight.requests.per.connection override.
 * Accepts an Integer or a parseable String; any other type, an unparseable
 * String, or a value above 5 is rejected. A {@code null} value means the
 * user did not override the setting and nothing is checked.
 */
private void verifyMaxInFlightRequestPerConnection(final Object maxInFlightRequests) {
    if (maxInFlightRequests == null) {
        return;
    }
    final int parsedMaxInFlight;
    if (maxInFlightRequests instanceof Integer) {
        parsedMaxInFlight = (Integer) maxInFlightRequests;
    } else if (maxInFlightRequests instanceof String) {
        try {
            parsedMaxInFlight = Integer.parseInt(((String) maxInFlightRequests).trim());
        } catch (final NumberFormatException e) {
            throw new ConfigException(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, maxInFlightRequests, "String value could not be parsed as 32-bit integer");
        }
    } else {
        throw new ConfigException(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, maxInFlightRequests, "Expected value to be a 32-bit integer, but it was a " + maxInFlightRequests.getClass().getName());
    }
    // This method is only invoked when EOS is enabled (see the caller).
    if (parsedMaxInFlight > 5) {
        throw new ConfigException(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, parsedMaxInFlight, "Can't exceed 5 when exactly-once processing is enabled");
    }
}
/**
 * Get the configs to the {@link KafkaConsumer main consumer}.
 * Properties using the prefix {@link #MAIN_CONSUMER_PREFIX} will be used in favor over
 * the properties prefixed with {@link #CONSUMER_PREFIX} and the non-prefixed versions
 * (read the override precedence ordering in {@link #MAIN_CONSUMER_PREFIX})
 * except in the case of {@link ConsumerConfig#BOOTSTRAP_SERVERS_CONFIG} where we always use the non-prefixed
 * version as we only support reading/writing from/to the same Kafka Cluster.
 * If not specified by {@link #MAIN_CONSUMER_PREFIX}, main consumer will share the general consumer configs
 * prefixed by {@link #CONSUMER_PREFIX}.
 *
 * @param groupId consumer groupId
 * @param clientId clientId
 * @param threadIdx stream thread index
 * @return Map of the consumer configuration.
 */
@SuppressWarnings("WeakerAccess")
public Map<String, Object> getMainConsumerConfigs(final String groupId, final String clientId, final int threadIdx) {
    final Map<String, Object> consumerProps = getCommonConsumerConfigs();
    // Get main consumer override configs
    final Map<String, Object> mainConsumerProps = originalsWithPrefix(MAIN_CONSUMER_PREFIX);
    checkIfUnexpectedUserSpecifiedClientConfig(mainConsumerProps, NON_CONFIGURABLE_CONSUMER_DEFAULT_CONFIGS);
    consumerProps.putAll(mainConsumerProps);
    // this is a hack to work around StreamsConfig constructor inside StreamsPartitionAssignor to avoid casting
    consumerProps.put(APPLICATION_ID_CONFIG, groupId);
    // add group id, client id with stream client id prefix, and group instance id
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    consumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);
    final String groupInstanceId = (String) consumerProps.get(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG);
    // Suffix each thread consumer with thread.id to enforce uniqueness of group.instance.id.
    if (groupInstanceId != null) {
        consumerProps.put(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, groupInstanceId + "-" + threadIdx);
    }
    // add configs required for stream partition assignor
    consumerProps.put(UPGRADE_FROM_CONFIG, getString(UPGRADE_FROM_CONFIG));
    consumerProps.put(REPLICATION_FACTOR_CONFIG, getInt(REPLICATION_FACTOR_CONFIG));
    consumerProps.put(APPLICATION_SERVER_CONFIG, getString(APPLICATION_SERVER_CONFIG));
    consumerProps.put(NUM_STANDBY_REPLICAS_CONFIG, getInt(NUM_STANDBY_REPLICAS_CONFIG));
    consumerProps.put(ACCEPTABLE_RECOVERY_LAG_CONFIG, getLong(ACCEPTABLE_RECOVERY_LAG_CONFIG));
    consumerProps.put(MAX_WARMUP_REPLICAS_CONFIG, getInt(MAX_WARMUP_REPLICAS_CONFIG));
    consumerProps.put(PROBING_REBALANCE_INTERVAL_MS_CONFIG, getLong(PROBING_REBALANCE_INTERVAL_MS_CONFIG));
    consumerProps.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, StreamsPartitionAssignor.class.getName());
    consumerProps.put(WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG, getLong(WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG));
    consumerProps.put(RACK_AWARE_ASSIGNMENT_NON_OVERLAP_COST_CONFIG, getInt(RACK_AWARE_ASSIGNMENT_NON_OVERLAP_COST_CONFIG));
    consumerProps.put(RACK_AWARE_ASSIGNMENT_STRATEGY_CONFIG, getString(RACK_AWARE_ASSIGNMENT_STRATEGY_CONFIG));
    consumerProps.put(RACK_AWARE_ASSIGNMENT_TAGS_CONFIG, getList(RACK_AWARE_ASSIGNMENT_TAGS_CONFIG));
    consumerProps.put(RACK_AWARE_ASSIGNMENT_TRAFFIC_COST_CONFIG, getInt(RACK_AWARE_ASSIGNMENT_TRAFFIC_COST_CONFIG));
    consumerProps.put(TASK_ASSIGNOR_CLASS_CONFIG, getString(TASK_ASSIGNOR_CLASS_CONFIG));
    // verify that producer batch config is no larger than segment size, then add topic configs required for creating topics
    final Map<String, Object> topicProps = originalsWithPrefix(TOPIC_PREFIX, false);
    final Map<String, Object> producerProps = getClientPropsWithPrefix(PRODUCER_PREFIX, ProducerConfig.configNames());
    if (topicProps.containsKey(topicPrefix(TopicConfig.SEGMENT_BYTES_CONFIG)) &&
        producerProps.containsKey(ProducerConfig.BATCH_SIZE_CONFIG)) {
        final int segmentSize = Integer.parseInt(topicProps.get(topicPrefix(TopicConfig.SEGMENT_BYTES_CONFIG)).toString());
        final int batchSize = Integer.parseInt(producerProps.get(ProducerConfig.BATCH_SIZE_CONFIG).toString());
        if (segmentSize < batchSize) {
            throw new IllegalArgumentException(String.format(
                "Specified topic segment size %d is smaller than the configured producer batch size %d, this will cause produced batch not able to be appended to the topic",
                segmentSize,
                batchSize
            ));
        }
    }
    consumerProps.putAll(topicProps);
    return consumerProps;
}
/**
 * Get the configs for the {@link KafkaConsumer restore-consumer}.
 * Properties using the prefix {@link #RESTORE_CONSUMER_PREFIX} will be used in favor over
 * the properties prefixed with {@link #CONSUMER_PREFIX} and the non-prefixed versions
 * (read the override precedence ordering in {@link #RESTORE_CONSUMER_PREFIX})
 * except in the case of {@link ConsumerConfig#BOOTSTRAP_SERVERS_CONFIG} where we always use the non-prefixed
 * version as we only support reading/writing from/to the same Kafka Cluster.
 * If not specified by {@link #RESTORE_CONSUMER_PREFIX}, restore consumer will share the general consumer configs
 * prefixed by {@link #CONSUMER_PREFIX}.
 *
 * @param clientId clientId
 * @return Map of the restore consumer configuration.
 */
@SuppressWarnings("WeakerAccess")
public Map<String, Object> getRestoreConsumerConfigs(final String clientId) {
    final Map<String, Object> baseConsumerProps = getCommonConsumerConfigs();
    // Get restore consumer override configs
    final Map<String, Object> restoreConsumerProps = originalsWithPrefix(RESTORE_CONSUMER_PREFIX);
    checkIfUnexpectedUserSpecifiedClientConfig(restoreConsumerProps, NON_CONFIGURABLE_CONSUMER_DEFAULT_CONFIGS);
    baseConsumerProps.putAll(restoreConsumerProps);
    // no need to set group id for a restore consumer
    baseConsumerProps.remove(ConsumerConfig.GROUP_ID_CONFIG);
    // no need to set instance id for a restore consumer
    baseConsumerProps.remove(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG);
    // add client id with stream client id prefix
    baseConsumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);
    // restoration reads from explicit offsets only; never auto-reset
    baseConsumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "none");
    return baseConsumerProps;
}
/**
 * Get the configs for the {@link KafkaConsumer global consumer}.
 * Properties using the prefix {@link #GLOBAL_CONSUMER_PREFIX} will be used in favor over
 * the properties prefixed with {@link #CONSUMER_PREFIX} and the non-prefixed versions
 * (read the override precedence ordering in {@link #GLOBAL_CONSUMER_PREFIX})
 * except in the case of {@link ConsumerConfig#BOOTSTRAP_SERVERS_CONFIG} where we always use the non-prefixed
 * version as we only support reading/writing from/to the same Kafka Cluster.
 * If not specified by {@link #GLOBAL_CONSUMER_PREFIX}, global consumer will share the general consumer configs
 * prefixed by {@link #CONSUMER_PREFIX}.
 *
 * @param clientId clientId
 * @return Map of the global consumer configuration.
 */
@SuppressWarnings("WeakerAccess")
public Map<String, Object> getGlobalConsumerConfigs(final String clientId) {
    final Map<String, Object> baseConsumerProps = getCommonConsumerConfigs();
    // Get global consumer override configs
    final Map<String, Object> globalConsumerProps = originalsWithPrefix(GLOBAL_CONSUMER_PREFIX);
    checkIfUnexpectedUserSpecifiedClientConfig(globalConsumerProps, NON_CONFIGURABLE_CONSUMER_DEFAULT_CONFIGS);
    baseConsumerProps.putAll(globalConsumerProps);
    // no need to set group id for a global consumer
    baseConsumerProps.remove(ConsumerConfig.GROUP_ID_CONFIG);
    // no need to set instance id for a global consumer either
    baseConsumerProps.remove(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG);
    // add client id with stream client id prefix
    baseConsumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId + "-global-consumer");
    // the global consumer tracks offsets itself; never auto-reset
    baseConsumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "none");
    return baseConsumerProps;
}
/**
* Get the configs for the {@link KafkaProducer producer}.
* Properties using the prefix {@link #PRODUCER_PREFIX} will be used in favor over their non-prefixed versions
* except in the case of {@link ProducerConfig#BOOTSTRAP_SERVERS_CONFIG} where we always use the non-prefixed
* version as we only support reading/writing from/to the same Kafka Cluster.
*
* @param clientId clientId
* @return Map of the producer configuration.
*/
@SuppressWarnings("WeakerAccess")
public Map<String, Object> getProducerConfigs(final String clientId) {
final Map<String, Object> clientProvidedProps = getClientPropsWithPrefix(PRODUCER_PREFIX, ProducerConfig.configNames());
checkIfUnexpectedUserSpecifiedClientConfig(clientProvidedProps, NON_CONFIGURABLE_PRODUCER_EOS_CONFIGS);
// generate producer configs from original properties and overridden maps
final Map<String, Object> props = new HashMap<>(eosEnabled ? PRODUCER_EOS_OVERRIDES : PRODUCER_DEFAULT_OVERRIDES);
props.putAll(getClientCustomProps());
props.putAll(clientProvidedProps);
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, originals().get(BOOTSTRAP_SERVERS_CONFIG));
// add client id with stream client id prefix
props.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);
return props;
}
/**
* Get the configs for the {@link Admin admin client}.
* @param clientId clientId
* @return Map of the admin client configuration.
*/
@SuppressWarnings("WeakerAccess")
public Map<String, Object> getAdminConfigs(final String clientId) {
final Map<String, Object> clientProvidedProps = getClientPropsWithPrefix(ADMIN_CLIENT_PREFIX, AdminClientConfig.configNames());
final Map<String, Object> props = new HashMap<>(ADMIN_CLIENT_OVERRIDES);
props.putAll(getClientCustomProps());
props.putAll(clientProvidedProps);
// add client id with stream client id prefix
props.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);
return props;
}
/**
* Get the configured client tags set with {@link #CLIENT_TAG_PREFIX} prefix.
*
* @return Map of the client tags.
*/
@SuppressWarnings("WeakerAccess")
public Map<String, String> getClientTags() {
return originalsWithPrefix(CLIENT_TAG_PREFIX).entrySet().stream().collect(
Collectors.toMap(
Map.Entry::getKey,
tagEntry -> Objects.toString(tagEntry.getValue())
)
);
}
private Map<String, Object> getClientPropsWithPrefix(final String prefix,
final Set<String> configNames) {
final Map<String, Object> props = clientProps(configNames, originals());
props.putAll(originalsWithPrefix(prefix));
return props;
}
/**
* Get a map of custom configs by removing from the originals all the Streams, Consumer, Producer, and AdminClient configs.
* Prefixed properties are also removed because they are already added by {@link #getClientPropsWithPrefix(String, Set)}.
* This allows to set a custom property for a specific client alone if specified using a prefix, or for all
* when no prefix is used.
*
* @return a map with the custom properties
*/
private Map<String, Object> getClientCustomProps() {
final Map<String, Object> props = originals();
props.keySet().removeAll(CONFIG.names());
props.keySet().removeAll(ConsumerConfig.configNames());
props.keySet().removeAll(ProducerConfig.configNames());
props.keySet().removeAll(AdminClientConfig.configNames());
props.keySet().removeAll(originalsWithPrefix(CONSUMER_PREFIX, false).keySet());
props.keySet().removeAll(originalsWithPrefix(PRODUCER_PREFIX, false).keySet());
props.keySet().removeAll(originalsWithPrefix(ADMIN_CLIENT_PREFIX, false).keySet());
return props;
}
public static Set<String> verifyTopologyOptimizationConfigs(final String config) {
final List<String> configs = Arrays.asList(config.split("\\s*,\\s*"));
final Set<String> verifiedConfigs = new HashSet<>();
// Verify it doesn't contain none or all plus a list of optimizations
if (configs.contains(NO_OPTIMIZATION) || configs.contains(OPTIMIZE)) {
if (configs.size() > 1) {
throw new ConfigException("\"" + config + "\" is not a valid optimization config. " + CONFIG_ERROR_MSG);
}
}
for (final String conf: configs) {
if (!TOPOLOGY_OPTIMIZATION_CONFIGS.contains(conf)) {
throw new ConfigException("Unrecognized config. " + CONFIG_ERROR_MSG);
}
}
if (configs.contains(OPTIMIZE)) {
verifiedConfigs.add(REUSE_KTABLE_SOURCE_TOPICS);
verifiedConfigs.add(MERGE_REPARTITION_TOPICS);
verifiedConfigs.add(SINGLE_STORE_SELF_JOIN);
} else if (!configs.contains(NO_OPTIMIZATION)) {
verifiedConfigs.addAll(configs);
}
return verifiedConfigs;
}
/**
* Return configured KafkaClientSupplier
* @return Configured KafkaClientSupplier
*/
public KafkaClientSupplier getKafkaClientSupplier() {
return getConfiguredInstance(StreamsConfig.DEFAULT_CLIENT_SUPPLIER_CONFIG,
KafkaClientSupplier.class);
}
/**
* Return an {@link Serde#configure(Map, boolean) configured} instance of {@link #DEFAULT_KEY_SERDE_CLASS_CONFIG key Serde
* class}.
*
* @return a configured instance of key Serde class
*/
@SuppressWarnings("WeakerAccess")
public Serde<?> defaultKeySerde() {
final Object keySerdeConfigSetting = get(DEFAULT_KEY_SERDE_CLASS_CONFIG);
if (keySerdeConfigSetting == null) {
throw new ConfigException("Please specify a key serde or set one through StreamsConfig#DEFAULT_KEY_SERDE_CLASS_CONFIG");
}
try {
final Serde<?> serde = getConfiguredInstance(DEFAULT_KEY_SERDE_CLASS_CONFIG, Serde.class);
serde.configure(originals(), true);
return serde;
} catch (final Exception e) {
throw new StreamsException(
String.format("Failed to configure key serde %s", keySerdeConfigSetting),
e
);
}
}
/**
* Return an {@link Serde#configure(Map, boolean) configured} instance of {@link #DEFAULT_VALUE_SERDE_CLASS_CONFIG value
* Serde class}.
*
* @return an configured instance of value Serde class
*/
@SuppressWarnings("WeakerAccess")
public Serde<?> defaultValueSerde() {
final Object valueSerdeConfigSetting = get(DEFAULT_VALUE_SERDE_CLASS_CONFIG);
if (valueSerdeConfigSetting == null) {
throw new ConfigException("Please specify a value serde or set one through StreamsConfig#DEFAULT_VALUE_SERDE_CLASS_CONFIG");
}
try {
final Serde<?> serde = getConfiguredInstance(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serde.class);
serde.configure(originals(), false);
return serde;
} catch (final Exception e) {
throw new StreamsException(
String.format("Failed to configure value serde %s", valueSerdeConfigSetting),
e
);
}
}
@SuppressWarnings("WeakerAccess")
public TimestampExtractor defaultTimestampExtractor() {
return getConfiguredInstance(DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, TimestampExtractor.class);
}
public DeserializationExceptionHandler deserializationExceptionHandler() {
if (originals().containsKey(DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG) &&
originals().containsKey(DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG)) {
log.warn("Both the deprecated and new config for deserialization exception handler are configured. " +
"The deprecated one will be ignored.");
}
if (originals().containsKey(DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG)) {
return getConfiguredInstance(DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, DeserializationExceptionHandler.class);
} else {
return defaultDeserializationExceptionHandler();
}
}
/**
* @deprecated Since 4.0. Use {@link #deserializationExceptionHandler()} instead.
*/
@Deprecated
@SuppressWarnings("WeakerAccess")
public DeserializationExceptionHandler defaultDeserializationExceptionHandler() {
return getConfiguredInstance(DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, DeserializationExceptionHandler.class);
}
public ProductionExceptionHandler productionExceptionHandler() {
if (originals().containsKey(PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG) &&
originals().containsKey(DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG)) {
log.warn("Both the deprecated and new config for production exception handler are configured. " +
"The deprecated one will be ignored.");
}
if (originals().containsKey(PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG)) {
return getConfiguredInstance(PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG, ProductionExceptionHandler.class);
} else {
return defaultProductionExceptionHandler();
}
}
/**
* @deprecated Since 4.0. Use {@link #productionExceptionHandler()} instead.
*/
@Deprecated
@SuppressWarnings("WeakerAccess")
public ProductionExceptionHandler defaultProductionExceptionHandler() {
return getConfiguredInstance(DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG, ProductionExceptionHandler.class);
}
public ProcessingExceptionHandler processingExceptionHandler() {
return getConfiguredInstance(PROCESSING_EXCEPTION_HANDLER_CLASS_CONFIG, ProcessingExceptionHandler.class);
}
protected boolean isStreamsProtocolEnabled() {
return getString(GROUP_PROTOCOL_CONFIG).equalsIgnoreCase(GroupProtocol.STREAMS.name());
}
/**
* Override any client properties in the original configs with overrides
*
* @param configNames The given set of configuration names.
* @param originals The original configs to be filtered.
* @return client config with any overrides
*/
private Map<String, Object> clientProps(final Set<String> configNames,
final Map<String, Object> originals) {
// iterate all client config names, filter out non-client configs from the original
// property map and use the overridden values when they are not specified by users
final Map<String, Object> parsed = new HashMap<>();
for (final String configName: configNames) {
if (originals.containsKey(configName)) {
parsed.put(configName, originals.get(configName));
}
}
return parsed;
}
public static void main(final String[] args) {
System.out.println(CONFIG.toHtml(4, config -> "streamsconfigs_" + config));
}
}
|
InternalConfig
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/NoDefaultSecurityResource.java
|
{
"start": 224,
"end": 633
}
|
class ____ {
@GET
@Path("/hello")
public Greeting hello() {
return new Greeting("Hello there");
}
@POST
@Path("/hello")
public Greeting hello(Greeting greeting) {
return greeting;
}
@GET
@Path("/goodbye")
@Produces(MediaType.APPLICATION_XML)
public Greeting byebye() {
return new Greeting("Good Bye !");
}
}
|
NoDefaultSecurityResource
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/logging/logback/StructuredLogEncoder.java
|
{
"start": 1938,
"end": 7017
}
|
class ____ extends EncoderBase<ILoggingEvent> {
private final ThrowableProxyConverter throwableProxyConverter = new ThrowableProxyConverter();
private @Nullable String format;
private @Nullable StructuredLogFormatter<ILoggingEvent> formatter;
private @Nullable Charset charset = StandardCharsets.UTF_8;
public void setFormat(String format) {
this.format = format;
}
public void setCharset(@Nullable Charset charset) {
this.charset = charset;
}
@Override
public void start() {
Assert.state(this.format != null, "Format has not been set");
this.formatter = createFormatter(this.format);
super.start();
this.throwableProxyConverter.start();
}
private StructuredLogFormatter<ILoggingEvent> createFormatter(String format) {
Environment environment = (Environment) getContext().getObject(Environment.class.getName());
Assert.state(environment != null, "Unable to find Spring Environment in logger context");
return new StructuredLogFormatterFactory<>(ILoggingEvent.class, environment, this::addAvailableParameters,
this::addCommonFormatters)
.get(format);
}
private void addAvailableParameters(AvailableParameters availableParameters) {
availableParameters.add(ThrowableProxyConverter.class, this.throwableProxyConverter);
}
private void addCommonFormatters(CommonFormatters<ILoggingEvent> commonFormatters) {
commonFormatters.add(CommonStructuredLogFormat.ELASTIC_COMMON_SCHEMA, this::createEcsFormatter);
commonFormatters.add(CommonStructuredLogFormat.GRAYLOG_EXTENDED_LOG_FORMAT, this::createGraylogFormatter);
commonFormatters.add(CommonStructuredLogFormat.LOGSTASH, this::createLogstashFormatter);
}
private StructuredLogFormatter<ILoggingEvent> createEcsFormatter(Instantiator<?> instantiator) {
Environment environment = instantiator.getArg(Environment.class);
StackTracePrinter stackTracePrinter = instantiator.getArg(StackTracePrinter.class);
ContextPairs contextPairs = instantiator.getArg(ContextPairs.class);
ThrowableProxyConverter throwableProxyConverter = instantiator.getArg(ThrowableProxyConverter.class);
StructuredLoggingJsonMembersCustomizer.Builder<?> jsonMembersCustomizerBuilder = instantiator
.getArg(StructuredLoggingJsonMembersCustomizer.Builder.class);
Assert.state(environment != null, "'environment' must not be null");
Assert.state(contextPairs != null, "'contextPairs' must not be null");
Assert.state(throwableProxyConverter != null, "'throwableProxyConverter' must not be null");
Assert.state(jsonMembersCustomizerBuilder != null, "'jsonMembersCustomizerBuilder' must not be null");
return new ElasticCommonSchemaStructuredLogFormatter(environment, stackTracePrinter, contextPairs,
throwableProxyConverter, jsonMembersCustomizerBuilder);
}
private StructuredLogFormatter<ILoggingEvent> createGraylogFormatter(Instantiator<?> instantiator) {
Environment environment = instantiator.getArg(Environment.class);
StackTracePrinter stackTracePrinter = instantiator.getArg(StackTracePrinter.class);
ContextPairs contextPairs = instantiator.getArg(ContextPairs.class);
ThrowableProxyConverter throwableProxyConverter = instantiator.getArg(ThrowableProxyConverter.class);
StructuredLoggingJsonMembersCustomizer<?> jsonMembersCustomizer = instantiator
.getArg(StructuredLoggingJsonMembersCustomizer.class);
Assert.state(environment != null, "'environment' must not be null");
Assert.state(contextPairs != null, "'contextPairs' must not be null");
Assert.state(throwableProxyConverter != null, "'throwableProxyConverter' must not be null");
return new GraylogExtendedLogFormatStructuredLogFormatter(environment, stackTracePrinter, contextPairs,
throwableProxyConverter, jsonMembersCustomizer);
}
private StructuredLogFormatter<ILoggingEvent> createLogstashFormatter(Instantiator<?> instantiator) {
StackTracePrinter stackTracePrinter = instantiator.getArg(StackTracePrinter.class);
ContextPairs contextPairs = instantiator.getArg(ContextPairs.class);
ThrowableProxyConverter throwableProxyConverter = instantiator.getArg(ThrowableProxyConverter.class);
StructuredLoggingJsonMembersCustomizer<?> jsonMembersCustomizer = instantiator
.getArg(StructuredLoggingJsonMembersCustomizer.class);
Assert.state(contextPairs != null, "'contextPairs' must not be null");
Assert.state(throwableProxyConverter != null, "'throwableProxyConverter' must not be null");
return new LogstashStructuredLogFormatter(stackTracePrinter, contextPairs, throwableProxyConverter,
jsonMembersCustomizer);
}
@Override
public void stop() {
this.throwableProxyConverter.stop();
super.stop();
}
@Override
public byte @Nullable [] headerBytes() {
return null;
}
@Override
public byte[] encode(ILoggingEvent event) {
Assert.state(this.formatter != null,
"formatter must not be null. Make sure to call start() before this method");
return this.formatter.formatAsBytes(event, (this.charset != null) ? this.charset : StandardCharsets.UTF_8);
}
@Override
public byte @Nullable [] footerBytes() {
return null;
}
}
|
StructuredLogEncoder
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/discovery/predicates/TestClassPredicatesTests.java
|
{
"start": 6444,
"end": 7316
}
|
class ____ annotated with @Nested. It will not be executed.".formatted(
candidate.getName())) //
.source(ClassSource.from(candidate)) //
.build();
assertThat(discoveryIssues).containsExactlyInAnyOrder(notPrivateIssue, notInnerClassIssue);
}
@Test
void privateClassWithNestedTestCasesEvaluatesToFalse() {
var candidate = TestCases.PrivateClassWithNestedTestClass.class;
assertTrue(predicates.looksLikeIntendedTestClass(candidate));
assertFalse(predicates.isValidStandaloneTestClass(candidate));
var notPrivateIssue = DiscoveryIssue.builder(Severity.WARNING,
"Test class '%s' must not be private. It will not be executed.".formatted(candidate.getName())) //
.source(ClassSource.from(candidate)) //
.build();
var notInnerClassIssue = DiscoveryIssue.builder(Severity.WARNING,
"Test class '%s' must not be an inner
|
unless
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/action/CamelRouteStopAction.java
|
{
"start": 1152,
"end": 1397
}
|
class ____ extends CamelRouteAction {
public CamelRouteStopAction(CamelJBangMain main) {
super(main);
}
@Override
protected void onAction(JsonObject root) {
root.put("command", "stop");
}
}
|
CamelRouteStopAction
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/SnapshotDirectoryTest.java
|
{
"start": 1260,
"end": 8245
}
|
class ____ {
@TempDir private Path temporaryFolder;
/** Tests if mkdirs for snapshot directories works. */
@Test
void mkdirs() throws Exception {
File folderRoot = temporaryFolder.toFile();
File newFolder = new File(folderRoot, String.valueOf(UUID.randomUUID()));
File innerNewFolder = new File(newFolder, String.valueOf(UUID.randomUUID()));
Path path = innerNewFolder.toPath();
assertThat(newFolder).doesNotExist();
assertThat(innerNewFolder).doesNotExist();
SnapshotDirectory snapshotDirectory = SnapshotDirectory.permanent(path);
assertThat(snapshotDirectory.exists()).isFalse();
assertThat(newFolder).doesNotExist();
assertThat(innerNewFolder).doesNotExist();
assertThat(snapshotDirectory.mkdirs()).isTrue();
assertThat(newFolder).isDirectory();
assertThat(innerNewFolder).isDirectory();
assertThat(snapshotDirectory.exists()).isTrue();
}
/** Tests if indication of directory existence works. */
@Test
void exists() throws Exception {
File folderRoot = temporaryFolder.toFile();
File folderA = new File(folderRoot, String.valueOf(UUID.randomUUID()));
assertThat(folderA).doesNotExist();
Path path = folderA.toPath();
SnapshotDirectory snapshotDirectory = SnapshotDirectory.permanent(path);
assertThat(snapshotDirectory.exists()).isFalse();
assertThat(folderA.mkdirs()).isTrue();
assertThat(snapshotDirectory.exists()).isTrue();
assertThat(folderA.delete()).isTrue();
assertThat(snapshotDirectory.exists()).isFalse();
}
/** Tests listing of file statuses works like listing on the path directly. */
@Test
void listStatus() throws Exception {
File folderRoot = temporaryFolder.toFile();
File folderA = new File(folderRoot, String.valueOf(UUID.randomUUID()));
File folderB = new File(folderA, String.valueOf(UUID.randomUUID()));
assertThat(folderB.mkdirs()).isTrue();
File file = new File(folderA, "test.txt");
assertThat(file.createNewFile()).isTrue();
Path path = folderA.toPath();
SnapshotDirectory snapshotDirectory = SnapshotDirectory.permanent(path);
assertThat(snapshotDirectory.exists()).isTrue();
assertThat(Arrays.toString(snapshotDirectory.listDirectory()))
.isEqualTo(Arrays.toString(snapshotDirectory.listDirectory()));
assertThat(Arrays.toString(snapshotDirectory.listDirectory()))
.isEqualTo(Arrays.toString(FileUtils.listDirectory(path)));
}
/**
* Tests that reporting the handle of a completed snapshot works as expected and that the
* directory for completed snapshot is not deleted by {@link #deleteIfNotCompeltedSnapshot()}.
*/
@Test
void completeSnapshotAndGetHandle() throws Exception {
File folderRoot = temporaryFolder.toFile();
File folderA = new File(folderRoot, String.valueOf(UUID.randomUUID()));
assertThat(folderA.mkdirs()).isTrue();
Path folderAPath = folderA.toPath();
SnapshotDirectory snapshotDirectory = SnapshotDirectory.permanent(folderAPath);
// check that completed checkpoint dirs are not deleted as incomplete.
DirectoryStateHandle handle = snapshotDirectory.completeSnapshotAndGetHandle();
assertThat(handle).isNotNull();
assertThat(snapshotDirectory.cleanup()).isTrue();
assertThat(folderA).isDirectory();
assertThat(handle.getDirectory()).isEqualTo(folderAPath);
handle.discardState();
assertThat(folderA).doesNotExist();
assertThat(folderA.mkdirs()).isTrue();
SnapshotDirectory newSnapshotDirectory = SnapshotDirectory.permanent(folderAPath);
assertThat(newSnapshotDirectory.cleanup()).isTrue();
assertThatThrownBy(newSnapshotDirectory::completeSnapshotAndGetHandle)
.isInstanceOf(IOException.class);
}
/**
* Tests that snapshot director behaves correct for delete calls. Completed snapshots should not
* be deleted, only ongoing snapshots can.
*/
@Test
void deleteIfNotCompeltedSnapshot() throws Exception {
File folderRoot = temporaryFolder.toFile();
File folderA = new File(folderRoot, String.valueOf(UUID.randomUUID()));
File folderB = new File(folderA, String.valueOf(UUID.randomUUID()));
assertThat(folderB.mkdirs()).isTrue();
File file = new File(folderA, "test.txt");
assertThat(file.createNewFile()).isTrue();
Path folderAPath = folderA.toPath();
SnapshotDirectory snapshotDirectory = SnapshotDirectory.permanent(folderAPath);
assertThat(snapshotDirectory.cleanup()).isTrue();
assertThat(folderA).doesNotExist();
assertThat(folderA.mkdirs()).isTrue();
assertThat(file.createNewFile()).isTrue();
snapshotDirectory = SnapshotDirectory.permanent(folderAPath);
snapshotDirectory.completeSnapshotAndGetHandle();
assertThat(snapshotDirectory.cleanup()).isTrue();
assertThat(folderA).isDirectory();
assertThat(file).exists();
}
/**
* This test checks that completing or deleting the snapshot influence the #isSnapshotOngoing()
* flag.
*/
@Test
void isSnapshotOngoing() throws Exception {
File folderRoot = temporaryFolder.toFile();
File folderA = new File(folderRoot, String.valueOf(UUID.randomUUID()));
assertThat(folderA.mkdirs()).isTrue();
Path pathA = folderA.toPath();
SnapshotDirectory snapshotDirectory = SnapshotDirectory.permanent(pathA);
assertThat(snapshotDirectory.isSnapshotCompleted()).isFalse();
assertThat(snapshotDirectory.completeSnapshotAndGetHandle()).isNotNull();
assertThat(snapshotDirectory.isSnapshotCompleted()).isTrue();
snapshotDirectory = SnapshotDirectory.permanent(pathA);
assertThat(snapshotDirectory.isSnapshotCompleted()).isFalse();
snapshotDirectory.cleanup();
assertThat(snapshotDirectory.isSnapshotCompleted()).isFalse();
}
/** Tests that temporary directories have the right behavior on completion and deletion. */
@Test
void temporary() throws Exception {
File folderRoot = temporaryFolder.toFile();
File folder = new File(folderRoot, String.valueOf(UUID.randomUUID()));
assertThat(folder.mkdirs()).isTrue();
SnapshotDirectory tmpSnapshotDirectory = SnapshotDirectory.temporary(folder);
// temporary snapshot directories should not return a handle, because they will be deleted.
assertThat(tmpSnapshotDirectory.completeSnapshotAndGetHandle()).isNull();
// check that the directory is deleted even after we called #completeSnapshotAndGetHandle.
assertThat(tmpSnapshotDirectory.cleanup()).isTrue();
assertThat(folder).doesNotExist();
}
}
|
SnapshotDirectoryTest
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/redshift/visitor/RedshiftASTVisitor.java
|
{
"start": 171,
"end": 889
}
|
interface ____ extends SQLASTVisitor {
default boolean visit(RedshiftSelectQueryBlock x) {
return true;
}
default void endVisit(RedshiftSelectQueryBlock x) {}
default boolean visit(RedshiftSortKey x) {
return true;
}
default void endVisit(RedshiftSortKey x) {}
default boolean visit(RedshiftCreateTableStatement x) {
return true;
}
default void endVisit(RedshiftCreateTableStatement x) {}
default boolean visit(RedshiftColumnEncode x) { return true; }
default void endVisit(RedshiftColumnEncode x) {}
default boolean visit(RedshiftColumnKey x) {
return true;
}
default void endVisit(RedshiftColumnKey x) {
}
}
|
RedshiftASTVisitor
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldBeInSameMonth.java
|
{
"start": 864,
"end": 1500
}
|
class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldBeInSameMonth}</code>.
* @param actual the actual value in the failed assertion.
* @param other the value used in the failed assertion to compare the actual value to.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeInSameMonth(Date actual, Date other) {
return new ShouldBeInSameMonth(actual, other);
}
private ShouldBeInSameMonth(Date actual, Date other) {
super("%nExpecting actual:%n %s%nto be on same year and month as:%n %s", actual, other);
}
}
|
ShouldBeInSameMonth
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java
|
{
"start": 2681,
"end": 12106
}
|
class ____
extends AbstractExecutionGraphHandler<
JobExceptionsInfoWithHistory, JobExceptionsMessageParameters>
implements JsonArchivist {
static final int MAX_NUMBER_EXCEPTION_TO_REPORT = 20;
static final List<FailureLabelFilterParameter.FailureLabel> EMPTY_FAILURE_LABEL_FILTER =
Collections.emptyList();
public JobExceptionsHandler(
GatewayRetriever<? extends RestfulGateway> leaderRetriever,
Duration timeout,
Map<String, String> responseHeaders,
MessageHeaders<
EmptyRequestBody,
JobExceptionsInfoWithHistory,
JobExceptionsMessageParameters>
messageHeaders,
ExecutionGraphCache executionGraphCache,
Executor executor) {
super(
leaderRetriever,
timeout,
responseHeaders,
messageHeaders,
executionGraphCache,
executor);
}
@Override
protected JobExceptionsInfoWithHistory handleRequest(
HandlerRequest<EmptyRequestBody> request, ExecutionGraphInfo executionGraph) {
final List<Integer> exceptionToReportMaxSizes =
request.getQueryParameter(UpperLimitExceptionParameter.class);
final int exceptionToReportMaxSize =
exceptionToReportMaxSizes.size() > 0
? exceptionToReportMaxSizes.get(0)
: MAX_NUMBER_EXCEPTION_TO_REPORT;
List<FailureLabelFilterParameter.FailureLabel> failureLabelFilter =
request.getQueryParameter(FailureLabelFilterParameter.class);
failureLabelFilter =
failureLabelFilter.size() > 0 ? failureLabelFilter : EMPTY_FAILURE_LABEL_FILTER;
return createJobExceptionsInfo(
executionGraph, exceptionToReportMaxSize, failureLabelFilter);
}
@Override
public Collection<ArchivedJson> archiveJsonWithPath(ExecutionGraphInfo executionGraphInfo)
throws IOException {
ResponseBody json =
createJobExceptionsInfo(
executionGraphInfo,
MAX_NUMBER_EXCEPTION_TO_REPORT,
EMPTY_FAILURE_LABEL_FILTER);
String path =
getMessageHeaders()
.getTargetRestEndpointURL()
.replace(
':' + JobIDPathParameter.KEY,
executionGraphInfo.getJobId().toString());
return Collections.singletonList(new ArchivedJson(path, json));
}
private static JobExceptionsInfoWithHistory createJobExceptionsInfo(
ExecutionGraphInfo executionGraphInfo,
int exceptionToReportMaxSize,
List<FailureLabelFilterParameter.FailureLabel> failureLabelFilter) {
return new JobExceptionsInfoWithHistory(
createJobExceptionHistory(
executionGraphInfo.getExceptionHistory(),
exceptionToReportMaxSize,
failureLabelFilter));
}
private static JobExceptionsInfoWithHistory.JobExceptionHistory createJobExceptionHistory(
Iterable<RootExceptionHistoryEntry> historyEntries,
int limit,
List<FailureLabelFilterParameter.FailureLabel> failureLabelFilter) {
// we need to reverse the history to have a stable result when doing paging on it
List<RootExceptionHistoryEntry> reversedHistoryEntries = new ArrayList<>();
Iterables.addAll(reversedHistoryEntries, historyEntries);
Collections.reverse(reversedHistoryEntries);
if (!failureLabelFilter.isEmpty()) {
reversedHistoryEntries =
reversedHistoryEntries.stream()
.filter(
entry -> {
for (FailureLabelFilterParameter.FailureLabel label :
failureLabelFilter) {
if (!entry.getFailureLabels()
.containsKey(label.getKey())
|| !entry.getFailureLabels()
.get(label.getKey())
.equals(label.getValue())) {
return false;
}
}
return true;
})
.collect(Collectors.toList());
}
final List<JobExceptionsInfoWithHistory.RootExceptionInfo> exceptionHistoryEntries =
reversedHistoryEntries.stream()
.limit(limit)
.map(JobExceptionsHandler::createRootExceptionInfo)
.collect(Collectors.toList());
return new JobExceptionsInfoWithHistory.JobExceptionHistory(
exceptionHistoryEntries,
exceptionHistoryEntries.size() < reversedHistoryEntries.size());
}
private static JobExceptionsInfoWithHistory.RootExceptionInfo createRootExceptionInfo(
RootExceptionHistoryEntry historyEntry) {
final List<JobExceptionsInfoWithHistory.ExceptionInfo> concurrentExceptions =
StreamSupport.stream(historyEntry.getConcurrentExceptions().spliterator(), false)
.map(JobExceptionsHandler::createExceptionInfo)
.collect(Collectors.toList());
if (historyEntry.isGlobal()) {
return new JobExceptionsInfoWithHistory.RootExceptionInfo(
historyEntry.getException().getOriginalErrorClassName(),
historyEntry.getExceptionAsString(),
historyEntry.getTimestamp(),
historyEntry.getFailureLabels(),
concurrentExceptions);
}
assertLocalExceptionInfo(historyEntry);
return new JobExceptionsInfoWithHistory.RootExceptionInfo(
historyEntry.getException().getOriginalErrorClassName(),
historyEntry.getExceptionAsString(),
historyEntry.getTimestamp(),
historyEntry.getFailureLabels(),
historyEntry.getFailingTaskName(),
toString(historyEntry.getTaskManagerLocation()),
toTaskManagerId(historyEntry.getTaskManagerLocation()),
concurrentExceptions);
}
private static JobExceptionsInfoWithHistory.ExceptionInfo createExceptionInfo(
ExceptionHistoryEntry exceptionHistoryEntry) {
if (exceptionHistoryEntry.isGlobal()) {
return new JobExceptionsInfoWithHistory.ExceptionInfo(
exceptionHistoryEntry.getException().getOriginalErrorClassName(),
exceptionHistoryEntry.getExceptionAsString(),
exceptionHistoryEntry.getTimestamp(),
exceptionHistoryEntry.getFailureLabels(),
null,
null,
null);
}
assertLocalExceptionInfo(exceptionHistoryEntry);
return new JobExceptionsInfoWithHistory.ExceptionInfo(
exceptionHistoryEntry.getException().getOriginalErrorClassName(),
exceptionHistoryEntry.getExceptionAsString(),
exceptionHistoryEntry.getTimestamp(),
exceptionHistoryEntry.getFailureLabels(),
exceptionHistoryEntry.getFailingTaskName(),
toString(exceptionHistoryEntry.getTaskManagerLocation()),
toTaskManagerId(exceptionHistoryEntry.getTaskManagerLocation()));
}
private static void assertLocalExceptionInfo(ExceptionHistoryEntry exceptionHistoryEntry) {
Preconditions.checkArgument(
exceptionHistoryEntry.getFailingTaskName() != null,
"The taskName must not be null for a non-global failure.");
}
@VisibleForTesting
static String toString(@Nullable TaskManagerLocation location) {
// '(unassigned)' being the default value is added to support backward-compatibility for the
// deprecated fields
return location != null ? location.getEndpoint() : "(unassigned)";
}
@VisibleForTesting
static String toTaskManagerId(@Nullable TaskManagerLocation location) {
// '(unassigned)' being the default value is added to support backward-compatibility for the
// deprecated fields
return location != null ? String.format("%s", location.getResourceID()) : "(unassigned)";
}
@VisibleForTesting
@Nullable
static String toString(@Nullable ExceptionHistoryEntry.ArchivedTaskManagerLocation location) {
return location != null ? location.getEndpoint() : null;
}
@VisibleForTesting
static String toTaskManagerId(
@Nullable ExceptionHistoryEntry.ArchivedTaskManagerLocation location) {
return location != null ? String.format("%s", location.getResourceID()) : null;
}
}
|
JobExceptionsHandler
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/StreamEndpointBuilderFactory.java
|
{
"start": 28744,
"end": 32948
}
|
interface ____ extends EndpointProducerBuilder {
default StreamEndpointProducerBuilder basic() {
return (StreamEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedStreamEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedStreamEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Sets the read timeout to a specified timeout, in milliseconds. A
* non-zero value specifies the timeout when reading from Input stream
* when a connection is established to a resource. If the timeout
* expires before there is data available for read, a
* java.net.SocketTimeoutException is raised. A timeout of zero is
* interpreted as an infinite timeout.
*
* The option is a: <code>int</code> type.
*
* Group: advanced
*
* @param readTimeout the value to set
* @return the dsl builder
*/
default AdvancedStreamEndpointProducerBuilder readTimeout(int readTimeout) {
doSetProperty("readTimeout", readTimeout);
return this;
}
/**
* Sets the read timeout to a specified timeout, in milliseconds. A
* non-zero value specifies the timeout when reading from Input stream
* when a connection is established to a resource. If the timeout
* expires before there is data available for read, a
* java.net.SocketTimeoutException is raised. A timeout of zero is
* interpreted as an infinite timeout.
*
* The option will be converted to a <code>int</code> type.
*
* Group: advanced
*
* @param readTimeout the value to set
* @return the dsl builder
*/
default AdvancedStreamEndpointProducerBuilder readTimeout(String readTimeout) {
doSetProperty("readTimeout", readTimeout);
return this;
}
}
/**
* Builder for endpoint for the Stream component.
*/
public
|
AdvancedStreamEndpointProducerBuilder
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/xslt/XsltFileNotFoundTest.java
|
{
"start": 1268,
"end": 2094
}
|
class ____ extends TestSupport {
@Test
public void testNoXsltFile() throws Exception {
RouteBuilder builder = createRouteBuilder();
CamelContext context = new DefaultCamelContext();
context.addRoutes(builder);
RuntimeCamelException exception = assertThrows(RuntimeCamelException.class, context::start);
assertIsInstanceOf(TransformerException.class, exception.getCause().getCause().getCause());
assertIsInstanceOf(FileNotFoundException.class, exception.getCause().getCause().getCause().getCause());
}
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("seda:a").to("xslt:org/apache/camel/component/xslt/notfound.xsl");
}
};
}
}
|
XsltFileNotFoundTest
|
java
|
apache__camel
|
components/camel-quickfix/src/main/java/org/apache/camel/component/quickfixj/QuickfixjComponent.java
|
{
"start": 1454,
"end": 9266
}
|
class ____ extends DefaultComponent implements StartupListener {
private static final Logger LOG = LoggerFactory.getLogger(QuickfixjComponent.class);
private static final String PARAMETER_LAZY_CREATE_ENGINE = "lazyCreateEngine";
private final Map<String, QuickfixjEngine> engines = new HashMap<>();
private final Map<String, QuickfixjEngine> provisionalEngines = new HashMap<>();
private final Map<String, QuickfixjEndpoint> endpoints = new HashMap<>();
private Map<String, QuickfixjConfiguration> configurations = new HashMap<>();
@Metadata(label = "advanced")
private MessageStoreFactory messageStoreFactory;
@Metadata(label = "advanced")
private LogFactory logFactory;
@Metadata(label = "advanced")
private MessageFactory messageFactory;
@Metadata
private boolean lazyCreateEngines;
@Metadata(defaultValue = "true")
private boolean eagerStopEngines = true;
public QuickfixjComponent() {
}
public QuickfixjComponent(CamelContext context) {
super(context);
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
// Look up the engine instance based on the settings file ("remaining")
QuickfixjEngine engine;
lock.lock();
try {
QuickfixjEndpoint endpoint = endpoints.get(uri);
if (endpoint == null) {
engine = engines.get(remaining);
if (engine == null) {
engine = provisionalEngines.get(remaining);
}
if (engine == null) {
QuickfixjConfiguration configuration = configurations.get(remaining);
SessionSettings settings;
if (configuration != null) {
settings = configuration.createSessionSettings();
} else {
settings = QuickfixjEngine.loadSettings(getCamelContext(), remaining);
}
Boolean lazyCreateEngineForEndpoint
= super.getAndRemoveParameter(parameters, PARAMETER_LAZY_CREATE_ENGINE, Boolean.TYPE);
if (lazyCreateEngineForEndpoint == null) {
lazyCreateEngineForEndpoint = isLazyCreateEngines();
}
engine = new QuickfixjEngine(
uri, settings, messageStoreFactory, logFactory, messageFactory,
lazyCreateEngineForEndpoint);
// only start engine if CamelContext is already started, otherwise the engines gets started
// automatic later when CamelContext has been started using the StartupListener
if (getCamelContext().getStatus().isStarted()) {
startQuickfixjEngine(engine);
engines.put(remaining, engine);
} else {
// engines to be started later
provisionalEngines.put(remaining, engine);
}
}
endpoint = new QuickfixjEndpoint(engine, uri, this);
endpoint.setConfigurationName(remaining);
endpoint.setLazyCreateEngine(engine.isLazy());
engine.addEventListener(endpoint);
endpoints.put(uri, endpoint);
}
return endpoint;
} finally {
lock.unlock();
}
}
@Override
protected void doStop() throws Exception {
// stop engines when stopping component
lock.lock();
try {
for (QuickfixjEngine engine : engines.values()) {
engine.stop();
}
} finally {
lock.unlock();
}
super.doStop();
}
@Override
protected void doShutdown() throws Exception {
// cleanup when shutting down
engines.clear();
provisionalEngines.clear();
endpoints.clear();
super.doShutdown();
}
private void startQuickfixjEngine(QuickfixjEngine engine) {
if (!engine.isLazy()) {
LOG.info("Starting QuickFIX/J engine: {}", engine.getUri());
ServiceHelper.startService(engine);
} else {
LOG.info("QuickFIX/J engine: {} will start lazily", engine.getUri());
}
}
// Test Support
Map<String, QuickfixjEngine> getEngines() {
return Collections.unmodifiableMap(engines);
}
// Test Support
Map<String, QuickfixjEngine> getProvisionalEngines() {
return Collections.unmodifiableMap(provisionalEngines);
}
/**
* To use the given MessageFactory
*/
public void setMessageFactory(MessageFactory messageFactory) {
this.messageFactory = messageFactory;
}
public MessageFactory getMessageFactory() {
return messageFactory;
}
/**
* To use the given LogFactory
*/
public void setLogFactory(LogFactory logFactory) {
this.logFactory = logFactory;
}
public LogFactory getLogFactory() {
return logFactory;
}
/**
* To use the given MessageStoreFactory
*/
public void setMessageStoreFactory(MessageStoreFactory messageStoreFactory) {
this.messageStoreFactory = messageStoreFactory;
}
public MessageStoreFactory getMessageStoreFactory() {
return messageStoreFactory;
}
public Map<String, QuickfixjConfiguration> getConfigurations() {
return configurations;
}
/**
* To use the given map of pre configured QuickFix configurations mapped to the key
*/
public void setConfigurations(Map<String, QuickfixjConfiguration> configurations) {
this.configurations = configurations;
}
public boolean isLazyCreateEngines() {
return this.lazyCreateEngines;
}
/**
* If set to true, the engines will be created and started when needed (when first message is send)
*/
public void setLazyCreateEngines(boolean lazyCreateEngines) {
this.lazyCreateEngines = lazyCreateEngines;
}
public boolean isEagerStopEngines() {
return eagerStopEngines;
}
/**
* Whether to eager stop engines when there are no active consumer or producers using the engine.
*
* For example when stopping a route, then the engine can be stopped as well. And when the route is started, then
* the engine is started again.
*
* This can be turned off to only stop the engines when Camel is shutdown.
*/
public void setEagerStopEngines(boolean eagerStopEngines) {
this.eagerStopEngines = eagerStopEngines;
}
@Override
public void onCamelContextStarted(CamelContext camelContext, boolean alreadyStarted) throws Exception {
// only start quickfix engines when CamelContext have finished starting
lock.lock();
try {
for (QuickfixjEngine engine : engines.values()) {
startQuickfixjEngine(engine);
}
for (Map.Entry<String, QuickfixjEngine> entry : provisionalEngines.entrySet()) {
startQuickfixjEngine(entry.getValue());
engines.put(entry.getKey(), entry.getValue());
}
provisionalEngines.clear();
} finally {
lock.unlock();
}
}
public void ensureEngineStarted(QuickfixjEngine engine) {
// only start engine after provisional engines is no longer in use
// as they are used for holding created engines during bootstrap of Camel
if (provisionalEngines.isEmpty()) {
ServiceHelper.startService(engine);
}
}
}
|
QuickfixjComponent
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/graphs/EntityGraphAttributeResolutionTest.java
|
{
"start": 4818,
"end": 6188
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
@Enumerated(EnumType.STRING)
@ElementCollection(targetClass = Permission.class)
@CollectionTable(name = "USERS_PERMISSIONS", joinColumns = @JoinColumn(name = "user_id"))
private Set<Permission> permissions = EnumSet.of( Permission.FOO );
@ManyToMany(fetch = FetchType.LAZY)
private Set<Group> groups = new HashSet<>();
public User() {}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Set<Permission> getPermissions() {
return permissions;
}
public void setPermissions(Set<Permission> permissions) {
this.permissions = permissions;
}
public void addPermission(Permission p) {
this.permissions.add( p );
}
public Set<Group> getGroups() {
return groups;
}
public void setGroups(Set<Group> groups) {
this.groups = groups;
}
public void addGroup(Group g) {
this.groups.add( g );
}
@Override
public boolean equals(Object o) {
if ( this == o ) return true;
if ( !( o instanceof User ) )
return false;
User other = (User) o;
return id != null &&
id.equals( other.getId() );
}
@Override
public int hashCode() {
return 31;
}
@Override
public String toString() {
return "User{" +
"id=" + id +
'}';
}
}
public
|
User
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/filter/BodyFilterFunctions.java
|
{
"start": 2667,
"end": 7466
}
|
class ____ {
private BodyFilterFunctions() {
}
public static Function<ServerRequest, ServerRequest> adaptCachedBody() {
return request -> {
Object o = getAttribute(request, MvcUtils.CACHED_REQUEST_BODY_ATTR);
if (o instanceof ByteArrayInputStream body) {
return wrapRequest(request, body);
}
return request;
};
}
private static ServerRequestWrapper wrapRequest(ServerRequest request, byte[] body) {
return wrapRequest(request, new ByteArrayInputStream(body));
}
private static ServerRequestWrapper wrapRequest(ServerRequest request, ByteArrayInputStream body) {
ByteArrayServletInputStream inputStream = new ByteArrayServletInputStream(body);
HttpServletRequestWrapper wrapper = new HttpServletRequestWrapper(request.servletRequest()) {
@Override
public ServletInputStream getInputStream() {
return inputStream;
}
};
return new ServerRequestWrapper(request) {
@Override
public HttpServletRequest servletRequest() {
return wrapper;
}
};
}
@SuppressWarnings("unchecked")
public static <T, R> Function<ServerRequest, ServerRequest> modifyRequestBody(Class<T> inClass, Class<R> outClass,
@Nullable String newContentType, RewriteFunction<T, R> rewriteFunction) {
return request -> cacheAndReadBody(request, inClass).map(body -> {
R convertedBody = rewriteFunction.apply(request, body);
// TODO: cache converted body
MediaType contentType = (StringUtils.hasText(newContentType)) ? MediaType.parseMediaType(newContentType)
: request.headers().contentType().orElse(null);
List<HttpMessageConverter<?>> httpMessageConverters = request.messageConverters();
for (HttpMessageConverter<?> messageConverter : httpMessageConverters) {
if (messageConverter.canWrite(outClass, contentType)) {
HttpHeaders headers = new HttpHeaders();
headers.putAll(request.headers().asHttpHeaders());
// the new content type will be computed by converter
// and then set in the request decorator
headers.remove(HttpHeaders.CONTENT_LENGTH);
// if the body is changing content types, set it here, to the
// bodyInserter
// will know about it
if (contentType != null) {
headers.setContentType(contentType);
}
try {
ByteArrayHttpOutputMessage outputMessage = new ByteArrayHttpOutputMessage(headers);
((HttpMessageConverter<R>) messageConverter).write(convertedBody, contentType, outputMessage);
ServerRequest modified = ServerRequest.from(request)
.headers(httpHeaders -> httpHeaders.putAll(headers))
.build();
return wrapRequest(modified, outputMessage.getBytes());
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
return request;
}).orElse(request);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public static <T, R> BiFunction<ServerRequest, ServerResponse, ServerResponse> modifyResponseBody(Class<T> inClass,
Class<R> outClass, @Nullable String newContentType, RewriteResponseFunction<T, R> rewriteFunction) {
return (request, response) -> {
Object o = request.attributes().get(MvcUtils.CLIENT_RESPONSE_INPUT_STREAM_ATTR);
if (o instanceof InputStream inputStream) {
try {
List<HttpMessageConverter<?>> converters = request.messageConverters();
Optional<HttpMessageConverter<?>> inConverter = converters.stream()
.filter(c -> c.canRead(inClass, response.headers().getContentType()))
.findFirst();
if (inConverter.isEmpty()) {
// TODO: throw exception?
return response;
}
HttpMessageConverter<?> inputConverter = inConverter.get();
T input = (T) inputConverter.read((Class) inClass,
new SimpleInputMessage(inputStream, response.headers()));
R output = rewriteFunction.apply(request, response, input);
Optional<HttpMessageConverter<?>> outConverter = converters.stream()
.filter(c -> c.canWrite(outClass, null))
.findFirst();
if (outConverter.isEmpty()) {
// TODO: throw exception?
return response;
}
HttpMessageConverter<R> byteConverter = (HttpMessageConverter<R>) outConverter.get();
ByteArrayHttpOutputMessage outputMessage = new ByteArrayHttpOutputMessage(response.headers());
byteConverter.write(output, null, outputMessage);
request.attributes()
.put(MvcUtils.CLIENT_RESPONSE_INPUT_STREAM_ATTR,
new ByteArrayInputStream(outputMessage.body.toByteArray()));
if (StringUtils.hasText(newContentType)) {
response.headers().setContentType(MediaType.parseMediaType(newContentType));
}
response.headers().remove(HttpHeaders.CONTENT_LENGTH);
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
return response;
};
}
private final static
|
BodyFilterFunctions
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/tofix/ExternalTypeCustomResolver1288Test.java
|
{
"start": 5450,
"end": 6652
}
|
class ____ extends TypeIdResolverBase {
private static final long serialVersionUID = 1L;
@SuppressWarnings("unchecked")
@Override
public String idFromValue(DatabindContext ctxt, Object value) {
if (! (value instanceof PaymentDetails)) {
return null;
}
return FormOfPayment.fromDetailsClass((Class<PaymentDetails>) value.getClass ()).name ();
}
@Override
public String idFromValueAndType(DatabindContext ctxt, Object value, Class<?> suggestedType) {
return idFromValue(ctxt, value);
}
@Override
public JavaType typeFromId(DatabindContext context, String id) {
return context.getTypeFactory().constructType(FormOfPayment.valueOf(id).getDetailsClass ());
}
@Override
public String getDescForKnownTypeIds() {
return "PaymentDetails";
}
@Override
public Id getMechanism() {
return JsonTypeInfo.Id.CUSTOM;
}
}
}
public static
|
PaymentDetailsTypeIdResolver
|
java
|
google__guice
|
extensions/grapher/src/com/google/inject/grapher/NodeId.java
|
{
"start": 1263,
"end": 2255
}
|
class ____. */
TYPE,
/** Instance node, used when something is bound to an instance. */
INSTANCE
}
private final Key<?> key;
private final NodeType nodeType;
private NodeId(Key<?> key, NodeType nodeType) {
this.key = key;
this.nodeType = nodeType;
}
public static NodeId newTypeId(Key<?> key) {
return new NodeId(key, NodeType.TYPE);
}
public static NodeId newInstanceId(Key<?> key) {
return new NodeId(key, NodeType.INSTANCE);
}
public Key<?> getKey() {
return key;
}
@Override
public int hashCode() {
return Objects.hashCode(key, nodeType);
}
@Override
public boolean equals(Object obj) {
if (obj == null || !(obj.getClass().equals(NodeId.class))) {
return false;
}
NodeId other = (NodeId) obj;
return Objects.equal(key, other.key) && Objects.equal(nodeType, other.nodeType);
}
@Override
public String toString() {
return "NodeId{nodeType=" + nodeType + " key=" + key + "}";
}
}
|
node
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
|
{
"start": 2220,
"end": 2809
}
|
class ____ extends ESSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singleton(MockPayloadAnalyzerPlugin.class);
}
// Delimited payload token filter was moved to analysis-common module,
// This test relies heavily on this token filter, even though it is not testing this token filter.
// Solution for now is copy what delimited payload token filter does in this test.
// Unfortunately MockPayloadAnalyzer couldn't be used here as it misses functionality.
public static
|
GetTermVectorsTests
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
|
{
"start": 10719,
"end": 11036
}
|
class ____ extends LongParam {
/**
* Parameter name.
*/
public static final String NAME = "offset";
/**
* Constructor.
*/
public OffsetParam() {
super(NAME, 0l);
}
}
/**
* Class for newlength parameter.
*/
@InterfaceAudience.Private
public static
|
OffsetParam
|
java
|
apache__camel
|
tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/ValidateComponentMojo.java
|
{
"start": 2445,
"end": 5439
}
|
class ____ one of the threads it
* generated failed.
* @throws org.apache.maven.plugin.MojoFailureException something bad happened...
*/
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
if (validate == null) {
validate = true;
}
if (outDir == null) {
outDir = new File(project.getBuild().getOutputDirectory());
}
if (!validate) {
getLog().info("Validation disabled");
} else {
List<Path> jsonFiles;
try (Stream<Path> stream = PackageHelper.findJsonFiles(outDir.toPath())) {
jsonFiles = stream.toList();
}
boolean failed = false;
for (Path file : jsonFiles) {
final String name = PackageHelper.asName(file);
final ErrorDetail detail = new ErrorDetail();
if (getLog().isDebugEnabled()) {
getLog().debug("Validating file " + file);
}
try {
ValidateHelper.validate(file.toFile(), detail);
} catch (Exception e) {
// ignore as it may not be a camel component json file
}
if (detail.hasErrors()) {
failed = true;
getLog().warn("The " + detail.getKind() + ": " + name + " has validation errors");
if (detail.isMissingDescription()) {
getLog().warn("Missing description on: " + detail.getKind());
}
if (detail.isMissingLabel()) {
getLog().warn("Missing label on: " + detail.getKind());
}
if (detail.isMissingSyntax()) {
getLog().warn("Missing syntax on endpoint");
}
if (detail.isMissingUriPath()) {
getLog().warn("Missing @UriPath on endpoint");
}
if (!detail.getMissingComponentDocumentation().isEmpty()) {
getLog().warn("Missing component documentation for the following options:"
+ Strings.indentCollection("\n\t", detail.getMissingComponentDocumentation()));
}
if (!detail.getMissingEndpointDocumentation().isEmpty()) {
getLog().warn("Missing endpoint documentation for the following options:"
+ Strings.indentCollection("\n\t", detail.getMissingEndpointDocumentation()));
}
}
}
if (failed) {
throw new MojoFailureException("Validating failed, see errors above!");
} else {
getLog().info("Validation complete");
}
}
}
}
|
or
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Listing.java
|
{
"start": 27425,
"end": 27860
}
|
class ____ implements FileStatusAcceptor {
public boolean accept(Path keyPath, S3Object s3Object) {
return true;
}
public boolean accept(Path keyPath, String prefix) {
return true;
}
public boolean accept(FileStatus status) {
return true;
}
}
/**
* Accept all entries except the base path and those which map to S3N
* pseudo directory markers.
*/
public static
|
AcceptAllObjects
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/resource/MultiInterfaceResLocatorIntf2.java
|
{
"start": 167,
"end": 298
}
|
interface ____ {
@GET
@Produces("text/plain")
@Path("hello2")
String resourceMethod2();
}
|
MultiInterfaceResLocatorIntf2
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
|
{
"start": 23514,
"end": 24050
}
|
class ____ extends CompoundAggTranslator<MatrixStats> {
@Override
protected LeafAgg toAgg(String id, MatrixStats m) {
if (isFieldOrLiteral(m.field())) {
return new MatrixStatsAgg(id, singletonList(field(m, m.field())));
}
throw new SqlIllegalArgumentException(
"Cannot use scalar functions or operators: [{}] in aggregate functions [KURTOSIS] and [SKEWNESS]",
m.field().toString()
);
}
}
static
|
MatrixStatsAggs
|
java
|
apache__camel
|
components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FromFileTransferLoggingLevelVerboseIT.java
|
{
"start": 1006,
"end": 1812
}
|
class ____ extends FtpServerTestSupport {
protected String getFtpUrl() {
return "ftp://admin@localhost:{{ftp.server.port}}"
+ "/tmp3/camel?password=admin&transferLoggingLevel=INFO&transferLoggingVerbose=true";
}
@Test
public void testTransferLoggingLevelVerbose() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(2);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(getFtpUrl()).to("mock:result");
from("file:src/test/data?noop=true").to(getFtpUrl());
}
};
}
}
|
FromFileTransferLoggingLevelVerboseIT
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/TimesOrMoreITCase.java
|
{
"start": 26045,
"end": 26479
}
|
class ____ {
private static final Event startEvent = new Event(40, "c", 1.0);
private static final Event middleEvent1 = new Event(41, "a", 2.0);
private static final Event middleEvent2 = new Event(42, "a", 3.0);
private static final Event middleEvent3 = new Event(43, "a", 4.0);
private static final Event end = new Event(44, "b", 5.0);
private ConsecutiveData() {}
}
}
|
ConsecutiveData
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-integration/src/test/java/smoketest/integration/producer/ProducerApplication.java
|
{
"start": 1175,
"end": 1960
}
|
class ____ implements ApplicationRunner {
private final ServiceProperties serviceProperties;
public ProducerApplication(ServiceProperties serviceProperties) {
this.serviceProperties = serviceProperties;
}
@Override
public void run(ApplicationArguments args) throws Exception {
this.serviceProperties.getInputDir().mkdirs();
if (!args.getNonOptionArgs().isEmpty()) {
FileOutputStream stream = new FileOutputStream(
new File(this.serviceProperties.getInputDir(), "data" + System.currentTimeMillis() + ".txt"));
for (String arg : args.getNonOptionArgs()) {
stream.write(arg.getBytes());
}
stream.flush();
stream.close();
}
}
public static void main(String[] args) {
SpringApplication.run(ProducerApplication.class, args);
}
}
|
ProducerApplication
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/reflect/ConstructorUtils.java
|
{
"start": 12522,
"end": 13464
}
|
class ____ invocation of {@link SecurityManager#checkPackageAccess(String)} denies access to the
* package of the class.
* @see Constructor#newInstance(Object...)
* @see Constructor#newInstance
*/
public static <T> T invokeConstructor(final Class<T> cls, final Object[] args, final Class<?>[] parameterTypes)
throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
final Object[] actuals = ArrayUtils.nullToEmpty(args);
final Constructor<T> ctor = getMatchingAccessibleConstructor(cls, ArrayUtils.nullToEmpty(parameterTypes));
if (ctor == null) {
throw new NoSuchMethodException("No such accessible constructor on object: " + cls.getName());
}
return ctor.newInstance(MethodUtils.toVarArgs(ctor, actuals));
}
/**
* Returns a new instance of the specified
|
and
|
java
|
apache__camel
|
components/camel-resilience4j/src/test/java/org/apache/camel/component/resilience4j/ResilienceTimeoutWithFallbackTest.java
|
{
"start": 1160,
"end": 3345
}
|
class ____ extends CamelTestSupport {
@Test
public void testFast() {
// this calls the fast route and therefore we get a response
Object out = template.requestBody("direct:start", "fast");
assertEquals("LAST CHANGE", out);
}
@Test
public void testSlow() {
// this calls the slow route and therefore causes a timeout which
// triggers the fallback
Object out = template.requestBody("direct:start", "slow");
assertEquals("LAST CHANGE", out);
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").circuitBreaker()
// enable and use 2 second timeout
.resilience4jConfiguration().timeoutEnabled(true).timeoutDuration(2000).end()
.log("Resilience processing start: ${threadName}").toD("direct:${body}")
.log("Resilience processing end: ${threadName}").onFallback()
// use fallback if there was an exception or timeout
.log("Resilience fallback start: ${threadName}").transform().constant("Fallback response")
.log("Resilience fallback end: ${threadName}").end()
.log("After Resilience ${body}").transform(simple("A CHANGE")).transform(simple("LAST CHANGE"))
.log("End ${body}");
from("direct:fast")
// this is a fast route and takes 1 second to respond
.log("Fast processing start: ${threadName}").delay(1000).transform().constant("Fast response")
.log("Fast processing end: ${threadName}");
from("direct:slow")
// this is a slow route and takes 3 second to respond
.log("Slow processing start: ${threadName}").delay(3000).transform().constant("Slow response")
.log("Slow processing end: ${threadName}");
}
};
}
}
|
ResilienceTimeoutWithFallbackTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_hasSizeGreaterThanOrEqualTo_Test.java
|
{
"start": 834,
"end": 1244
}
|
class ____ extends AtomicReferenceArrayAssertBaseTest {
@Override
protected AtomicReferenceArrayAssert<Object> invoke_api_method() {
return assertions.hasSizeGreaterThanOrEqualTo(6);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertHasSizeGreaterThanOrEqualTo(getInfo(assertions), internalArray(), 6);
}
}
|
AtomicReferenceArrayAssert_hasSizeGreaterThanOrEqualTo_Test
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/dataset/ListDataSetConsumerTest.java
|
{
"start": 1193,
"end": 2701
}
|
class ____ extends ContextTestSupport {
protected ListDataSet dataSet;
final String resultUri = "mock://result";
final String dataSetName = "foo";
final String dataSetUri = "dataset://" + dataSetName;
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("foo", dataSet);
return answer;
}
@Test
public void testDefaultListDataSet() throws Exception {
MockEndpoint result = getMockEndpoint(resultUri);
result.expectedMinimumMessageCount((int) dataSet.getSize());
result.assertIsSatisfied(Duration.ofSeconds(5).toMillis());
}
@Test
public void testDefaultListDataSetWithSizeGreaterThanListSize() throws Exception {
MockEndpoint result = getMockEndpoint(resultUri);
dataSet.setSize(10);
result.expectedMinimumMessageCount((int) dataSet.getSize());
result.assertIsSatisfied(Duration.ofSeconds(5).toMillis());
}
@Override
@BeforeEach
public void setUp() throws Exception {
List<Object> bodies = new LinkedList<>();
bodies.add("<hello>world!</hello>");
dataSet = new ListDataSet(bodies);
super.setUp();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(dataSetUri).to("mock://result");
}
};
}
}
|
ListDataSetConsumerTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
|
{
"start": 109570,
"end": 143885
}
|
class ____ extends SimpleMappedFieldType {
private final Element element;
private final Integer dims;
private final boolean indexed;
private final VectorSimilarity similarity;
private final IndexVersion indexVersionCreated;
private final DenseVectorIndexOptions indexOptions;
private final boolean isSyntheticSource;
public DenseVectorFieldType(
String name,
IndexVersion indexVersionCreated,
ElementType elementType,
Integer dims,
boolean indexed,
VectorSimilarity similarity,
DenseVectorIndexOptions indexOptions,
Map<String, String> meta,
boolean isSyntheticSource
) {
super(name, indexed ? IndexType.vectors() : IndexType.docValuesOnly(), false, meta);
this.element = Element.getElement(elementType);
this.dims = dims;
this.indexed = indexed;
this.similarity = similarity;
this.indexVersionCreated = indexVersionCreated;
this.indexOptions = indexOptions;
this.isSyntheticSource = isSyntheticSource;
}
public VectorSimilarity similarity() {
return similarity;
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
// TODO add support to `binary` and `vector` formats to unify the formats
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Set<String> sourcePaths = context.isSourceEnabled() ? context.sourcePath(name()) : Collections.emptySet();
return new SourceValueFetcher(name(), context) {
@Override
public List<Object> fetchValues(Source source, int doc, List<Object> ignoredValues) {
ArrayList<Object> values = new ArrayList<>();
for (var path : sourcePaths) {
Object sourceValue = source.extractValue(path, null);
if (sourceValue == null) {
return List.of();
}
switch (sourceValue) {
case List<?> v -> values.addAll(v);
case String s -> values.add(s);
default -> ignoredValues.add(sourceValue);
}
}
values.trimToSize();
return values;
}
@Override
protected Object parseSourceValue(Object value) {
throw new IllegalStateException("parsing dense vector from source is not supported here");
}
};
}
@Override
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
// TODO we should add DENSE_VECTOR_BINARY?
return DocValueFormat.DENSE_VECTOR;
}
@Override
public boolean isSearchable() {
return indexed;
}
@Override
public boolean isAggregatable() {
return false;
}
@Override
public boolean isVectorEmbedding() {
return true;
}
@Override
public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) {
return element.fielddataBuilder(this, fieldDataContext);
}
@Override
public Query existsQuery(SearchExecutionContext context) {
return new FieldExistsQuery(name());
}
@Override
public Query termQuery(Object value, SearchExecutionContext context) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support term queries");
}
public Query createExactKnnQuery(VectorData queryVector, Float vectorSimilarity) {
if (indexType() == IndexType.NONE) {
throw new IllegalArgumentException(
"to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]"
);
}
Query knnQuery = switch (element.elementType()) {
case BYTE -> createExactKnnByteQuery(queryVector.asByteVector());
case FLOAT, BFLOAT16 -> createExactKnnFloatQuery(queryVector.asFloatVector());
case BIT -> createExactKnnBitQuery(queryVector.asByteVector());
};
if (vectorSimilarity != null) {
knnQuery = new VectorSimilarityQuery(
knnQuery,
vectorSimilarity,
similarity.score(vectorSimilarity, element.elementType(), dims)
);
}
return knnQuery;
}
public boolean isNormalized() {
return indexVersionCreated.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(similarity);
}
private Query createExactKnnBitQuery(byte[] queryVector) {
element.checkDimensions(dims, queryVector.length);
return new DenseVectorQuery.Bytes(queryVector, name());
}
private Query createExactKnnByteQuery(byte[] queryVector) {
element.checkDimensions(dims, queryVector.length);
if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) {
float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector);
element.checkVectorMagnitude(similarity, ByteElement.errorElementsAppender(queryVector), squaredMagnitude);
}
return new DenseVectorQuery.Bytes(queryVector, name());
}
private Query createExactKnnFloatQuery(float[] queryVector) {
element.checkDimensions(dims, queryVector.length);
element.checkVectorBounds(queryVector);
if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) {
float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector);
element.checkVectorMagnitude(similarity, FloatElement.errorElementsAppender(queryVector), squaredMagnitude);
if (isNormalized() && element.isUnitVector(squaredMagnitude) == false) {
float length = (float) Math.sqrt(squaredMagnitude);
queryVector = Arrays.copyOf(queryVector, queryVector.length);
for (int i = 0; i < queryVector.length; i++) {
queryVector[i] /= length;
}
}
}
return new DenseVectorQuery.Floats(queryVector, name());
}
public Query createKnnQuery(
VectorData queryVector,
int k,
int numCands,
Float visitPercentage,
Float oversample,
Query filter,
Float similarityThreshold,
BitSetProducer parentFilter,
FilterHeuristic heuristic,
boolean hnswEarlyTermination
) {
if (indexType.hasVectors() == false) {
throw new IllegalArgumentException(
"to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]"
);
}
if (dims == null) {
return new MatchNoDocsQuery("No data has been indexed for field [" + name() + "]");
}
KnnSearchStrategy knnSearchStrategy = heuristic.getKnnSearchStrategy();
hnswEarlyTermination &= canApplyPatienceQuery();
return switch (getElementType()) {
case BYTE -> createKnnByteQuery(
queryVector.asByteVector(),
k,
numCands,
filter,
similarityThreshold,
parentFilter,
knnSearchStrategy,
hnswEarlyTermination
);
case FLOAT, BFLOAT16 -> createKnnFloatQuery(
queryVector.asFloatVector(),
k,
numCands,
visitPercentage,
oversample,
filter,
similarityThreshold,
parentFilter,
knnSearchStrategy,
hnswEarlyTermination
);
case BIT -> createKnnBitQuery(
queryVector.asByteVector(),
k,
numCands,
filter,
similarityThreshold,
parentFilter,
knnSearchStrategy,
hnswEarlyTermination
);
};
}
private boolean needsRescore(Float rescoreOversample) {
return rescoreOversample != null && rescoreOversample > 0 && isQuantized();
}
private boolean isQuantized() {
return indexOptions != null && indexOptions.type != null && indexOptions.type.isQuantized();
}
private boolean canApplyPatienceQuery() {
return indexOptions instanceof HnswIndexOptions
|| indexOptions instanceof Int8HnswIndexOptions
|| indexOptions instanceof Int4HnswIndexOptions
|| indexOptions instanceof BBQHnswIndexOptions;
}
private Query createKnnBitQuery(
byte[] queryVector,
int k,
int numCands,
Query filter,
Float similarityThreshold,
BitSetProducer parentFilter,
KnnSearchStrategy searchStrategy,
boolean hnswEarlyTermination
) {
element.checkDimensions(dims, queryVector.length);
Query knnQuery;
if (indexOptions != null && indexOptions.isFlat()) {
var exactKnnQuery = parentFilter != null
? new DiversifyingParentBlockQuery(parentFilter, createExactKnnBitQuery(queryVector))
: createExactKnnBitQuery(queryVector);
knnQuery = filter == null
? exactKnnQuery
: new BooleanQuery.Builder().add(exactKnnQuery, BooleanClause.Occur.SHOULD)
.add(filter, BooleanClause.Occur.FILTER)
.build();
} else {
knnQuery = parentFilter != null
? new ESDiversifyingChildrenByteKnnVectorQuery(
name(),
queryVector,
filter,
k,
numCands,
parentFilter,
searchStrategy,
hnswEarlyTermination
)
: new ESKnnByteVectorQuery(name(), queryVector, k, numCands, filter, searchStrategy, hnswEarlyTermination);
}
if (similarityThreshold != null) {
knnQuery = new VectorSimilarityQuery(
knnQuery,
similarityThreshold,
similarity.score(similarityThreshold, element.elementType(), dims)
);
}
return knnQuery;
}
private Query createKnnByteQuery(
byte[] queryVector,
int k,
int numCands,
Query filter,
Float similarityThreshold,
BitSetProducer parentFilter,
KnnSearchStrategy searchStrategy,
boolean hnswEarlyTermination
) {
element.checkDimensions(dims, queryVector.length);
if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) {
float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector);
element.checkVectorMagnitude(similarity, ByteElement.errorElementsAppender(queryVector), squaredMagnitude);
}
Query knnQuery;
if (indexOptions != null && indexOptions.isFlat()) {
var exactKnnQuery = parentFilter != null
? new DiversifyingParentBlockQuery(parentFilter, createExactKnnByteQuery(queryVector))
: createExactKnnByteQuery(queryVector);
knnQuery = filter == null
? exactKnnQuery
: new BooleanQuery.Builder().add(exactKnnQuery, BooleanClause.Occur.SHOULD)
.add(filter, BooleanClause.Occur.FILTER)
.build();
} else {
knnQuery = parentFilter != null
? new ESDiversifyingChildrenByteKnnVectorQuery(
name(),
queryVector,
filter,
k,
numCands,
parentFilter,
searchStrategy,
hnswEarlyTermination
)
: new ESKnnByteVectorQuery(name(), queryVector, k, numCands, filter, searchStrategy, hnswEarlyTermination);
}
if (similarityThreshold != null) {
knnQuery = new VectorSimilarityQuery(
knnQuery,
similarityThreshold,
similarity.score(similarityThreshold, element.elementType(), dims)
);
}
return knnQuery;
}
private Query createKnnFloatQuery(
float[] queryVector,
int k,
int numCands,
Float visitPercentage,
Float queryOversample,
Query filter,
Float similarityThreshold,
BitSetProducer parentFilter,
KnnSearchStrategy knnSearchStrategy,
boolean hnswEarlyTermination
) {
element.checkDimensions(dims, queryVector.length);
element.checkVectorBounds(queryVector);
if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) {
float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector);
element.checkVectorMagnitude(similarity, FloatElement.errorElementsAppender(queryVector), squaredMagnitude);
if (isNormalized() && element.isUnitVector(squaredMagnitude) == false) {
float length = (float) Math.sqrt(squaredMagnitude);
queryVector = Arrays.copyOf(queryVector, queryVector.length);
for (int i = 0; i < queryVector.length; i++) {
queryVector[i] /= length;
}
}
}
int adjustedK = k;
// By default utilize the quantized oversample is configured
// allow the user provided at query time overwrite
Float oversample = queryOversample;
if (oversample == null
&& indexOptions instanceof QuantizedIndexOptions quantizedIndexOptions
&& quantizedIndexOptions.rescoreVector != null) {
oversample = quantizedIndexOptions.rescoreVector.oversample;
}
boolean rescore = needsRescore(oversample);
if (rescore) {
// Will get k * oversample for rescoring, and get the top k
adjustedK = Math.min((int) Math.ceil(k * oversample), OVERSAMPLE_LIMIT);
numCands = Math.max(adjustedK, numCands);
}
Query knnQuery;
if (indexOptions != null && indexOptions.isFlat()) {
var exactKnnQuery = parentFilter != null
? new DiversifyingParentBlockQuery(parentFilter, createExactKnnFloatQuery(queryVector))
: createExactKnnFloatQuery(queryVector);
knnQuery = filter == null
? exactKnnQuery
: new BooleanQuery.Builder().add(exactKnnQuery, BooleanClause.Occur.SHOULD)
.add(filter, BooleanClause.Occur.FILTER)
.build();
} else if (indexOptions instanceof BBQIVFIndexOptions bbqIndexOptions) {
float defaultVisitRatio = (float) (bbqIndexOptions.defaultVisitPercentage / 100d);
float visitRatio = visitPercentage == null ? defaultVisitRatio : (float) (visitPercentage / 100d);
knnQuery = parentFilter != null
? new DiversifyingChildrenIVFKnnFloatVectorQuery(
name(),
queryVector,
adjustedK,
numCands,
filter,
parentFilter,
visitRatio
)
: new IVFKnnFloatVectorQuery(name(), queryVector, adjustedK, numCands, filter, visitRatio);
} else {
knnQuery = parentFilter != null
? new ESDiversifyingChildrenFloatKnnVectorQuery(
name(),
queryVector,
filter,
adjustedK,
numCands,
parentFilter,
knnSearchStrategy
)
: new ESKnnFloatVectorQuery(name(), queryVector, adjustedK, numCands, filter, knnSearchStrategy, hnswEarlyTermination);
}
if (rescore) {
knnQuery = RescoreKnnVectorQuery.fromInnerQuery(
name(),
queryVector,
similarity.vectorSimilarityFunction(indexVersionCreated, ElementType.FLOAT),
k,
adjustedK,
knnQuery
);
}
if (similarityThreshold != null) {
knnQuery = new VectorSimilarityQuery(
knnQuery,
similarityThreshold,
similarity.score(similarityThreshold, element.elementType(), dims)
);
}
return knnQuery;
}
public VectorSimilarity getSimilarity() {
return similarity;
}
public int getVectorDimensions() {
return dims;
}
public ElementType getElementType() {
return element.elementType();
}
public DenseVectorIndexOptions getIndexOptions() {
return indexOptions;
}
@Override
public BlockLoader blockLoader(MappedFieldType.BlockLoaderContext blContext) {
if (dims == null) {
// No data has been indexed yet
return BlockLoader.CONSTANT_NULLS;
}
BlockLoaderFunctionConfig cfg = blContext.blockLoaderFunctionConfig();
if (indexed) {
if (cfg == null) {
return new DenseVectorBlockLoader<>(
name(),
dims,
this,
new DenseVectorBlockLoaderProcessor.DenseVectorLoaderProcessor()
);
}
return switch (cfg.function()) {
case V_COSINE, V_DOT_PRODUCT, V_HAMMING, V_L1NORM, V_L2NORM -> {
VectorSimilarityFunctionConfig similarityConfig = (VectorSimilarityFunctionConfig) cfg;
yield new DenseVectorBlockLoader<>(
name(),
dims,
this,
new DenseVectorBlockLoaderProcessor.DenseVectorSimilarityProcessor(similarityConfig)
);
}
default -> throw new UnsupportedOperationException("Unknown block loader function config: " + cfg.function());
};
}
if (cfg != null) {
throw new IllegalArgumentException(
"Field ["
+ name()
+ "] of type ["
+ typeName()
+ "] doesn't support block loader functions when [index] is set to [false]"
);
}
if (hasDocValues() && (blContext.fieldExtractPreference() != FieldExtractPreference.STORED || isSyntheticSource)) {
return new DenseVectorFromBinaryBlockLoader(name(), dims, indexVersionCreated, element.elementType());
}
BlockSourceReader.LeafIteratorLookup lookup = BlockSourceReader.lookupMatchingAll();
return new BlockSourceReader.DenseVectorBlockLoader(
sourceValueFetcher(blContext.sourcePaths(name()), blContext.indexSettings()),
lookup,
dims
);
}
@Override
public boolean supportsBlockLoaderConfig(BlockLoaderFunctionConfig config, FieldExtractPreference preference) {
if (dims == null) {
// No data has been indexed yet
return true;
}
if (indexed) {
return switch (config.function()) {
case V_COSINE, V_DOT_PRODUCT, V_HAMMING, V_L1NORM, V_L2NORM -> true;
default -> false;
};
}
return false;
}
private SourceValueFetcher sourceValueFetcher(Set<String> sourcePaths, IndexSettings indexSettings) {
return new SourceValueFetcher(sourcePaths, null, indexSettings.getIgnoredSourceFormat()) {
@Override
public List<Object> fetchValues(Source source, int doc, List<Object> ignoredValues) {
ArrayList<Object> values = new ArrayList<>();
for (var path : sourcePaths) {
Object sourceValue = source.extractValue(path, null);
if (sourceValue == null) {
return List.of();
}
try {
switch (sourceValue) {
case List<?> v -> {
for (Object o : v) {
values.add(NumberFieldMapper.NumberType.FLOAT.parse(o, false));
}
}
case String s -> {
if ((element.elementType() == ElementType.BYTE || element.elementType() == ElementType.BIT)
&& s.length() == dims * 2
&& ByteElement.isMaybeHexString(s)) {
byte[] bytes;
try {
bytes = HexFormat.of().parseHex(s);
} catch (IllegalArgumentException e) {
bytes = Base64.getDecoder().decode(s);
}
for (byte b : bytes) {
values.add((float) b);
}
} else {
byte[] floatBytes = Base64.getDecoder().decode(s);
float[] floats = new float[dims];
ByteBuffer.wrap(floatBytes).asFloatBuffer().get(floats);
for (float f : floats) {
values.add(f);
}
}
}
default -> ignoredValues.add(sourceValue);
}
} catch (Exception e) {
// if parsing fails here then it would have failed at index time
// as well, meaning that we must be ignoring malformed values.
ignoredValues.add(sourceValue);
}
}
values.trimToSize();
return values;
}
@Override
protected Object parseSourceValue(Object value) {
throw new IllegalStateException("parsing dense vector from source is not supported here");
}
};
}
}
private final DenseVectorIndexOptions indexOptions;
private final IndexVersion indexCreatedVersion;
private final boolean isExcludeSourceVectors;
private final List<VectorsFormatProvider> extraVectorsFormatProviders;
private DenseVectorFieldMapper(
String simpleName,
MappedFieldType mappedFieldType,
BuilderParams params,
DenseVectorIndexOptions indexOptions,
IndexVersion indexCreatedVersion,
boolean isExcludeSourceVectorsFinal,
List<VectorsFormatProvider> vectorsFormatProviders
) {
super(simpleName, mappedFieldType, params);
this.indexOptions = indexOptions;
this.indexCreatedVersion = indexCreatedVersion;
this.isExcludeSourceVectors = isExcludeSourceVectorsFinal;
this.extraVectorsFormatProviders = vectorsFormatProviders;
}
@Override
public DenseVectorFieldType fieldType() {
return (DenseVectorFieldType) super.fieldType();
}
@Override
public boolean parsesArrayValue() {
return true;
}
@Override
public void parse(DocumentParserContext context) throws IOException {
if (context.doc().getByKey(fieldType().name()) != null) {
throw new IllegalArgumentException(
"Field ["
+ fullPath()
+ "] of type ["
+ typeName()
+ "] doesn't support indexing multiple values for the same field in the same document"
);
}
if (Token.VALUE_NULL == context.parser().currentToken()) {
return;
}
if (fieldType().dims == null) {
int dims = fieldType().element.parseDimensionCount(context);
DenseVectorFieldMapper.Builder builder = (Builder) getMergeBuilder();
builder.dimensions(dims);
Mapper update = builder.build(context.createDynamicMapperBuilderContext());
context.addDynamicMapper(update);
return;
}
if (fieldType().indexed) {
parseKnnVectorAndIndex(context);
} else {
parseBinaryDocValuesVectorAndIndex(context);
}
}
private void parseKnnVectorAndIndex(DocumentParserContext context) throws IOException {
fieldType().element.parseKnnVectorAndIndex(context, this);
}
private void parseBinaryDocValuesVectorAndIndex(DocumentParserContext context) throws IOException {
// encode array of floats as array of integers and store into buf
// this code is here and not in the VectorEncoderDecoder so not to create extra arrays
int dims = fieldType().dims;
Element element = fieldType().element;
int numBytes = indexCreatedVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION)
? element.getNumBytes(dims) + MAGNITUDE_BYTES
: element.getNumBytes(dims);
ByteBuffer byteBuffer = element.createByteBuffer(indexCreatedVersion, numBytes);
VectorData vectorData = element.parseKnnVector(context, dims, (i, b) -> {
if (b) {
checkDimensionMatches(i, context);
} else {
checkDimensionExceeded(i, context);
}
}, fieldType().similarity);
vectorData.addToBuffer(element, byteBuffer);
if (indexCreatedVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION)) {
// encode vector magnitude at the end
double dotProduct = element.computeSquaredMagnitude(vectorData);
float vectorMagnitude = (float) Math.sqrt(dotProduct);
byteBuffer.putFloat(vectorMagnitude);
}
Field field = new BinaryDocValuesField(fieldType().name(), new BytesRef(byteBuffer.array()));
context.doc().addWithKey(fieldType().name(), field);
}
private void checkDimensionExceeded(int index, DocumentParserContext context) {
if (index >= fieldType().dims) {
throw new IllegalArgumentException(
"The ["
+ typeName()
+ "] field ["
+ fullPath()
+ "] in doc ["
+ context.documentDescription()
+ "] has more dimensions "
+ "than defined in the mapping ["
+ fieldType().dims
+ "]"
);
}
}
private void checkDimensionMatches(int index, DocumentParserContext context) {
if (index != fieldType().dims) {
throw new IllegalArgumentException(
"The ["
+ typeName()
+ "] field ["
+ fullPath()
+ "] in doc ["
+ context.documentDescription()
+ "] has a different number of dimensions "
+ "["
+ index
+ "] than defined in the mapping ["
+ fieldType().dims
+ "]"
);
}
}
@Override
protected void parseCreateField(DocumentParserContext context) {
throw new AssertionError("parse is implemented directly");
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public FieldMapper.Builder getMergeBuilder() {
return new Builder(leafName(), indexCreatedVersion, isExcludeSourceVectors, extraVectorsFormatProviders).init(this);
}
private static DenseVectorIndexOptions parseIndexOptions(String fieldName, Object propNode, IndexVersion indexVersion) {
@SuppressWarnings("unchecked")
Map<String, ?> indexOptionsMap = (Map<String, ?>) propNode;
Object typeNode = indexOptionsMap.remove("type");
if (typeNode == null) {
throw new MapperParsingException("[index_options] requires field [type] to be configured");
}
String type = XContentMapValues.nodeStringValue(typeNode);
Optional<VectorIndexType> vectorIndexType = VectorIndexType.fromString(type);
if (vectorIndexType.isEmpty()) {
throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]");
}
VectorIndexType parsedType = vectorIndexType.get();
return parsedType.parseIndexOptions(fieldName, indexOptionsMap, indexVersion);
}
/**
* @return the custom kNN vectors format that is configured for this field or
* {@code null} if the default format should be used.
*/
public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultFormat, IndexSettings indexSettings) {
final KnnVectorsFormat format;
if (indexOptions == null) {
format = fieldType().element.elementType() == ElementType.BIT ? new ES815HnswBitVectorsFormat() : defaultFormat;
} else {
// if plugins provided alternative KnnVectorsFormat for this indexOptions, use it instead of standard
KnnVectorsFormat extraKnnFormat = null;
for (VectorsFormatProvider vectorsFormatProvider : extraVectorsFormatProviders) {
extraKnnFormat = vectorsFormatProvider.getKnnVectorsFormat(indexSettings, indexOptions, fieldType().similarity());
if (extraKnnFormat != null) {
break;
}
}
format = extraKnnFormat != null ? extraKnnFormat : indexOptions.getVectorsFormat(fieldType().element.elementType());
}
// It's legal to reuse the same format name as this is the same on-disk format.
return new KnnVectorsFormat(format.getName()) {
@Override
public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException {
return format.fieldsWriter(state);
}
@Override
public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException {
return format.fieldsReader(state);
}
@Override
public int getMaxDimensions(String fieldName) {
return MAX_DIMS_COUNT;
}
@Override
public String toString() {
return format.toString();
}
};
}
@Override
public SourceLoader.SyntheticVectorsLoader syntheticVectorsLoader() {
if (isExcludeSourceVectors) {
return new SyntheticVectorsPatchFieldLoader<>(
// Recreate the object for each leaf so that different segments can be searched concurrently.
() -> new IndexedSyntheticFieldLoader(indexCreatedVersion, fieldType().similarity),
IndexedSyntheticFieldLoader::copyVectorAsList
);
}
return null;
}
@Override
protected SyntheticSourceSupport syntheticSourceSupport() {
return new SyntheticSourceSupport.Native(
() -> fieldType().indexed
? new IndexedSyntheticFieldLoader(indexCreatedVersion, fieldType().similarity)
: new DocValuesSyntheticFieldLoader(indexCreatedVersion)
);
}
private
|
DenseVectorFieldType
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java
|
{
"start": 1417,
"end": 6893
}
|
class ____ extends InternalMultiBucketAggregationTestCase<InternalIpPrefix> {
@Override
protected InternalIpPrefix createTestInstance(String name, Map<String, Object> metadata, InternalAggregations aggregations) {
return createTestInstance(name, metadata, aggregations, randomPrefixLength(), randomMinDocCount());
}
private int randomPrefixLength() {
return between(1, InetAddressPoint.BYTES * 8);
}
private long randomMinDocCount() {
return randomBoolean() ? 1 : randomLongBetween(1, Long.MAX_VALUE / (maxNumberOfBuckets() + 1));
}
private InternalIpPrefix createTestInstance(
String name,
Map<String, Object> metadata,
InternalAggregations aggregations,
int prefixLength,
long minDocCount
) {
boolean keyed = randomBoolean();
boolean appendPrefixLength = randomBoolean();
boolean canBeV4 = prefixLength <= 32;
int bucketsCount = between(1, maxNumberOfBuckets());
Set<BytesRef> keys = new TreeSet<>();
while (keys.size() < bucketsCount) {
boolean v4 = canBeV4 && randomBoolean();
byte[] ip = InetAddressPoint.encode(randomIp(v4));
byte[] mask = mask(v4 ? prefixLength + 96 : prefixLength);
byte[] subnet = new byte[InetAddressPoint.BYTES];
for (int i = 0; i < InetAddressPoint.BYTES; i++) {
subnet[i] = (byte) (ip[i] & mask[i]);
}
keys.add(new BytesRef(ip));
}
List<InternalIpPrefix.Bucket> buckets = new ArrayList<>(keys.size());
for (Iterator<BytesRef> itr = keys.iterator(); itr.hasNext();) {
BytesRef key = itr.next();
boolean v6 = InetAddressPoint.decode(key.bytes) instanceof Inet6Address;
buckets.add(
new InternalIpPrefix.Bucket(key, v6, prefixLength, appendPrefixLength, randomLongBetween(0, Long.MAX_VALUE), aggregations)
);
}
return new InternalIpPrefix(name, DocValueFormat.IP, keyed, minDocCount, buckets, metadata);
}
private byte[] mask(int prefixLength) {
byte[] mask = new byte[InetAddressPoint.BYTES];
int m = 0;
int b = 0x80;
for (int i = 0; i < prefixLength; i++) {
mask[m] |= (byte) b;
b = b >> 1;
if (b == 0) {
m++;
b = 0x80;
}
}
return mask;
}
@Override
protected BuilderAndToReduce<InternalIpPrefix> randomResultsToReduce(String name, int size) {
Map<String, Object> metadata = createTestMetadata();
InternalAggregations aggregations = createSubAggregations();
int prefixLength = randomPrefixLength();
long minDocCount = randomMinDocCount();
List<InternalIpPrefix> inputs = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
InternalIpPrefix t = createTestInstance(name, metadata, aggregations, prefixLength, minDocCount);
inputs.add(t);
}
return new BuilderAndToReduce<>(mockBuilder(inputs), inputs);
}
@Override
protected void assertReduced(InternalIpPrefix reduced, List<InternalIpPrefix> inputs) {
// we cannot check the current attribute values as they depend on the first aggregator during the reduced phase
Map<BytesRef, Long> expectedCounts = new HashMap<>();
for (InternalIpPrefix i : inputs) {
for (InternalIpPrefix.Bucket b : i.getBuckets()) {
long acc = expectedCounts.getOrDefault(b.getKey(), 0L);
acc += b.getDocCount();
expectedCounts.put(b.getKey(), acc);
}
}
MapMatcher countsMatches = matchesMap();
for (Map.Entry<BytesRef, Long> e : expectedCounts.entrySet()) {
if (e.getValue() >= inputs.get(0).minDocCount) {
countsMatches = countsMatches.entry(DocValueFormat.IP.format(e.getKey()), e.getValue());
}
}
assertMap(
new TreeMap<>(reduced.getBuckets().stream().collect(toMap(b -> b.getKeyAsString(), b -> b.getDocCount()))),
countsMatches
);
}
public void testPartialReduceNoMinDocCount() {
InternalIpPrefix.Bucket b1 = new InternalIpPrefix.Bucket(
new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.1"))),
false,
1,
false,
1,
InternalAggregations.EMPTY
);
InternalIpPrefix.Bucket b2 = new InternalIpPrefix.Bucket(
new BytesRef(InetAddressPoint.encode(InetAddresses.forString("200.0.0.1"))),
false,
1,
false,
2,
InternalAggregations.EMPTY
);
InternalIpPrefix t = new InternalIpPrefix("test", DocValueFormat.IP, false, 100, List.of(b1, b2), null);
InternalIpPrefix reduced = (InternalIpPrefix) InternalAggregationTestCase.reduce(
List.of(t),
emptyReduceContextBuilder().forPartialReduction()
);
assertThat(reduced.getBuckets().get(0).getDocCount(), equalTo(1L));
assertThat(reduced.getBuckets().get(1).getDocCount(), equalTo(2L));
}
@Override
protected InternalIpPrefix mutateInstance(InternalIpPrefix instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
|
InternalIpPrefixTests
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-mcp/src/main/java/org/apache/dubbo/mcp/core/McpServiceFilter.java
|
{
"start": 6833,
"end": 14344
}
|
class
____.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Method not found in implementation class: " + methodName + " with parameters: "
+ Arrays.toString(paramTypes),
e);
}
}
McpTool interfaceMcpTool = method.getAnnotation(McpTool.class);
if (interfaceMcpTool != null) {
return interfaceMcpTool;
}
Class<?> serviceInterface = providerModel.getServiceModel().getServiceInterfaceClass();
if (serviceInterface != null) {
try {
Method interfaceMethod = serviceInterface.getMethod(methodName, paramTypes);
return interfaceMethod.getAnnotation(McpTool.class);
} catch (NoSuchMethodException e) {
// Method not found in service interface
}
}
return null;
}
public McpToolConfig getMcpToolConfig(ProviderModel providerModel, Method method) {
String interfaceName = providerModel.getServiceModel().getInterfaceName();
McpToolConfig config = new McpToolConfig();
config.setToolName(method.getName());
McpTool mcpTool = getMethodMcpTool(providerModel, method);
if (mcpTool != null) {
if (StringUtils.isNotEmpty(mcpTool.name())) {
config.setToolName(mcpTool.name());
}
if (StringUtils.isNotEmpty(mcpTool.description())) {
config.setDescription(mcpTool.description());
}
if (mcpTool.tags().length > 0) {
config.setTags(Arrays.asList(mcpTool.tags()));
}
config.setPriority(mcpTool.priority());
}
String methodPrefix =
McpConstant.SETTINGS_MCP_SERVICE_PREFIX + "." + interfaceName + ".methods." + method.getName() + ".";
String configToolName = configuration.getString(methodPrefix + "name");
if (StringUtils.isNotEmpty(configToolName)) {
config.setToolName(configToolName);
}
String configDescription = configuration.getString(methodPrefix + "description");
if (StringUtils.isNotEmpty(configDescription)) {
config.setDescription(configDescription);
}
String configTags = configuration.getString(methodPrefix + "tags");
if (StringUtils.isNotEmpty(configTags)) {
config.setTags(Arrays.asList(configTags.split(",")));
}
URL serviceUrl = getServiceUrl(providerModel);
if (serviceUrl != null) {
String urlToolName = serviceUrl.getMethodParameter(method.getName(), McpConstant.PARAM_MCP_TOOL_NAME);
if (urlToolName != null && StringUtils.isNotEmpty(urlToolName)) {
config.setToolName(urlToolName);
}
String urlDescription = serviceUrl.getMethodParameter(method.getName(), McpConstant.PARAM_MCP_DESCRIPTION);
if (urlDescription != null && StringUtils.isNotEmpty(urlDescription)) {
config.setDescription(urlDescription);
}
String urlTags = serviceUrl.getMethodParameter(method.getName(), McpConstant.PARAM_MCP_TAGS);
if (urlTags != null && StringUtils.isNotEmpty(urlTags)) {
config.setTags(Arrays.asList(urlTags.split(",")));
}
String urlPriority = serviceUrl.getMethodParameter(method.getName(), McpConstant.PARAM_MCP_PRIORITY);
if (urlPriority != null && StringUtils.isNotEmpty(urlPriority)) {
try {
config.setPriority(Integer.parseInt(urlPriority));
} catch (NumberFormatException e) {
logger.warn(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Invalid URL priority value: " + urlPriority + " for method: " + method.getName());
}
}
}
return config;
}
public McpToolConfig getMcpToolConfig(ProviderModel providerModel) {
String interfaceName = providerModel.getServiceModel().getInterfaceName();
McpToolConfig config = new McpToolConfig();
String servicePrefix = McpConstant.SETTINGS_MCP_SERVICE_PREFIX + "." + interfaceName + ".";
String configToolName = configuration.getString(servicePrefix + "name");
if (StringUtils.isNotEmpty(configToolName)) {
config.setToolName(configToolName);
}
String configDescription = configuration.getString(servicePrefix + "description");
if (StringUtils.isNotEmpty(configDescription)) {
config.setDescription(configDescription);
}
String configTags = configuration.getString(servicePrefix + "tags");
if (StringUtils.isNotEmpty(configTags)) {
config.setTags(Arrays.asList(configTags.split(",")));
}
URL serviceUrl = getServiceUrl(providerModel);
if (serviceUrl != null) {
String urlToolName = serviceUrl.getParameter(McpConstant.PARAM_MCP_TOOL_NAME);
if (urlToolName != null && StringUtils.isNotEmpty(urlToolName)) {
config.setToolName(urlToolName);
}
String urlDescription = serviceUrl.getParameter(McpConstant.PARAM_MCP_DESCRIPTION);
if (urlDescription != null && StringUtils.isNotEmpty(urlDescription)) {
config.setDescription(urlDescription);
}
String urlTags = serviceUrl.getParameter(McpConstant.PARAM_MCP_TAGS);
if (urlTags != null && StringUtils.isNotEmpty(urlTags)) {
config.setTags(Arrays.asList(urlTags.split(",")));
}
String urlPriority = serviceUrl.getParameter(McpConstant.PARAM_MCP_PRIORITY);
if (urlPriority != null && StringUtils.isNotEmpty(urlPriority)) {
try {
config.setPriority(Integer.parseInt(urlPriority));
} catch (NumberFormatException e) {
logger.warn(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Invalid URL priority value: " + urlPriority + " for service: " + interfaceName);
}
}
}
return config;
}
private URL getServiceUrl(ProviderModel providerModel) {
List<URL> serviceUrls = providerModel.getServiceUrls();
if (serviceUrls != null && !serviceUrls.isEmpty()) {
return serviceUrls.get(0);
}
return null;
}
private Pattern[] parsePatterns(String patternStr) {
if (StringUtils.isEmpty(patternStr)) {
return new Pattern[0];
}
return Arrays.stream(patternStr.split(","))
.map(String::trim)
.filter(StringUtils::isNotEmpty)
.map(pattern -> Pattern.compile(pattern.replace("*", ".*")))
.toArray(Pattern[]::new);
}
private boolean isMatchedByPatterns(String text, Pattern[] patterns) {
for (Pattern pattern : patterns) {
if (pattern.matcher(text).matches()) {
return true;
}
}
return false;
}
public static
|
logger
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/DaggerSuperficialValidationTest.java
|
{
"start": 1885,
"end": 2518
}
|
enum ____ {
JAVA,
KOTLIN
}
@Parameters(name = "sourceKind={0}")
public static ImmutableList<Object[]> parameters() {
return ImmutableList.of(new Object[] {SourceKind.JAVA}, new Object[] {SourceKind.KOTLIN});
}
private final SourceKind sourceKind;
public DaggerSuperficialValidationTest(SourceKind sourceKind) {
this.sourceKind = sourceKind;
}
private static final Joiner NEW_LINES = Joiner.on("\n ");
@Test
public void missingReturnType() {
runTest(
CompilerTests.javaSource(
"test.TestClass",
"package test;",
"",
"abstract
|
SourceKind
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/scheduling/config/AnnotationDrivenBeanDefinitionParserTests.java
|
{
"start": 1101,
"end": 2773
}
|
class ____ {
private ConfigurableApplicationContext context = new ClassPathXmlApplicationContext(
"annotationDrivenContext.xml", AnnotationDrivenBeanDefinitionParserTests.class);
@AfterEach
void closeApplicationContext() {
context.close();
}
@Test
void asyncPostProcessorRegistered() {
assertThat(context.containsBean(TaskManagementConfigUtils.ASYNC_ANNOTATION_PROCESSOR_BEAN_NAME)).isTrue();
}
@Test
void scheduledPostProcessorRegistered() {
assertThat(context.containsBean(TaskManagementConfigUtils.SCHEDULED_ANNOTATION_PROCESSOR_BEAN_NAME)).isTrue();
}
@Test
void asyncPostProcessorExecutorReference() {
Object executor = context.getBean("testExecutor");
Object postProcessor = context.getBean(TaskManagementConfigUtils.ASYNC_ANNOTATION_PROCESSOR_BEAN_NAME);
assertThat(((Supplier<?>) new DirectFieldAccessor(postProcessor).getPropertyValue("executor")).get()).isSameAs(executor);
}
@Test
void scheduledPostProcessorSchedulerReference() {
Object scheduler = context.getBean("testScheduler");
Object postProcessor = context.getBean(TaskManagementConfigUtils.SCHEDULED_ANNOTATION_PROCESSOR_BEAN_NAME);
assertThat(new DirectFieldAccessor(postProcessor).getPropertyValue("scheduler")).isSameAs(scheduler);
}
@Test
void asyncPostProcessorExceptionHandlerReference() {
Object exceptionHandler = context.getBean("testExceptionHandler");
Object postProcessor = context.getBean(TaskManagementConfigUtils.ASYNC_ANNOTATION_PROCESSOR_BEAN_NAME);
assertThat(((Supplier<?>) new DirectFieldAccessor(postProcessor).getPropertyValue("exceptionHandler")).get()).isSameAs(exceptionHandler);
}
}
|
AnnotationDrivenBeanDefinitionParserTests
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/BootstrapConstants.java
|
{
"start": 78,
"end": 2727
}
|
interface ____ {
String SERIALIZED_APP_MODEL = "quarkus-internal.serialized-app-model.path";
String SERIALIZED_TEST_APP_MODEL = "quarkus-internal-test.serialized-app-model.path";
String DESCRIPTOR_FILE_NAME = "quarkus-extension.properties";
String CONDITIONAL_DEPENDENCIES = "conditional-dependencies";
String CONDITIONAL_DEV_DEPENDENCIES = "conditional-dev-dependencies";
String DEPENDENCY_CONDITION = "dependency-condition";
/**
* Constant for sharing the additional mappings between test-sources and the corresponding application-sources.
* The Gradle plugin populates this data which is then read by the PathTestHelper when executing tests.
*/
String TEST_TO_MAIN_MAPPINGS = "TEST_TO_MAIN_MAPPINGS";
String OUTPUT_SOURCES_DIR = "OUTPUT_SOURCES_DIR";
String QUARKUS_EXTENSION_FILE_NAME = "quarkus-extension.yaml";
String META_INF = "META-INF";
String DESCRIPTOR_PATH = META_INF + '/' + DESCRIPTOR_FILE_NAME;
String BUILD_STEPS_PATH = META_INF + "/quarkus-build-steps.list";
String EXTENSION_METADATA_PATH = META_INF + '/' + QUARKUS_EXTENSION_FILE_NAME;
String PROP_DEPLOYMENT_ARTIFACT = "deployment-artifact";
String PROP_PROVIDES_CAPABILITIES = "provides-capabilities";
String PROP_REQUIRES_CAPABILITIES = "requires-capabilities";
String PARENT_FIRST_ARTIFACTS = "parent-first-artifacts";
String EXCLUDED_ARTIFACTS = "excluded-artifacts";
String LESSER_PRIORITY_ARTIFACTS = "lesser-priority-artifacts";
String EMPTY = "";
String PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX = "-quarkus-platform-descriptor";
String PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX = "-quarkus-platform-properties";
String PLATFORM_PROPERTY_PREFIX = "platform.";
String QUARKUS_BOOTSTRAP_WORKSPACE_DISCOVERY = "quarkus.bootstrap.workspace-discovery";
/**
* Prefix for properties configuring extension Dev mode JVM arguments
*/
String EXT_DEV_MODE_JVM_OPTION_PREFIX = "dev-mode.jvm-option.";
/**
* {@code quarkus-extension.properties} property listing JVM options whose values shouldn't change by
* the default parameters values of the Quarkus Maven and Gradle plugins launching an application in dev mode
*/
String EXT_DEV_MODE_LOCK_JVM_OPTIONS = "dev-mode.lock.jvm-options";
/**
* {@code quarkus-extension.properties} property listing JVM XX options whose values shouldn't change by
* the default parameters values of the Quarkus Maven and Gradle plugins launching an application in dev mode
*/
String EXT_DEV_MODE_LOCK_XX_JVM_OPTIONS = "dev-mode.lock.xx-jvm-options";
}
|
BootstrapConstants
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecChangelogNormalize.java
|
{
"start": 4191,
"end": 11015
}
|
class ____ extends ExecNodeBase<RowData>
implements StreamExecNode<RowData>, SingleTransformationTranslator<RowData> {
public static final String CHANGELOG_NORMALIZE_TRANSFORMATION = "changelog-normalize";
public static final String FIELD_NAME_UNIQUE_KEYS = "uniqueKeys";
public static final String FIELD_NAME_GENERATE_UPDATE_BEFORE = "generateUpdateBefore";
public static final String STATE_NAME = "changelogNormalizeState";
public static final String FIELD_NAME_FILTER_CONDITION = "filterCondition";
@JsonProperty(FIELD_NAME_UNIQUE_KEYS)
private final int[] uniqueKeys;
@JsonProperty(FIELD_NAME_GENERATE_UPDATE_BEFORE)
private final boolean generateUpdateBefore;
@Nullable
@JsonProperty(FIELD_NAME_FILTER_CONDITION)
@JsonInclude(JsonInclude.Include.NON_NULL)
private final RexNode filterCondition;
@Nullable
@JsonProperty(FIELD_NAME_STATE)
@JsonInclude(JsonInclude.Include.NON_NULL)
private final List<StateMetadata> stateMetadataList;
public StreamExecChangelogNormalize(
ReadableConfig tableConfig,
int[] uniqueKeys,
boolean generateUpdateBefore,
@Nullable RexNode filterCondition,
InputProperty inputProperty,
RowType outputType,
String description) {
this(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecChangelogNormalize.class),
ExecNodeContext.newPersistedConfig(StreamExecChangelogNormalize.class, tableConfig),
uniqueKeys,
generateUpdateBefore,
StateMetadata.getOneInputOperatorDefaultMeta(tableConfig, STATE_NAME),
Collections.singletonList(inputProperty),
outputType,
description,
filterCondition);
}
@JsonCreator
public StreamExecChangelogNormalize(
@JsonProperty(FIELD_NAME_ID) Integer id,
@JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context,
@JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig,
@JsonProperty(FIELD_NAME_UNIQUE_KEYS) int[] uniqueKeys,
@JsonProperty(FIELD_NAME_GENERATE_UPDATE_BEFORE) boolean generateUpdateBefore,
@Nullable @JsonProperty(FIELD_NAME_STATE) List<StateMetadata> stateMetadataList,
@JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
@JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType,
@JsonProperty(FIELD_NAME_DESCRIPTION) String description,
@JsonProperty(FIELD_NAME_FILTER_CONDITION) @Nullable RexNode filterCondition) {
super(id, context, persistedConfig, inputProperties, outputType, description);
this.uniqueKeys = uniqueKeys;
this.generateUpdateBefore = generateUpdateBefore;
this.stateMetadataList = stateMetadataList;
this.filterCondition = filterCondition;
}
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
final ExecEdge inputEdge = getInputEdges().get(0);
final Transformation<RowData> inputTransform =
(Transformation<RowData>) inputEdge.translateToPlan(planner);
final InternalTypeInfo<RowData> rowTypeInfo =
(InternalTypeInfo<RowData>) inputTransform.getOutputType();
final OneInputStreamOperator<RowData, RowData> operator;
final long stateRetentionTime =
StateMetadata.getStateTtlForOneInputOperator(config, stateMetadataList);
final boolean isMiniBatchEnabled = MinibatchUtil.isMiniBatchEnabled(config);
final ClassLoader classLoader = planner.getFlinkContext().getClassLoader();
final RowType inputType = rowTypeInfo.toRowType();
final GeneratedRecordEqualiser generatedEqualiser =
new EqualiserCodeGenerator(inputType, classLoader)
.generateRecordEqualiser("DeduplicateRowEqualiser");
// Depending on whether filterCondition is null or not, there is different logic
// at ProcTimeMiniBatchDeduplicateKeepLastRowFunction#finishBundle
final GeneratedFilterCondition generatedFilterCondition =
filterCondition == null
? null
: FilterCodeGenerator.generateFilterCondition(
config, classLoader, filterCondition, inputType);
if (isMiniBatchEnabled) {
TypeSerializer<RowData> rowSerializer =
rowTypeInfo.createSerializer(
planner.getExecEnv().getConfig().getSerializerConfig());
ProcTimeMiniBatchDeduplicateKeepLastRowFunction processFunction =
new ProcTimeMiniBatchDeduplicateKeepLastRowFunction(
rowTypeInfo,
rowSerializer,
stateRetentionTime,
generateUpdateBefore,
true, // generateInsert
false, // inputInsertOnly
generatedEqualiser,
generatedFilterCondition);
CountBundleTrigger<RowData> trigger = MinibatchUtil.createMiniBatchTrigger(config);
operator = new KeyedMapBundleOperator<>(processFunction, trigger);
} else {
ProcTimeDeduplicateKeepLastRowFunction processFunction =
new ProcTimeDeduplicateKeepLastRowFunction(
rowTypeInfo,
stateRetentionTime,
generateUpdateBefore,
true, // generateInsert
false, // inputInsertOnly
generatedEqualiser,
generatedFilterCondition);
operator = new KeyedProcessOperator<>(processFunction);
}
final OneInputTransformation<RowData, RowData> transform =
ExecNodeUtil.createOneInputTransformation(
inputTransform,
createTransformationMeta(CHANGELOG_NORMALIZE_TRANSFORMATION, config),
operator,
rowTypeInfo,
inputTransform.getParallelism(),
false);
final RowDataKeySelector selector =
KeySelectorUtil.getRowDataSelector(classLoader, uniqueKeys, rowTypeInfo);
transform.setStateKeySelector(selector);
transform.setStateKeyType(selector.getProducedType());
return transform;
}
}
|
StreamExecChangelogNormalize
|
java
|
junit-team__junit5
|
junit-vintage-engine/src/testFixtures/java/org/junit/vintage/engine/samples/junit4/SingleFailingTheoryTestCase.java
|
{
"start": 575,
"end": 693
}
|
class ____ {
@Theory
public void theory() {
Assert.fail("this theory should fail");
}
}
|
SingleFailingTheoryTestCase
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/service/invoker/RequestBodyArgumentResolverTests.java
|
{
"start": 5764,
"end": 6613
}
|
interface ____ {
@GetExchange
void execute(@RequestBody String body);
@GetExchange
void executeNullable(@Nullable @RequestBody String body);
@GetExchange
void executeNotRequired(@RequestBody(required = false) String body);
@GetExchange
void executeOptional(@RequestBody Optional<String> body);
@GetExchange
void executeMono(@RequestBody Mono<String> body);
@GetExchange
void executeNullableMono(@Nullable @RequestBody Mono<String> body);
@GetExchange
void executeNotRequiredMono(@RequestBody(required = false) Mono<String> body);
@GetExchange
void executeSingle(@RequestBody Single<String> body);
@GetExchange
void executeMonoVoid(@RequestBody Mono<Void> body);
@GetExchange
void executeCompletable(@RequestBody Completable body);
@GetExchange
void executeNotRequestBody(String body);
}
}
|
Service
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/createTable/MySqlCreateTableTest81.java
|
{
"start": 951,
"end": 3117
}
|
class ____ extends MysqlTest {
public void test_one() throws Exception {
String sql = "CREATE TABLE `admin` (\n" +
" `id` char(20) NOT NULL,\n" +
" `username` varchar(16) NOT NULL COMMENT '用户名',\n" +
" `password` varchar(32) NOT NULL COMMENT '密码',\n" +
" `permission` varchar(255) NOT NULL DEFAULT '' COMMENT '权限',\n" +
" PRIMARY KEY (`id`) USING BTREE\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='管理员';";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseCreateTable();
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
//
// Column column = visitor.getColumn("tb_custom_vip_show_message", "custom_vip_show_message_seq");
// assertNotNull(column);
// assertEquals("INT", column.getDataType());
System.out.println(stmt);
{
String output = SQLUtils.toMySqlString(stmt);
assertEquals("CREATE TABLE `admin` (\n" +
"\t`id` char(20) NOT NULL,\n" +
"\t`username` varchar(16) NOT NULL COMMENT '用户名',\n" +
"\t`password` varchar(32) NOT NULL COMMENT '密码',\n" +
"\t`permission` varchar(255) NOT NULL DEFAULT '' COMMENT '权限',\n" +
"\tPRIMARY KEY USING BTREE (`id`)\n" +
") ENGINE = InnoDB CHARSET = utf8 COMMENT '管理员'", output);
}
{
String output = SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("create table `admin` (\n" +
"\t`id` char(20) not null,\n" +
"\t`username` varchar(16) not null comment '用户名',\n" +
"\t`password` varchar(32) not null comment '密码',\n" +
"\t`permission` varchar(255) not null default '' comment '权限',\n" +
"\tprimary key using BTREE (`id`)\n" +
") engine = InnoDB charset = utf8 comment '管理员'", output);
}
}
}
|
MySqlCreateTableTest81
|
java
|
spring-projects__spring-security
|
ldap/src/integration-test/java/org/springframework/security/ldap/userdetails/DefaultLdapAuthoritiesPopulatorGetGrantedAuthoritiesTests.java
|
{
"start": 1868,
"end": 3009
}
|
class ____ {
@Autowired
private DefaultSpringSecurityContextSource contextSource;
private DefaultLdapAuthoritiesPopulator populator;
@BeforeEach
public void setUp() {
this.populator = new DefaultLdapAuthoritiesPopulator(this.contextSource, "ou=groups");
this.populator.setIgnorePartialResultException(false);
}
@Test
public void groupSearchDoesNotAllowNullRoles() {
this.populator.setRolePrefix("ROLE_");
this.populator.setGroupRoleAttribute("ou");
this.populator.setSearchSubtree(true);
this.populator.setSearchSubtree(false);
this.populator.setConvertToUpperCase(true);
this.populator.setGroupSearchFilter("(member={0})");
DirContextAdapter ctx = new DirContextAdapter(
new DistinguishedName("uid=dayan,ou=people,dc=springframework,dc=org"));
Set<String> authorities = AuthorityUtils.authorityListToSet(this.populator.getGrantedAuthorities(ctx, "dayan"));
assertThat(authorities).as("Should have 1 role").hasSize(2);
assertThat(authorities).contains("ROLE_DEVELOPER");
assertThat(authorities).contains("ROLE_");
}
@Configuration
static
|
DefaultLdapAuthoritiesPopulatorGetGrantedAuthoritiesTests
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Issue_for_huangfeng.java
|
{
"start": 155,
"end": 1278
}
|
class ____ extends TestCase {
public void test_for_huangfeng() throws Exception {
String json = "{\"success\":\"Y\"}";
Model model = JSON.parseObject(json, Model.class);
assertTrue(model.isSuccess());
}
public void test_for_huangfeng_t() throws Exception {
String json = "{\"success\":\"T\"}";
Model model = JSON.parseObject(json, Model.class);
assertTrue(model.isSuccess());
}
public void test_for_huangfeng_is_t() throws Exception {
String json = "{\"isSuccess\":\"T\"}";
Model model = JSON.parseObject(json, Model.class);
assertTrue(model.isSuccess());
}
public void test_for_huangfeng_false() throws Exception {
String json = "{\"success\":\"N\"}";
Model model = JSON.parseObject(json, Model.class);
assertFalse(model.isSuccess());
}
public void test_for_huangfeng_false_f() throws Exception {
String json = "{\"success\":\"F\"}";
Model model = JSON.parseObject(json, Model.class);
assertFalse(model.isSuccess());
}
public static
|
Issue_for_huangfeng
|
java
|
elastic__elasticsearch
|
libs/core/src/test/java/org/elasticsearch/core/GlobTests.java
|
{
"start": 638,
"end": 7611
}
|
class ____ extends ESTestCase {
public void testMatchNull() {
assertThat(Glob.globMatch(null, null), is(false));
assertThat(Glob.globMatch(randomAlphaOfLengthBetween(1, 10), null), is(false));
assertThat(Glob.globMatch(null, randomAlphaOfLengthBetween(1, 10)), is(false));
}
public void testMatchLiteral() {
assertMatch("", "");
var str = randomAlphaOfLengthBetween(1, 12);
assertMatch(str, str);
str = randomAlphanumericOfLength(randomIntBetween(1, 12));
assertMatch(str, str);
str = randomAsciiStringNoAsterisks(randomIntBetween(1, 24));
assertMatch(str, str);
}
public void testSingleAsterisk() {
assertMatch("*", "");
assertMatch("*", randomAlphaOfLengthBetween(1, 12));
assertMatch("*", randomAlphanumericOfLength(randomIntBetween(1, 12)));
assertMatch("*", randomAsciiString(randomIntBetween(1, 24), ch -> ch >= ' ' && ch <= '~'));
assertMatch("*", "*".repeat(randomIntBetween(1, 5)));
}
public void testMultipleConsecutiveAsterisk() {
var pattern = "*".repeat(randomIntBetween(2, 5));
assertMatch(pattern, "");
assertMatch(pattern, randomAlphaOfLengthBetween(1, 12));
assertMatch(pattern, randomAlphanumericOfLength(randomIntBetween(1, 12)));
assertMatch(pattern, randomAsciiString(randomIntBetween(1, 24)));
assertMatch(pattern, "*".repeat(randomIntBetween(1, 5)));
}
public void testPrefixMatch() {
assertMatch("123*", "123");
assertMatch("123*", "123abc");
assertMatch("123*", "123123123");
assertNonMatch("123*", "12");
assertNonMatch("123*", "124");
assertNonMatch("123*", "23");
assertNonMatch("123*", "23x");
assertNonMatch("123*", "x23");
assertNonMatch("123*", "12*");
assertNonMatch("123*", "12-3");
assertNonMatch("123*", "1.2.3");
assertNonMatch("123*", "abc123");
assertNonMatch("123*", "abc123def");
var prefix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12));
var pattern = prefix + "*";
assertMatch(pattern, prefix);
assertMatch(pattern, prefix + randomAsciiString(randomIntBetween(1, 30)));
assertNonMatch(
pattern,
randomValueOtherThanMany(s -> s.charAt(0) == prefix.charAt(0), () -> randomAsciiString(randomIntBetween(1, 30))) + prefix
);
assertNonMatch(pattern, prefix.substring(0, prefix.length() - 1));
assertNonMatch(pattern, prefix.substring(1));
}
public void testSuffixMatch() {
assertMatch("*123", "123");
assertMatch("*123", "abc123");
assertMatch("*123", "123123123");
assertNonMatch("*123", "12");
assertNonMatch("*123", "x12");
assertNonMatch("*123", "23");
assertNonMatch("*123", "x23");
assertNonMatch("*123", "12*");
assertNonMatch("*123", "1.2.3");
assertNonMatch("*123", "123abc");
assertNonMatch("*123", "abc123def");
var suffix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12));
var pattern = "*" + suffix;
assertMatch(pattern, suffix);
assertMatch(pattern, randomAsciiString(randomIntBetween(1, 30)) + suffix);
assertNonMatch(
pattern,
randomValueOtherThanMany(str -> str.endsWith(suffix), () -> suffix + "#" + randomAsciiString(randomIntBetween(1, 30)))
);
assertNonMatch(pattern, suffix.substring(0, suffix.length() - 1));
assertNonMatch(pattern, suffix.substring(1));
}
public void testInfixStringMatch() {
assertMatch("*123*", "abc123def");
assertMatch("*123*", "abc123");
assertMatch("*123*", "123def");
assertMatch("*123*", "123");
assertMatch("*123*", "123123123");
assertMatch("*123*", "1.12.123.1234");
assertNonMatch("*123*", "12");
assertNonMatch("*123*", "23");
assertNonMatch("*123*", "x23");
assertNonMatch("*123*", "12*");
assertNonMatch("*123*", "1.2.3");
var infix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12));
var pattern = "*" + infix + "*";
assertMatch(pattern, infix);
assertMatch(pattern, randomAsciiString(randomIntBetween(1, 30)) + infix + randomAsciiString(randomIntBetween(1, 30)));
assertMatch(pattern, randomAsciiString(randomIntBetween(1, 30)) + infix);
assertMatch(pattern, infix + randomAsciiString(randomIntBetween(1, 30)));
assertNonMatch(pattern, infix.substring(0, infix.length() - 1));
assertNonMatch(pattern, infix.substring(1));
}
public void testInfixAsteriskMatch() {
assertMatch("abc*xyz", "abcxyz");
assertMatch("abc*xyz", "abc#xyz");
assertMatch("abc*xyz", "abc*xyz");
assertMatch("abc*xyz", "abcdefghijklmnopqrstuvwxyz");
assertNonMatch("abc*xyz", "ABC.xyz");
assertNonMatch("abc*xyz", "RabcSxyzT");
assertNonMatch("abc*xyz", "RabcSxyz");
assertNonMatch("abc*xyz", "abcSxyzT");
assertMatch("123*321", "123321");
assertMatch("123*321", "12345678987654321");
assertNonMatch("123*321", "12321");
var prefix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12));
var suffix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12));
var pattern = prefix + "*" + suffix;
assertMatch(pattern, prefix + suffix);
assertMatch(pattern, prefix + randomAsciiString(randomIntBetween(1, 30)) + suffix);
assertNonMatch(pattern, prefix.substring(0, prefix.length() - 1) + suffix);
assertNonMatch(pattern, prefix + suffix.substring(1));
}
public void testLiteralSubstringMatching() {
assertMatch("start*middle*end", "startmiddleend");
assertMatch("start*middle*end", "start.middle.end");
assertMatch("start*middle*end", "start.middlX.middle.end");
assertMatch("start*middle*end", "start.middlmiddle.end");
assertMatch("start*middle*end", "start.middle.eend");
assertMatch("start*middle*end", "start.middle.enend");
assertMatch("start*middle*end", "start.middle.endend");
assertNonMatch("start*middle*end", "startmiddlend");
assertNonMatch("start*middle*end", "start.end");
assertNonMatch("start*middle*end", "start+MIDDLE+end");
assertNonMatch("start*middle*end", "start+mid+dle+end");
assertNonMatch("start*middle*end", "start+mid+middle+en");
}
private static void assertMatch(String pattern, String str) {
assertThat("Expect [" + str + "] to match '" + pattern + "'", Glob.globMatch(pattern, str), is(true));
}
private static void assertNonMatch(String pattern, String str) {
assertThat("Expect [" + str + "] to not match '" + pattern + "'", Glob.globMatch(pattern, str), is(false));
}
@FunctionalInterface
|
GlobTests
|
java
|
apache__camel
|
components/camel-bean-validator/src/test/java/org/apache/camel/component/bean/validator/CarWithRedefinedDefaultGroup.java
|
{
"start": 1072,
"end": 1929
}
|
class ____ implements Car {
@NotNull(groups = RequiredChecks.class)
private String manufacturer;
@NotNull(groups = RequiredChecks.class)
@Size(min = 5, max = 14, groups = OptionalChecks.class)
private String licensePlate;
public CarWithRedefinedDefaultGroup(String manufacturer, String licencePlate) {
this.manufacturer = manufacturer;
this.licensePlate = licencePlate;
}
@Override
public String getManufacturer() {
return manufacturer;
}
@Override
public void setManufacturer(String manufacturer) {
this.manufacturer = manufacturer;
}
@Override
public String getLicensePlate() {
return licensePlate;
}
@Override
public void setLicensePlate(String licensePlate) {
this.licensePlate = licensePlate;
}
}
|
CarWithRedefinedDefaultGroup
|
java
|
google__error-prone
|
annotation/src/test/java/com/google/errorprone/BugPatternValidatorTest.java
|
{
"start": 4542,
"end": 5175
}
|
class ____ {}
BugPattern annotation = BugPatternTestClass.class.getAnnotation(BugPattern.class);
BugPatternValidator.validate(annotation);
}
@Test
public void multipleCustomSuppressionAnnotations() throws Exception {
@BugPattern(
name = "customSuppressionAnnotation",
summary = "Uses multiple custom suppression annotations",
explanation = "Uses multiple custom suppression annotations",
severity = SeverityLevel.ERROR,
suppressionAnnotations = {
CustomSuppressionAnnotation.class,
CustomSuppressionAnnotation2.class
})
final
|
BugPatternTestClass
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/filters/PreventAbortResteasyReactiveContainerRequestContext.java
|
{
"start": 742,
"end": 3926
}
|
class ____ implements ContainerRequestContext {
private final ContainerRequestContext delegate;
public PreventAbortResteasyReactiveContainerRequestContext(ContainerRequestContext delegate) {
this.delegate = delegate;
}
@Override
public Object getProperty(String name) {
return delegate.getProperty(name);
}
@Override
public Collection<String> getPropertyNames() {
return delegate.getPropertyNames();
}
@Override
public void setProperty(String name, Object object) {
delegate.setProperty(name, object);
}
@Override
public void removeProperty(String name) {
delegate.removeProperty(name);
}
@Override
public UriInfo getUriInfo() {
return delegate.getUriInfo();
}
@Override
public void setRequestUri(URI requestUri) {
delegate.setRequestUri(requestUri);
}
@Override
public void setRequestUri(URI baseUri, URI requestUri) {
delegate.setRequestUri(baseUri, requestUri);
}
@Override
public Request getRequest() {
return delegate.getRequest();
}
@Override
public String getMethod() {
return delegate.getMethod();
}
@Override
public void setMethod(String method) {
delegate.setMethod(method);
}
@Override
public MultivaluedMap<String, String> getHeaders() {
return delegate.getHeaders();
}
@Override
public String getHeaderString(String name) {
return delegate.getHeaderString(name);
}
@Override
public Date getDate() {
return delegate.getDate();
}
@Override
public Locale getLanguage() {
return delegate.getLanguage();
}
@Override
public int getLength() {
return delegate.getLength();
}
@Override
public MediaType getMediaType() {
return delegate.getMediaType();
}
@Override
public List<MediaType> getAcceptableMediaTypes() {
return delegate.getAcceptableMediaTypes();
}
@Override
public List<Locale> getAcceptableLanguages() {
return delegate.getAcceptableLanguages();
}
@Override
public Map<String, Cookie> getCookies() {
return delegate.getCookies();
}
@Override
public boolean hasEntity() {
return delegate.hasEntity();
}
@Override
public InputStream getEntityStream() {
return delegate.getEntityStream();
}
@Override
public void setEntityStream(InputStream input) {
delegate.setEntityStream(input);
}
@Override
public SecurityContext getSecurityContext() {
return delegate.getSecurityContext();
}
@Override
public void setSecurityContext(SecurityContext context) {
delegate.setSecurityContext(context);
}
@Override
public void abortWith(Response response) {
throw new IllegalStateException(
"Calling 'abortWith' is not permitted when using @ServerRequestFilter or @ServerResponseFilter. If you need to abort processing, consider returning 'Response' or 'Uni<Response>'");
}
}
|
PreventAbortResteasyReactiveContainerRequestContext
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/testkit/TestData.java
|
{
"start": 1035,
"end": 2076
}
|
class ____ {
private static final WritableAssertionInfo ASSERTION_INFO = new WritableAssertionInfo();
private static final WritableAssertionInfo ASSERTION_INFO_AS_HEX = new WritableAssertionInfo();
private static final TextDescription DESCRIPTION = new TextDescription("who's the more foolish: the fool, or the fool who follows him?");
private static final Index INDEX = atIndex(0);
private static final Pattern MATCH_ANYTHING = Pattern.compile(".*");
static {
ASSERTION_INFO_AS_HEX.useHexadecimalRepresentation();
}
public static Pattern matchAnything() {
return MATCH_ANYTHING;
}
public static Index someIndex() {
return INDEX;
}
public static WritableAssertionInfo someInfo() {
return ASSERTION_INFO;
}
public static AssertionInfo someHexInfo() {
return ASSERTION_INFO_AS_HEX;
}
public static Description someDescription() {
return DESCRIPTION;
}
public static String someTextDescription() {
return "there's always a bigger fish";
}
private TestData() {}
}
|
TestData
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/beans/factory/support/InjectAnnotationAutowireContextTests.java
|
{
"start": 1886,
"end": 26842
}
|
class ____ {
private static final String PERSON1 = "person1";
private static final String PERSON2 = "person2";
private static final String JUERGEN = "juergen";
private static final String MARK = "mark";
@Test
void autowiredFieldWithSingleNonQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
context.registerBeanDefinition(PERSON1, person);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredMethodParameterWithSingleNonQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
context.registerBeanDefinition(PERSON1, person);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredConstructorArgumentWithSingleNonQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
context.registerBeanDefinition(PERSON1, person);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedConstructorArgumentTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(UnsatisfiedDependencyException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> assertThat(ex.getBeanName()).isEqualTo("autowired"));
}
@Test
void autowiredFieldWithSingleQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
person.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
context.registerBeanDefinition(PERSON1, person);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldTestBean bean = (QualifiedFieldTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredMethodParameterWithSingleQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
person.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
context.registerBeanDefinition(PERSON1, person);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedMethodParameterTestBean bean =
(QualifiedMethodParameterTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredMethodParameterWithStaticallyQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(QualifiedPerson.class, cavs, null);
context.registerBeanDefinition(PERSON1,
ScopedProxyUtils.createScopedProxy(new BeanDefinitionHolder(person, JUERGEN), context, true).getBeanDefinition());
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedMethodParameterTestBean bean =
(QualifiedMethodParameterTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredMethodParameterWithStaticallyQualifiedCandidateAmongOthers() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(QualifiedPerson.class, cavs, null);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedMethodParameterTestBean bean =
(QualifiedMethodParameterTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredConstructorArgumentWithSingleQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
person.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
context.registerBeanDefinition(PERSON1, person);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedConstructorArgumentTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedConstructorArgumentTestBean bean =
(QualifiedConstructorArgumentTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredFieldWithMultipleNonQualifiedCandidates() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredMethodParameterWithMultipleNonQualifiedCandidates() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredConstructorArgumentWithMultipleNonQualifiedCandidates() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedConstructorArgumentTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(UnsatisfiedDependencyException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> assertThat(ex.getBeanName()).isEqualTo("autowired"));
}
@Test
void autowiredFieldResolvesQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
person1.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldTestBean bean = (QualifiedFieldTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredMethodParameterResolvesQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
person1.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedMethodParameterTestBean bean =
(QualifiedMethodParameterTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredConstructorArgumentResolvesQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
person1.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedConstructorArgumentTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedConstructorArgumentTestBean bean =
(QualifiedConstructorArgumentTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test // gh-33345
void autowiredConstructorArgumentResolvesJakartaNamedCandidate() {
Class<JakartaNamedConstructorArgumentTestBean> testBeanClass = JakartaNamedConstructorArgumentTestBean.class;
AnnotationConfigApplicationContext context =
new AnnotationConfigApplicationContext(testBeanClass, JakartaCat.class, JakartaDog.class);
JakartaNamedConstructorArgumentTestBean bean = context.getBean(testBeanClass);
assertThat(bean.getAnimal1().getName()).isEqualTo("Jakarta Tiger");
assertThat(bean.getAnimal2().getName()).isEqualTo("Jakarta Fido");
}
@Test
void autowiredFieldResolvesQualifiedCandidateWithDefaultValueAndNoValueOnBeanDefinition() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
// qualifier added, but includes no value
person1.addQualifier(new AutowireCandidateQualifier(TestQualifierWithDefaultValue.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldWithDefaultValueTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldWithDefaultValueTestBean bean =
(QualifiedFieldWithDefaultValueTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredFieldDoesNotResolveCandidateWithDefaultValueAndConflictingValueOnBeanDefinition() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
// qualifier added, and non-default value specified
person1.addQualifier(new AutowireCandidateQualifier(TestQualifierWithDefaultValue.class, "not the default"));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldWithDefaultValueTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredFieldResolvesWithDefaultValueAndExplicitDefaultValueOnBeanDefinition() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
// qualifier added, and value matches the default
person1.addQualifier(new AutowireCandidateQualifier(TestQualifierWithDefaultValue.class, "default"));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldWithDefaultValueTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldWithDefaultValueTestBean bean =
(QualifiedFieldWithDefaultValueTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredFieldResolvesWithMultipleQualifierValues() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
AutowireCandidateQualifier qualifier = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier.setAttribute("number", 456);
person1.addQualifier(qualifier);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
AutowireCandidateQualifier qualifier2 = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier2.setAttribute("number", 123);
person2.addQualifier(qualifier2);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldWithMultipleAttributesTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldWithMultipleAttributesTestBean bean =
(QualifiedFieldWithMultipleAttributesTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(MARK);
}
@Test
void autowiredFieldDoesNotResolveWithMultipleQualifierValuesAndConflictingDefaultValue() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
AutowireCandidateQualifier qualifier = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier.setAttribute("number", 456);
person1.addQualifier(qualifier);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
AutowireCandidateQualifier qualifier2 = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier2.setAttribute("number", 123);
qualifier2.setAttribute("value", "not the default");
person2.addQualifier(qualifier2);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldWithMultipleAttributesTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredFieldResolvesWithMultipleQualifierValuesAndExplicitDefaultValue() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
AutowireCandidateQualifier qualifier = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier.setAttribute("number", 456);
person1.addQualifier(qualifier);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
AutowireCandidateQualifier qualifier2 = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier2.setAttribute("number", 123);
qualifier2.setAttribute("value", "default");
person2.addQualifier(qualifier2);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldWithMultipleAttributesTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldWithMultipleAttributesTestBean bean =
(QualifiedFieldWithMultipleAttributesTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(MARK);
}
@Test
void autowiredFieldDoesNotResolveWithMultipleQualifierValuesAndMultipleMatchingCandidates() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
AutowireCandidateQualifier qualifier = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier.setAttribute("number", 123);
person1.addQualifier(qualifier);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
AutowireCandidateQualifier qualifier2 = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier2.setAttribute("number", 123);
qualifier2.setAttribute("value", "default");
person2.addQualifier(qualifier2);
context.registerBeanDefinition(PERSON1, person1);
context.registerBeanDefinition(PERSON2, person2);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldWithMultipleAttributesTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredConstructorArgumentDoesNotResolveWithBaseQualifierAndNonDefaultValueAndMultipleMatchingCandidates() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue("the real juergen");
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
person1.addQualifier(new AutowireCandidateQualifier(Qualifier.class, JUERGEN));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue("juergen imposter");
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
person2.addQualifier(new AutowireCandidateQualifier(Qualifier.class, JUERGEN));
context.registerBeanDefinition("juergen1", person1);
context.registerBeanDefinition("juergen2", person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedConstructorArgumentWithBaseQualifierNonDefaultValueTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(UnsatisfiedDependencyException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> assertThat(ex.getBeanName()).isEqualTo("autowired"));
}
private static
|
InjectAnnotationAutowireContextTests
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/Synchronized.java
|
{
"start": 47497,
"end": 50393
}
|
class ____<E extends @Nullable Object> extends SynchronizedQueue<E>
implements Deque<E> {
SynchronizedDeque(Deque<E> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override
Deque<E> delegate() {
return (Deque<E>) super.delegate();
}
@Override
public void addFirst(E e) {
synchronized (mutex) {
delegate().addFirst(e);
}
}
@Override
public void addLast(E e) {
synchronized (mutex) {
delegate().addLast(e);
}
}
@Override
public boolean offerFirst(E e) {
synchronized (mutex) {
return delegate().offerFirst(e);
}
}
@Override
public boolean offerLast(E e) {
synchronized (mutex) {
return delegate().offerLast(e);
}
}
@Override
public E removeFirst() {
synchronized (mutex) {
return delegate().removeFirst();
}
}
@Override
public E removeLast() {
synchronized (mutex) {
return delegate().removeLast();
}
}
@Override
public @Nullable E pollFirst() {
synchronized (mutex) {
return delegate().pollFirst();
}
}
@Override
public @Nullable E pollLast() {
synchronized (mutex) {
return delegate().pollLast();
}
}
@Override
public E getFirst() {
synchronized (mutex) {
return delegate().getFirst();
}
}
@Override
public E getLast() {
synchronized (mutex) {
return delegate().getLast();
}
}
@Override
public @Nullable E peekFirst() {
synchronized (mutex) {
return delegate().peekFirst();
}
}
@Override
public @Nullable E peekLast() {
synchronized (mutex) {
return delegate().peekLast();
}
}
@Override
public boolean removeFirstOccurrence(@Nullable Object o) {
synchronized (mutex) {
return delegate().removeFirstOccurrence(o);
}
}
@Override
public boolean removeLastOccurrence(@Nullable Object o) {
synchronized (mutex) {
return delegate().removeLastOccurrence(o);
}
}
@Override
public void push(E e) {
synchronized (mutex) {
delegate().push(e);
}
}
@Override
public E pop() {
synchronized (mutex) {
return delegate().pop();
}
}
@Override
public Iterator<E> descendingIterator() {
synchronized (mutex) {
return delegate().descendingIterator();
}
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
static <R extends @Nullable Object, C extends @Nullable Object, V extends @Nullable Object>
Table<R, C, V> table(Table<R, C, V> table, @Nullable Object mutex) {
return new SynchronizedTable<>(table, mutex);
}
static final
|
SynchronizedDeque
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/MockNotUsedInProductionTest.java
|
{
"start": 8374,
"end": 9062
}
|
class ____ {
@Mock private Test test;
public Object test() {
doAnswer(
a -> {
when(test.test()).thenReturn(null);
return null;
})
.when(test)
.test();
return null;
}
}
""")
.addOutputLines(
"Test.java",
"""
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.mockito.Mock;
|
Test
|
java
|
apache__kafka
|
metadata/src/test/java/org/apache/kafka/controller/QuorumControllerTestEnv.java
|
{
"start": 1608,
"end": 1966
}
|
class ____ implements AutoCloseable {
private final List<QuorumController> controllers;
private final MockRaftClientTestEnv clientEnv;
private final Map<Integer, MockFaultHandler> fatalFaultHandlers = new HashMap<>();
private final Map<Integer, MockFaultHandler> nonFatalFaultHandlers = new HashMap<>();
public static
|
QuorumControllerTestEnv
|
java
|
apache__spark
|
mllib/src/test/java/org/apache/spark/ml/classification/JavaGBTClassifierSuite.java
|
{
"start": 1193,
"end": 2868
}
|
class ____ extends SharedSparkSession {
@Test
public void runDT() {
int nPoints = 20;
double A = 2.0;
double B = -1.5;
JavaRDD<LabeledPoint> data = jsc.parallelize(
LogisticRegressionSuite.generateLogisticInputAsList(A, B, nPoints, 42), 2).cache();
Map<Integer, Integer> categoricalFeatures = new HashMap<>();
Dataset<Row> dataFrame = TreeTests.setMetadata(data, categoricalFeatures, 2);
// This tests setters. Training with various options is tested in Scala.
GBTClassifier rf = new GBTClassifier()
.setMaxDepth(2)
.setMaxBins(10)
.setMinInstancesPerNode(5)
.setMinInfoGain(0.0)
.setMaxMemoryInMB(256)
.setCacheNodeIds(false)
.setCheckpointInterval(10)
.setSubsamplingRate(1.0)
.setSeed(1234)
.setMaxIter(3)
.setStepSize(0.1)
.setMaxDepth(2); // duplicate setMaxDepth to check builder pattern
for (String lossType : GBTClassifier.supportedLossTypes()) {
rf.setLossType(lossType);
}
GBTClassificationModel model = rf.fit(dataFrame);
model.transform(dataFrame);
model.totalNumNodes();
model.toDebugString();
model.trees();
model.treeWeights();
/*
// TODO: Add test once save/load are implemented. SPARK-6725
File tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark");
String path = tempDir.toURI().toString();
try {
model3.save(sc.sc(), path);
GBTClassificationModel sameModel = GBTClassificationModel.load(sc.sc(), path);
TreeTests.checkEqual(model3, sameModel);
} finally {
Utils.deleteRecursively(tempDir);
}
*/
}
}
|
JavaGBTClassifierSuite
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/JodaPlusMinusLongTest.java
|
{
"start": 3216,
"end": 3668
}
|
class ____ {
private static final Duration PLUS = Duration.ZERO.plus(Duration.millis(42));
private static final Duration MINUS = Duration.ZERO.minus(Duration.millis(42));
}
""")
.doTest();
}
@Test
public void durationPlusMinusLong() {
helper
.addSourceLines(
"TestClass.java",
"""
import org.joda.time.Duration;
public
|
TestClass
|
java
|
quarkusio__quarkus
|
extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/BeginOptions.java
|
{
"start": 199,
"end": 1123
}
|
class ____ {
boolean commitOnRequestScopeEnd;
int timeout = 0;
/**
* If this method is called the transaction will be automatically committed when the request scope is destroyed, instead of
* being rolled back.
* <p>
*
* @return These options
*/
public BeginOptions commitOnRequestScopeEnd() {
commitOnRequestScopeEnd = true;
return this;
}
/**
* Sets the transaction timeout for transactions created by this builder. A value of zero refers to the system default.
*
* @param seconds The timeout in seconds
* @return This builder
* @throws IllegalArgumentException If seconds is negative
*/
public BeginOptions timeout(int seconds) {
if (seconds < 0) {
throw new IllegalArgumentException("seconds cannot be negative");
}
this.timeout = seconds;
return this;
}
}
|
BeginOptions
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/client/OidcLogoutConfigurerTests.java
|
{
"start": 17852,
"end": 18340
}
|
class ____ {
@Bean
@Order(1)
SecurityFilterChain filters(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize.anyRequest().authenticated())
.oauth2Login(Customizer.withDefaults())
.oidcLogout((oidc) -> oidc
.backChannel(Customizer.withDefaults())
);
// @formatter:on
return http.build();
}
}
@Configuration
@EnableWebSecurity
@Import(RegistrationConfig.class)
static
|
SelfLogoutUriConfig
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/logging/logback/DefaultLogbackConfigurationTests.java
|
{
"start": 1165,
"end": 4321
}
|
class ____ {
private final LoggerContext loggerContext = new LoggerContext();
private final LogbackConfigurator logbackConfigurator = new LogbackConfigurator(this.loggerContext);
@Test
void defaultLogbackXmlContainsConsoleLogPattern() throws Exception {
assertThatDefaultXmlContains("CONSOLE_LOG_PATTERN", DefaultLogbackConfiguration.CONSOLE_LOG_PATTERN);
}
@Test
void defaultLogbackXmlContainsFileLogPattern() throws Exception {
assertThatDefaultXmlContains("FILE_LOG_PATTERN", DefaultLogbackConfiguration.FILE_LOG_PATTERN);
}
@Test
void consoleLogCharsetShouldUseConsoleCharsetIfConsoleAvailable() {
DefaultLogbackConfiguration logbackConfiguration = spy(new DefaultLogbackConfiguration(null));
Console console = mock(Console.class);
given(console.charset()).willReturn(StandardCharsets.UTF_16);
given(logbackConfiguration.getConsole()).willReturn(console);
logbackConfiguration.apply(this.logbackConfigurator);
assertThat(this.loggerContext.getProperty("CONSOLE_LOG_CHARSET")).isEqualTo(StandardCharsets.UTF_16.name());
}
@Test
void consoleLogCharsetShouldDefaultToUtf8WhenConsoleIsNull() {
DefaultLogbackConfiguration logbackConfiguration = spy(new DefaultLogbackConfiguration(null));
given(logbackConfiguration.getConsole()).willReturn(null);
logbackConfiguration.apply(this.logbackConfigurator);
assertThat(this.loggerContext.getProperty("CONSOLE_LOG_CHARSET")).isEqualTo(StandardCharsets.UTF_8.name());
}
@Test
void consoleLogCharsetShouldUseSystemPropertyIfSet() {
withSystemProperty("CONSOLE_LOG_CHARSET", StandardCharsets.US_ASCII.name(), () -> {
new DefaultLogbackConfiguration(null).apply(this.logbackConfigurator);
assertThat(this.loggerContext.getProperty("CONSOLE_LOG_CHARSET"))
.isEqualTo(StandardCharsets.US_ASCII.name());
});
}
@Test
void fileLogCharsetShouldUseSystemPropertyIfSet() {
withSystemProperty("FILE_LOG_CHARSET", StandardCharsets.ISO_8859_1.name(), () -> {
new DefaultLogbackConfiguration(null).apply(this.logbackConfigurator);
assertThat(this.loggerContext.getProperty("FILE_LOG_CHARSET"))
.isEqualTo(StandardCharsets.ISO_8859_1.name());
});
}
@Test
void fileLogCharsetShouldDefaultToUtf8() {
new DefaultLogbackConfiguration(null).apply(this.logbackConfigurator);
assertThat(this.loggerContext.getProperty("FILE_LOG_CHARSET")).isEqualTo(StandardCharsets.UTF_8.name());
}
private void assertThatDefaultXmlContains(String name, String value) throws Exception {
String expected = "<property name=\"%s\" value=\"%s\"/>".formatted(name, value);
assertThat(defaultXmlContent()).contains(expected);
}
private String defaultXmlContent() throws IOException {
return StreamUtils.copyToString(getClass().getResourceAsStream("defaults.xml"), StandardCharsets.UTF_8);
}
private static void withSystemProperty(String name, String value, Runnable action) {
String previous = System.getProperty(name);
try {
System.setProperty(name, value);
action.run();
}
finally {
if (previous != null) {
System.setProperty(name, previous);
}
else {
System.clearProperty(name);
}
}
}
}
|
DefaultLogbackConfigurationTests
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
|
{
"start": 23350,
"end": 23899
}
|
class ____ {
final URL url;
int counter;
private ResourceCounter(URL url) {
this.url = url;
this.counter = 0;
}
private void increaseCounter() {
this.counter++;
}
private boolean decreaseCounter() {
this.counter--;
checkState(
this.counter >= 0,
String.format("Invalid reference count[%d] which must >= 0", this.counter));
return this.counter == 0;
}
}
}
|
ResourceCounter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderDocument.java
|
{
"start": 24791,
"end": 25129
}
|
interface ____ {
ParseField PRINCIPAL = new ParseField("principal");
ParseField EMAIL = new ParseField("email");
ParseField NAME = new ParseField("name");
ParseField ROLES = new ParseField("roles");
ParseField EXTENSIONS = new ParseField("extensions");
}
|
Attributes
|
java
|
apache__flink
|
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromArray.java
|
{
"start": 1079,
"end": 1770
}
|
class ____<OUT> implements Extractor<Object, OUT> {
private static final long serialVersionUID = -5161386546695574359L;
private int fieldId = 0;
/** Extracts the first field (id 0) from the array. */
public FieldFromArray() {
// noting to do => will use default 0
}
/**
* Extracts the field with the given id from the array.
*
* @param fieldId The id of the field which will be extracted from the array.
*/
public FieldFromArray(int fieldId) {
this.fieldId = fieldId;
}
@SuppressWarnings("unchecked")
@Override
public OUT extract(Object in) {
return (OUT) Array.get(in, fieldId);
}
}
|
FieldFromArray
|
java
|
playframework__playframework
|
web/play-java-forms/src/main/java/play/data/validation/Constraints.java
|
{
"start": 12489,
"end": 13821
}
|
class ____ extends Validator<String>
implements ConstraintValidator<MinLength, String> {
public static final String message = "error.minLength";
private long min;
public MinLengthValidator() {}
public MinLengthValidator(long value) {
this.min = value;
}
public void initialize(MinLength constraintAnnotation) {
this.min = constraintAnnotation.value();
}
public boolean isValid(String object) {
if (object == null || object.isEmpty()) {
return true;
}
return object.length() >= min;
}
public Tuple<String, Object[]> getErrorMessageKey() {
return Tuple(message, new Object[] {min});
}
}
/**
* Constructs a 'minLength' validator.
*
* @param value the minimum length value.
* @return the MinLengthValidator
*/
public static Validator<String> minLength(long value) {
return new MinLengthValidator(value);
}
// --- MaxLength
/** Defines a maximum length for a string field. */
@Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER, TYPE_USE})
@Retention(RUNTIME)
@Constraint(validatedBy = MaxLengthValidator.class)
@Repeatable(play.data.validation.Constraints.MaxLength.List.class)
@Display(
name = "constraint.maxLength",
attributes = {"value"})
public @
|
MinLengthValidator
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java
|
{
"start": 1462,
"end": 8776
}
|
class ____ extends ESIndexLevelReplicationTestCase {
static final PeerRecoveryTargetService.RecoveryListener listener = new PeerRecoveryTargetService.RecoveryListener() {
@Override
public void onRecoveryDone(
RecoveryState state,
ShardLongFieldRange timestampMillisFieldRange,
ShardLongFieldRange eventIngestedMillisFieldRange
) {
}
@Override
public void onRecoveryFailure(RecoveryFailedException e, boolean sendShardFailure) {
}
};
public void testLastAccessTimeUpdate() throws Exception {
try (ReplicationGroup shards = createGroup(0)) {
final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool);
final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica());
try (RecoveriesCollection.RecoveryRef status = collection.getRecovery(recoveryId)) {
final long lastSeenTime = status.target().lastAccessTime();
assertBusy(() -> {
try (RecoveriesCollection.RecoveryRef currentStatus = collection.getRecovery(recoveryId)) {
assertThat("access time failed to update", lastSeenTime, lessThan(currentStatus.target().lastAccessTime()));
}
});
} finally {
collection.cancelRecovery(recoveryId, "life");
}
}
}
public void testRecoveryTimeout() throws Exception {
try (ReplicationGroup shards = createGroup(0)) {
final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool);
final AtomicBoolean failed = new AtomicBoolean();
final CountDownLatch latch = new CountDownLatch(1);
final long recoveryId = startRecovery(
collection,
shards.getPrimaryNode(),
shards.addReplica(),
new PeerRecoveryTargetService.RecoveryListener() {
@Override
public void onRecoveryDone(
RecoveryState state,
ShardLongFieldRange timestampMillisFieldRange,
ShardLongFieldRange eventIngestedMillisFieldRange
) {
latch.countDown();
}
@Override
public void onRecoveryFailure(RecoveryFailedException e, boolean sendShardFailure) {
failed.set(true);
latch.countDown();
}
},
TimeValue.timeValueMillis(100)
);
try {
latch.await(30, TimeUnit.SECONDS);
assertTrue("recovery failed to timeout", failed.get());
} finally {
collection.cancelRecovery(recoveryId, "meh");
}
}
}
public void testRecoveryCancellation() throws Exception {
try (ReplicationGroup shards = createGroup(0)) {
final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool);
final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica());
final long recoveryId2 = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica());
try (RecoveriesCollection.RecoveryRef recoveryRef = collection.getRecovery(recoveryId)) {
ShardId shardId = recoveryRef.target().shardId();
assertTrue("failed to cancel recoveries", collection.cancelRecoveriesForShard(shardId, "test"));
assertThat("all recoveries should be cancelled", collection.size(), equalTo(0));
} finally {
collection.cancelRecovery(recoveryId, "meh");
collection.cancelRecovery(recoveryId2, "meh");
}
}
}
public void testResetRecovery() throws Exception {
try (ReplicationGroup shards = createGroup(0)) {
shards.startAll();
int numDocs = randomIntBetween(1, 15);
shards.indexDocs(numDocs);
final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool);
IndexShard shard = shards.addReplica();
final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shard);
RecoveryTarget recoveryTarget = collection.getRecoveryTarget(recoveryId);
final int currentAsTarget = shard.recoveryStats().currentAsTarget();
final int referencesToStore = recoveryTarget.store().refCount();
IndexShard indexShard = recoveryTarget.indexShard();
Store store = recoveryTarget.store();
String tempFileName = recoveryTarget.getTempNameForFile("foobar");
RecoveryTarget resetRecovery = collection.resetRecovery(recoveryId, TimeValue.timeValueMinutes(60));
final long resetRecoveryId = resetRecovery.recoveryId();
assertNotSame(recoveryTarget, resetRecovery);
assertNotSame(recoveryTarget.cancellableThreads(), resetRecovery.cancellableThreads());
assertSame(indexShard, resetRecovery.indexShard());
assertSame(store, resetRecovery.store());
assertEquals(referencesToStore, resetRecovery.store().refCount());
assertEquals(currentAsTarget, shard.recoveryStats().currentAsTarget());
assertEquals(recoveryTarget.refCount(), 0);
if (Assertions.ENABLED) {
expectThrows(AssertionError.class, recoveryTarget::store);
expectThrows(AssertionError.class, recoveryTarget::indexShard);
}
String resetTempFileName = resetRecovery.getTempNameForFile("foobar");
assertNotEquals(tempFileName, resetTempFileName);
assertEquals(currentAsTarget, shard.recoveryStats().currentAsTarget());
try (RecoveriesCollection.RecoveryRef newRecoveryRef = collection.getRecovery(resetRecoveryId)) {
shards.recoverReplica(shard, (s, n) -> {
assertSame(s, newRecoveryRef.target().indexShard());
return newRecoveryRef.target();
}, false);
}
shards.assertAllEqual(numDocs);
assertNull("recovery is done", collection.getRecovery(recoveryId));
}
}
static long startRecovery(RecoveriesCollection collection, DiscoveryNode sourceNode, IndexShard shard) {
return startRecovery(collection, sourceNode, shard, listener, TimeValue.timeValueMinutes(60));
}
static long startRecovery(
RecoveriesCollection collection,
DiscoveryNode sourceNode,
IndexShard indexShard,
PeerRecoveryTargetService.RecoveryListener listener,
TimeValue timeValue
) {
final DiscoveryNode rNode = getDiscoveryNode(indexShard.routingEntry().currentNodeId());
indexShard.markAsRecovering("remote", new RecoveryState(indexShard.routingEntry(), sourceNode, rNode));
indexShard.prepareForIndexRecovery();
return collection.startRecovery(indexShard, sourceNode, 0L, null, listener, timeValue, null);
}
}
|
RecoveriesCollectionTests
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/GpuDevice.java
|
{
"start": 1005,
"end": 2209
}
|
class ____ implements Serializable, Comparable {
protected int index;
protected int minorNumber;
private static final long serialVersionUID = -6812314470754667710L;
public GpuDevice(int index, int minorNumber) {
this.index = index;
this.minorNumber = minorNumber;
}
public GpuDevice() {
}
public int getIndex() {
return index;
}
public int getMinorNumber() {
return minorNumber;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof GpuDevice)) {
return false;
}
GpuDevice other = (GpuDevice) obj;
return index == other.index && minorNumber == other.minorNumber;
}
@Override
public int compareTo(Object obj) {
if (!(obj instanceof GpuDevice)) {
return -1;
}
GpuDevice other = (GpuDevice) obj;
int result = Integer.compare(index, other.index);
if (0 != result) {
return result;
}
return Integer.compare(minorNumber, other.minorNumber);
}
@Override
public int hashCode() {
final int prime = 47;
return prime * index + minorNumber;
}
@Override
public String toString() {
return "(index=" + index + ",minor_number=" + minorNumber + ")";
}
}
|
GpuDevice
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultipleOutputFormat.java
|
{
"start": 1635,
"end": 1929
}
|
class ____ used for a map reduce job with at least one reducer.
* The reducer wants to write data to different files depending on the actual
* keys. It is assumed that a key (or value) encodes the actual key (value)
* and the desired location for the actual key (value).
*
* Case two: This
|
is
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/wall/WallStatTest_select_1.java
|
{
"start": 431,
"end": 2706
}
|
class ____ extends TestCase {
private String sql = "SELECT b.* FROM lhwbbs_posts_reply a LEFT JOIN lhwbbs_posts b ON a.pid=b.pid WHERE a.rpid=? AND b.disabled=? ORDER BY a.pid DESC";
protected void setUp() throws Exception {
WallContext.clearContext();
}
protected void tearDown() throws Exception {
WallContext.clearContext();
}
public void testMySql() throws Exception {
WallProvider provider = new MySqlWallProvider();
assertTrue(provider.checkValid(sql));
{
WallTableStat tableStat = provider.getTableStat("lhwbbs_posts_reply");
assertEquals(1, tableStat.getSelectCount());
}
{
WallTableStat tableStat = provider.getTableStat("lhwbbs_posts");
assertEquals(1, tableStat.getSelectCount());
}
}
public void testOracle() throws Exception {
WallProvider provider = new OracleWallProvider();
assertTrue(provider.checkValid(sql));
{
WallTableStat tableStat = provider.getTableStat("lhwbbs_posts_reply");
assertEquals(1, tableStat.getSelectCount());
}
{
WallTableStat tableStat = provider.getTableStat("lhwbbs_posts");
assertEquals(1, tableStat.getSelectCount());
}
}
public void testPG() throws Exception {
WallProvider provider = new PGWallProvider();
assertTrue(provider.checkValid(sql));
{
WallTableStat tableStat = provider.getTableStat("lhwbbs_posts_reply");
assertEquals(1, tableStat.getSelectCount());
}
{
WallTableStat tableStat = provider.getTableStat("lhwbbs_posts");
assertEquals(1, tableStat.getSelectCount());
}
}
public void testSQLServer() throws Exception {
WallProvider provider = new SQLServerWallProvider();
assertTrue(provider.checkValid(sql));
{
WallTableStat tableStat = provider.getTableStat("lhwbbs_posts_reply");
assertEquals(1, tableStat.getSelectCount());
}
{
WallTableStat tableStat = provider.getTableStat("lhwbbs_posts");
assertEquals(1, tableStat.getSelectCount());
}
}
}
|
WallStatTest_select_1
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/GeoCoordinates.java
|
{
"start": 250,
"end": 1961
}
|
class ____ {
private final Number x;
private final Number y;
/**
* Creates new {@link GeoCoordinates}.
*
* @param x the longitude, must not be {@code null}.
* @param y the latitude, must not be {@code null}.
*/
public GeoCoordinates(Number x, Number y) {
LettuceAssert.notNull(x, "X must not be null");
LettuceAssert.notNull(y, "Y must not be null");
this.x = x;
this.y = y;
}
/**
* Creates new {@link GeoCoordinates}.
*
* @param x the longitude, must not be {@code null}.
* @param y the latitude, must not be {@code null}.
* @return {@link GeoCoordinates}.
*/
public static GeoCoordinates create(Number x, Number y) {
return new GeoCoordinates(x, y);
}
/**
*
* @return the longitude.
*/
public Number getX() {
return x;
}
/**
*
* @return the latitude.
*/
public Number getY() {
return y;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (!(o instanceof GeoCoordinates))
return false;
GeoCoordinates geoCoords = (GeoCoordinates) o;
if (x != null ? !x.equals(geoCoords.x) : geoCoords.x != null)
return false;
return !(y != null ? !y.equals(geoCoords.y) : geoCoords.y != null);
}
@Override
public int hashCode() {
int result = x != null ? x.hashCode() : 0;
result = 31 * result + (y != null ? y.hashCode() : 0);
return result;
}
@Override
public String toString() {
return String.format("(%s, %s)", getX(), getY());
}
}
|
GeoCoordinates
|
java
|
elastic__elasticsearch
|
x-pack/extras/plugins/microsoft-graph-authz/src/main/java/org/elasticsearch/xpack/security/authz/microsoft/MicrosoftGraphAuthzPlugin.java
|
{
"start": 580,
"end": 1089
}
|
class ____ extends Plugin implements SecurityExtension {
@Override
public Map<String, Realm.Factory> getRealms(SecurityComponents components) {
return Map.of(
MicrosoftGraphAuthzRealmSettings.REALM_TYPE,
config -> new MicrosoftGraphAuthzRealm(components.roleMapper(), config, components.threadPool())
);
}
@Override
public List<Setting<?>> getSettings() {
return MicrosoftGraphAuthzRealmSettings.getSettings();
}
}
|
MicrosoftGraphAuthzPlugin
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/factory/parameterizedfactory/BFactory.java
|
{
"start": 931,
"end": 2263
}
|
class ____ {
String name = "fromFactory";
boolean postConstructCalled = false;
boolean getCalled = false;
@Inject
private A fieldA;
@Inject protected A anotherField;
@Inject A a;
private A methodInjected;
@Inject private void injectMe(A a) {
methodInjected = a;
}
A getFieldA() {
return fieldA;
}
A getAnotherField() {
return anotherField;
}
A getMethodInjected() {
return methodInjected;
}
@PostConstruct
void init() {
assertState();
postConstructCalled = true;
name = name.toUpperCase();
}
@Singleton
B get() {
assert postConstructCalled : "post construct should have been called";
assertState();
getCalled = true;
B b = new B();
b.setName(name);
return b;
}
@Prototype
C buildC(B b, @Parameter int count) {
return new C(b, count);
}
private void assertState() {
assert fieldA != null: "private fields should have been injected first";
assert anotherField != null: "protected fields should have been injected field";
assert a != null: "public properties should have been injected first";
assert methodInjected != null: "methods should have been injected first";
}
}
|
BFactory
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/i18n/MessageBundleNameCollisionTest.java
|
{
"start": 352,
"end": 1537
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.overrideConfigKey("quarkus.default-locale", "en_US")
.withApplicationRoot((jar) -> jar
.addClasses(EmailBundles.class)
.addAsResource("messages/EmailBundles_started.properties")
.addAsResource("messages/EmailBundles_started_en.properties")
.addAsResource("messages/EmailBundles_startedValidator.properties")
.addAsResource("messages/EmailBundles_startedValidator_en.properties"));
@Inject
Engine engine;
@Test
public void testBundleMethodIsFound() {
EmailBundles.startedValidator startedValidator = MessageBundles.get(EmailBundles.startedValidator.class);
assertEquals("You will be notified with another email when it is your turn to sign.",
startedValidator.turnEmailWillBeSent());
assertEquals("You will be notified with another email when it is your turn to sign.",
engine.parse("{EmailBundles_startedValidator:turnEmailWillBeSent()}").render());
}
}
|
MessageBundleNameCollisionTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/introspect/IntrospectorPairTest.java
|
{
"start": 28697,
"end": 30128
}
|
class ____ {
public UnreadableInjectedBean(@JacksonInject UnreadableBean injectBean) {
this.injectBean = injectBean;
}
@JsonProperty
private String foo;
@JsonIgnore
private UnreadableBean injectBean;
}
@Test
public void testMergingIntrospectorsForInjection() throws Exception {
AnnotationIntrospector testIntrospector = new TestIntrospector();
ObjectMapper mapper = JsonMapper.builder()
.injectableValues(new TestInjector())
.annotationIntrospector(new AnnotationIntrospectorPair(testIntrospector,
new JacksonAnnotationIntrospector()))
.build();
ReadableInjectedBean bean = mapper.readValue("{\"foo\": \"bob\"}", ReadableInjectedBean.class);
assertEquals("bob", bean.foo);
assertEquals(SimpleEnum.TWO, bean.injectBean.value);
boolean successReadingUnreadableInjectedBean;
try {
/*UnreadableInjectedBean noBean =*/ mapper.readValue("{\"foo\": \"bob\"}", UnreadableInjectedBean.class);
successReadingUnreadableInjectedBean = true;
} catch (DatabindException e) {
successReadingUnreadableInjectedBean = false;
assertTrue(e.getMessage().contains("Conflicting setter definitions"));
}
assertFalse(successReadingUnreadableInjectedBean);
}
}
|
UnreadableInjectedBean
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/metrics/stats/Frequency.java
|
{
"start": 993,
"end": 2008
}
|
class ____ {
private final MetricName name;
private final double centerValue;
/**
* Create an instance with the given name and center point value.
*
* @param name the name of the frequency metric; may not be null
* @param centerValue the value identifying the {@link Frequencies} bucket to be reported
*/
public Frequency(MetricName name, double centerValue) {
this.name = name;
this.centerValue = centerValue;
}
/**
* Get the name of this metric.
*
* @return the metric name; never null
*/
public MetricName name() {
return this.name;
}
/**
* Get the value of this metrics center point.
*
* @return the center point value
*/
public double centerValue() {
return this.centerValue;
}
@Override
public String toString() {
return "Frequency(" +
"name=" + name +
", centerValue=" + centerValue +
')';
}
}
|
Frequency
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/validators/ParameterNamesInClassesTest.java
|
{
"start": 723,
"end": 1166
}
|
class ____ {
void method(int paramName) {
// deliberately empty
}
@Test
public void javacParametersEnabled() throws Exception {
assertEquals("Please enable saving parameter names via the -parameters javac argument",
"paramName",
getClass()
.getDeclaredMethod("method", Integer.TYPE)
.getParameters()[0].getName());
}
}
|
ParameterNamesInClassesTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/inject/JacksonInject2465Test.java
|
{
"start": 522,
"end": 1187
}
|
class ____ {
// 17-Apr-2020, tatu: Forcing this to be ignored will work around the
// problem, but this really should not be necessary.
// @JsonIgnore
private final Internal2465 str;
private final int id;
@JsonCreator
public TestCase2465(@JacksonInject(useInput = OptBoolean.FALSE) Internal2465 str,
@JsonProperty("id") int id) {
this.str = str;
this.id = id;
}
public int fetchId() {
return id;
}
public Internal2465 fetchInternal() {
return str;
}
}
public static final
|
TestCase2465
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.