language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/injection/erroneous/interceptorBean/InterceptorBeanInjectionProducerTest.java | {
"start": 919,
"end": 1059
} | class ____ {
@Produces
String produceSth(Interceptor<MyBean> interceptor) {
return "foo";
}
}
}
| MyBean |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/OrderedBytes.java | {
"start": 925,
"end": 3133
} | class ____ {
private static final int MIN_KEY_LENGTH = 1;
/**
* Returns the upper byte range for a key with a given fixed size maximum suffix
*
* Assumes the minimum key length is one byte
*/
static Bytes upperRange(final Bytes key, final byte[] maxSuffix) {
final byte[] bytes = key.get();
final ByteBuffer rangeEnd = ByteBuffer.allocate(bytes.length + maxSuffix.length);
final int firstTimestampByte = maxSuffix[0] & 0xFF;
// if firstTimestampByte is 0, we'll put all key bytes into range result because `(bytes[i] & 0xFF) >= firstTimestampByte`
// will always be true (this is a byte to unsigned int conversion comparison)
if (firstTimestampByte == 0) {
return Bytes.wrap(
rangeEnd
.put(bytes)
.put(maxSuffix)
.array()
);
} else {
int i = 0;
while (i < bytes.length && (
i < MIN_KEY_LENGTH // assumes keys are at least one byte long
|| (bytes[i] & 0xFF) >= firstTimestampByte
)) {
rangeEnd.put(bytes[i++]);
}
rangeEnd.put(maxSuffix);
rangeEnd.flip();
final byte[] res = new byte[rangeEnd.remaining()];
ByteBuffer.wrap(res).put(rangeEnd);
return Bytes.wrap(res);
}
}
static Bytes lowerRange(final Bytes key, final byte[] minSuffix) {
final byte[] bytes = key.get();
final ByteBuffer rangeStart = ByteBuffer.allocate(bytes.length + minSuffix.length);
// any key in the range would start at least with the given prefix to be
// in the range, and have at least SUFFIX_SIZE number of trailing zero bytes.
// unless there is a maximum key length, you can keep appending more zero bytes
// to keyFrom to create a key that will match the range, yet that would precede
// KeySchema.toBinaryKey(keyFrom, from, 0) in byte order
return Bytes.wrap(
rangeStart
.put(bytes)
.put(minSuffix)
.array()
);
}
}
| OrderedBytes |
java | apache__camel | components/camel-aws/camel-aws2-ec2/src/generated/java/org/apache/camel/component/aws2/ec2/AWS2EC2EndpointUriFactory.java | {
"start": 518,
"end": 2953
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":label";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(19);
props.add("accessKey");
props.add("amazonEc2Client");
props.add("label");
props.add("lazyStartProducer");
props.add("operation");
props.add("overrideEndpoint");
props.add("pojoRequest");
props.add("profileCredentialsName");
props.add("proxyHost");
props.add("proxyPort");
props.add("proxyProtocol");
props.add("region");
props.add("secretKey");
props.add("sessionToken");
props.add("trustAllCertificates");
props.add("uriEndpointOverride");
props.add("useDefaultCredentialsProvider");
props.add("useProfileCredentialsProvider");
props.add("useSessionCredentials");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(3);
secretProps.add("accessKey");
secretProps.add("secretKey");
secretProps.add("sessionToken");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "aws2-ec2".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "label", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| AWS2EC2EndpointUriFactory |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java | {
"start": 7660,
"end": 7950
} | class ____ {
static final Logger LOG = LoggerFactory.
getLogger(ApplicationSummary.class);
// Escape sequences
static final char EQUALS = '=';
static final char[] charsToEscape =
{StringUtils.COMMA, EQUALS, StringUtils.ESCAPE_CHAR};
static | ApplicationSummary |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigDocEnumValue.java | {
"start": 710,
"end": 752
} | enum ____.
*/
String value();
}
| type |
java | apache__camel | components/camel-fhir/camel-fhir-api/src/main/java/org/apache/camel/component/fhir/api/FhirSearch.java | {
"start": 1446,
"end": 3745
} | class ____ {
private final IGenericClient client;
public FhirSearch(IGenericClient client) {
this.client = client;
}
/**
* Perform a search directly by URL.
*
* @param url The URL to search for. Note that this URL may be complete (e.g.
* "http://example.com/base/Patient?name=foo") in which case the client's base URL will be
* ignored. Or it can be relative (e.g. "Patient?name=foo") in which case the client's base
* URL will be used.
* @param extraParameters see {@link ExtraParameters} for a full list of parameters that can be passed, may be NULL
* @return the {@link IBaseBundle}
*/
public IBaseBundle searchByUrl(String url, Map<ExtraParameters, Object> extraParameters) {
IQuery<IBaseBundle> query = client.search().byUrl(url);
ExtraParameters.process(extraParameters, query);
return query.execute();
}
/**
* Perform a search by resource name.
*
* @param resourceName The resource to search for.
* @param searchParameters A set of search parameters to the query.
* @param searchStyle Forces the query to perform the search using the given method (allowable methods are
* described in the <a href="http://www.hl7.org/fhir/search.html">FHIR Search
* Specification</a>). The default search style is HTTP POST.
* @param extraParameters see {@link ExtraParameters} for a full list of parameters that can be passed, may be
* NULL
* @return the {@link IBaseBundle}
*/
public IBaseBundle searchByResource(
String resourceName, Map<String, List<String>> searchParameters, SearchStyleEnum searchStyle,
Map<ExtraParameters, Object> extraParameters) {
IQuery<IBaseBundle> query = client.search().forResource(resourceName);
query.whereMap(Objects.requireNonNullElse(searchParameters, Map.of()));
query.usingStyle(Objects.requireNonNullElse(searchStyle, SearchStyleEnum.POST));
ExtraParameters.process(extraParameters, query);
return query.execute();
}
}
| FhirSearch |
java | apache__rocketmq | tools/src/test/java/org/apache/rocketmq/tools/command/message/ConsumeMessageCommandTest.java | {
"start": 2207,
"end": 9123
} | class ____ {
private static ConsumeMessageCommand consumeMessageCommand;
private static final PullResult PULL_RESULT = mockPullResult();
private static PullResult mockPullResult() {
MessageExt msg = new MessageExt();
msg.setBody(new byte[] {'a'});
List<MessageExt> msgFoundList = new ArrayList<>();
msgFoundList.add(msg);
return new PullResult(PullStatus.FOUND, 2, 0, 1, msgFoundList);
}
@BeforeClass
public static void init() throws MQClientException, RemotingException, MQBrokerException, InterruptedException,
NoSuchFieldException, IllegalAccessException {
consumeMessageCommand = new ConsumeMessageCommand();
DefaultMQPullConsumer defaultMQPullConsumer = mock(DefaultMQPullConsumer.class);
assignPullResult(defaultMQPullConsumer);
when(defaultMQPullConsumer.minOffset(any(MessageQueue.class))).thenReturn(Long.valueOf(0));
when(defaultMQPullConsumer.maxOffset(any(MessageQueue.class))).thenReturn(Long.valueOf(1));
final Set<MessageQueue> mqList = new HashSet<>();
mqList.add(new MessageQueue());
when(defaultMQPullConsumer.fetchSubscribeMessageQueues(anyString())).thenReturn(mqList);
Field producerField = ConsumeMessageCommand.class.getDeclaredField("defaultMQPullConsumer");
producerField.setAccessible(true);
producerField.set(consumeMessageCommand, defaultMQPullConsumer);
}
@AfterClass
public static void terminate() {
}
private static void assignPullResult() {
assignPullResult(null);
}
private static void assignPullResult(DefaultMQPullConsumer defaultMQPullConsumer) {
try {
if (defaultMQPullConsumer == null) {
Field producerField = ConsumeMessageCommand.class.getDeclaredField("defaultMQPullConsumer");
producerField.setAccessible(true);
defaultMQPullConsumer = (DefaultMQPullConsumer) producerField.get(consumeMessageCommand);
}
when(defaultMQPullConsumer.pull(any(MessageQueue.class), anyString(), anyLong(), anyInt()))
.thenReturn(PULL_RESULT);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
@Test
public void testExecuteDefault() throws SubCommandException {
PrintStream out = System.out;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
System.setOut(new PrintStream(bos));
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t mytopic", "-n localhost:9876"};
assignPullResult();
CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + consumeMessageCommand.commandName(),
subargs, consumeMessageCommand.buildCommandlineOptions(options), new DefaultParser());
consumeMessageCommand.execute(commandLine, options, null);
System.setOut(out);
String s = new String(bos.toByteArray(), StandardCharsets.UTF_8);
Assert.assertTrue(s.contains("Consume ok"));
}
@Test
public void testExecuteByCondition() throws SubCommandException {
PrintStream out = System.out;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
System.setOut(new PrintStream(bos));
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t mytopic", "-b localhost", "-i 0", "-n localhost:9876"};
CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + consumeMessageCommand.commandName(),
subargs, consumeMessageCommand.buildCommandlineOptions(options), new DefaultParser());
assignPullResult();
consumeMessageCommand.execute(commandLine, options, null);
System.setOut(out);
String s = new String(bos.toByteArray(), StandardCharsets.UTF_8);
Assert.assertTrue(s.contains("Consume ok"));
}
@Test
public void testExecuteDefaultWhenPullMessageByQueueGotException() throws SubCommandException, InterruptedException, RemotingException, MQClientException, MQBrokerException, NoSuchFieldException, IllegalAccessException {
DefaultMQPullConsumer defaultMQPullConsumer = mock(DefaultMQPullConsumer.class);
when(defaultMQPullConsumer.pull(any(MessageQueue.class), anyString(), anyLong(), anyInt())).thenThrow(MQClientException.class);
Field producerField = ConsumeMessageCommand.class.getDeclaredField("defaultMQPullConsumer");
producerField.setAccessible(true);
producerField.set(consumeMessageCommand, defaultMQPullConsumer);
PrintStream out = System.out;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
System.setOut(new PrintStream(bos));
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t topic-not-existu", "-n localhost:9876"};
CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + consumeMessageCommand.commandName(),
subargs, consumeMessageCommand.buildCommandlineOptions(options), new DefaultParser());
consumeMessageCommand.execute(commandLine, options, null);
System.setOut(out);
String s = new String(bos.toByteArray(), StandardCharsets.UTF_8);
Assert.assertFalse(s.contains("Consume ok"));
}
@Test
public void testExecuteByConditionWhenPullMessageByQueueGotException() throws IllegalAccessException, InterruptedException, RemotingException, MQClientException, MQBrokerException, NoSuchFieldException, SubCommandException {
DefaultMQPullConsumer defaultMQPullConsumer = mock(DefaultMQPullConsumer.class);
when(defaultMQPullConsumer.pull(any(MessageQueue.class), anyString(), anyLong(), anyInt())).thenThrow(MQClientException.class);
Field producerField = ConsumeMessageCommand.class.getDeclaredField("defaultMQPullConsumer");
producerField.setAccessible(true);
producerField.set(consumeMessageCommand, defaultMQPullConsumer);
PrintStream out = System.out;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
System.setOut(new PrintStream(bos));
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t mytopic", "-b localhost", "-i 0", "-n localhost:9876"};
CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + consumeMessageCommand.commandName(),
subargs, consumeMessageCommand.buildCommandlineOptions(options), new DefaultParser());
consumeMessageCommand.execute(commandLine, options, null);
System.setOut(out);
String s = new String(bos.toByteArray(), StandardCharsets.UTF_8);
Assert.assertFalse(s.contains("Consume ok"));
}
} | ConsumeMessageCommandTest |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/ssl/KeyStoreFactory.java | {
"start": 1258,
"end": 3368
} | class ____ {
private static final char[] NO_PASSWORD = {};
private KeyStoreFactory() {
}
/**
* Create a new {@link KeyStore} populated with the certificate stored at the
* specified file path and an optional private key.
* @param certPath the path to the certificate authority file
* @param keyPath the path to the private file
* @param alias the alias to use for KeyStore entries
* @return the {@code KeyStore}
*/
static KeyStore create(Path certPath, @Nullable Path keyPath, String alias) {
try {
KeyStore keyStore = getKeyStore();
String certificateText = Files.readString(certPath);
List<X509Certificate> certificates = PemCertificateParser.parse(certificateText);
PrivateKey privateKey = getPrivateKey(keyPath);
try {
addCertificates(keyStore, certificates.toArray(X509Certificate[]::new), privateKey, alias);
}
catch (KeyStoreException ex) {
throw new IllegalStateException("Error adding certificates to KeyStore: " + ex.getMessage(), ex);
}
return keyStore;
}
catch (GeneralSecurityException | IOException ex) {
throw new IllegalStateException("Error creating KeyStore: " + ex.getMessage(), ex);
}
}
private static KeyStore getKeyStore()
throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException {
KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
keyStore.load(null);
return keyStore;
}
private static @Nullable PrivateKey getPrivateKey(@Nullable Path path) throws IOException {
if (path != null && Files.exists(path)) {
String text = Files.readString(path);
return PemPrivateKeyParser.parse(text);
}
return null;
}
private static void addCertificates(KeyStore keyStore, X509Certificate[] certificates,
@Nullable PrivateKey privateKey, String alias) throws KeyStoreException {
if (privateKey != null) {
keyStore.setKeyEntry(alias, privateKey, NO_PASSWORD, certificates);
}
else {
for (int index = 0; index < certificates.length; index++) {
keyStore.setCertificateEntry(alias + "-" + index, certificates[index]);
}
}
}
}
| KeyStoreFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/enumeratedvalue/EnumAndColumnDefinitionTest.java | {
"start": 3827,
"end": 4258
} | enum ____ an
ordinal so a TinyIntJdbcType is used.
Using @JdbcTypeCode(SqlTypes.INTEGER) the ordinal values is saved.
*/
@JdbcTypeCode(SqlTypes.INTEGER)
AnotherMyEnum anotherMyEnum;
@Enumerated(value = EnumType.ORDINAL)
@Column(name = "another_my_enum_2", columnDefinition = "VARCHAR(255) NOT NULL")
/*
Without specifying the JdbcTypeCode Hibernate has no clue
of the column being a VARCHAR and being the | type |
java | grpc__grpc-java | stub/src/main/java/io/grpc/stub/ClientCallStreamObserver.java | {
"start": 791,
"end": 1314
} | class ____ obtained via {@link ClientResponseObserver}, or by
* manually casting the {@code StreamObserver} returned by a stub.
*
* <p>Like {@code StreamObserver}, implementations are not required to be thread-safe; if multiple
* threads will be writing to an instance concurrently, the application must synchronize its calls.
*
* <p>DO NOT MOCK: The API is too complex to reliably mock. Use InProcessChannelBuilder to create
* "real" RPCs suitable for testing and make a fake for the server-side.
*/
public abstract | is |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 153879,
"end": 154072
} | class ____ {
@RequestMapping("/bind")
public String handle(DataClass data) {
return data.param1 + "-" + data.param2 + "-" + data.param3;
}
}
@RestController
static | DataClassController |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestWordStats.java | {
"start": 2078,
"end": 3591
} | class ____ {
private long wordsRead = 0;
private long wordLengthsRead = 0;
private long wordLengthsReadSquared = 0;
public WordStdDevReader() {
}
public double read(String path) throws IOException {
FileSystem fs = FileSystem.get(new Configuration());
FileStatus[] files = fs.listStatus(new Path(path));
for (FileStatus fileStat : files) {
if (!fileStat.isFile())
continue;
BufferedReader br = null;
try {
br = new BufferedReader(new InputStreamReader(fs.open(fileStat.getPath())));
String line;
while ((line = br.readLine()) != null) {
StringTokenizer st = new StringTokenizer(line);
String word;
while (st.hasMoreTokens()) {
word = st.nextToken();
this.wordsRead++;
this.wordLengthsRead += word.length();
this.wordLengthsReadSquared += (long) Math.pow(word.length(), 2.0);
}
}
} catch (IOException e) {
System.out.println("Output could not be read!");
throw e;
} finally {
br.close();
}
}
double mean = (((double) this.wordLengthsRead) / ((double) this.wordsRead));
mean = Math.pow(mean, 2.0);
double term = (((double) this.wordLengthsReadSquared / ((double) this.wordsRead)));
double stddev = Math.sqrt((term - mean));
return stddev;
}
}
/**
* Modified internal test | WordStdDevReader |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsStore.java | {
"start": 1345,
"end": 6971
} | interface ____ extends IOStatistics,
IOStatisticsSetters,
IOStatisticsAggregator,
DurationTrackerFactory {
/**
* Increment a counter by one.
*
* No-op if the counter is unknown.
* @param key statistics key
* @return old value or, if the counter is unknown: 0
*/
default long incrementCounter(String key) {
return incrementCounter(key, 1);
}
/**
* Increment a counter.
*
* No-op if the counter is unknown.
* If the value is negative, it is ignored.
* @param key statistics key
* @param value value to increment
* @return the updated value or, if the counter is unknown: 0
*/
long incrementCounter(String key, long value);
/**
* Increment a gauge.
* <p>
* No-op if the gauge is unknown.
* </p>
* @param key statistics key
* @param value value to increment
* @return new value or 0 if the key is unknown
*/
long incrementGauge(String key, long value);
/**
* Increment a maximum.
* <p>
* No-op if the maximum is unknown.
* </p>
* @param key statistics key
* @param value value to increment
* @return new value or 0 if the key is unknown
*/
long incrementMaximum(String key, long value);
/**
* Increment a minimum.
* <p>
* No-op if the minimum is unknown.
* </p>
* @param key statistics key
* @param value value to increment
* @return new value or 0 if the key is unknown
*/
long incrementMinimum(String key, long value);
/**
* Add a minimum sample: if less than the current value,
* updates the value.
* <p>
* No-op if the minimum is unknown.
* </p>
* @param key statistics key
* @param value sample value
*/
void addMinimumSample(String key, long value);
/**
* Add a maximum sample: if greater than the current value,
* updates the value.
* <p>
* No-op if the key is unknown.
* </p>
* @param key statistics key
* @param value sample value
*/
void addMaximumSample(String key, long value);
/**
* Add a sample to the mean statistics.
* <p>
* No-op if the key is unknown.
* </p>
* @param key key
* @param value sample value.
*/
void addMeanStatisticSample(String key, long value);
/**
* Reset all statistics.
* Unsynchronized.
*/
void reset();
/**
* Get a reference to the atomic instance providing the
* value for a specific counter. This is useful if
* the value is passed around.
* @param key statistic name
* @return the reference
* @throws NullPointerException if there is no entry of that name
*/
AtomicLong getCounterReference(String key);
/**
* Get a reference to the atomic instance providing the
* value for a specific maximum. This is useful if
* the value is passed around.
* @param key statistic name
* @return the reference
* @throws NullPointerException if there is no entry of that name
*/
AtomicLong getMaximumReference(String key);
/**
* Get a reference to the atomic instance providing the
* value for a specific minimum. This is useful if
* the value is passed around.
* @param key statistic name
* @return the reference
* @throws NullPointerException if there is no entry of that name
*/
AtomicLong getMinimumReference(String key);
/**
* Get a reference to the atomic instance providing the
* value for a specific gauge. This is useful if
* the value is passed around.
* @param key statistic name
* @return the reference
* @throws NullPointerException if there is no entry of that name
*/
AtomicLong getGaugeReference(String key);
/**
* Get a reference to the atomic instance providing the
* value for a specific meanStatistic. This is useful if
* the value is passed around.
* @param key statistic name
* @return the reference
* @throws NullPointerException if there is no entry of that name
*/
MeanStatistic getMeanStatistic(String key);
/**
* Add a duration to the min/mean/max statistics, using the
* given prefix and adding a suffix for each specific value.
*
* The update is not-atomic, even though each individual statistic
* is updated thread-safely. If two threads update the values
* simultaneously, at the end of each operation the state will
* be correct. It is only during the sequence that the statistics
* may be observably inconsistent.
* @param prefix statistic prefix
* @param durationMillis duration in milliseconds.
*/
void addTimedOperation(String prefix, long durationMillis);
/**
* Add a duration to the min/mean/max statistics, using the
* given prefix and adding a suffix for each specific value.;
* increment tha counter whose name == prefix.
*
* If any of the statistics are not registered, that part of
* the sequence will be omitted -the rest will proceed.
*
* The update is not-atomic, even though each individual statistic
* is updated thread-safely. If two threads update the values
* simultaneously, at the end of each operation the state will
* be correct. It is only during the sequence that the statistics
* may be observably inconsistent.
* @param prefix statistic prefix
* @param duration duration
*/
void addTimedOperation(String prefix, Duration duration);
/**
* Add a statistics sample as a min, max and mean and count.
* @param key key to add.
* @param count count.
*/
default void addSample(String key, long count) {
incrementCounter(key, count);
addMeanStatisticSample(key, count);
addMaximumSample(key, count);
addMinimumSample(key, count);
}
}
| IOStatisticsStore |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java | {
"start": 19140,
"end": 19669
} | class ____ extends MurmurHash3SingleValues {
private final LongValues values;
Long(LongValues values) {
this.values = values;
}
@Override
public boolean advanceExact(int docId) throws IOException {
return values.advanceExact(docId);
}
@Override
public long longValue() throws IOException {
return BitMixer.mix64(values.longValue());
}
}
private static | Long |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsDelta.java | {
"start": 981,
"end": 1161
} | interface ____ {@link RowLevelOperation}. Data sources can implement this interface
* to indicate they support handling deltas of rows.
*
* @since 3.4.0
*/
@Experimental
public | for |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-core-stubs/maven-plugin-plugin/src/main/java/org/apache/maven/plugin/coreit/DescriptorMojo.java | {
"start": 1448,
"end": 2959
} | class ____ extends AbstractMojo {
/**
* The current Maven project.
*/
@Parameter(defaultValue = "${project}", required = true, readonly = true)
private MavenProject project;
/**
* The path to the output file, relative to the project base directory.
*
*/
@Parameter
private String pathname = "target/plugin-descriptor.txt";
/**
* Runs this mojo.
*
* @throws MojoExecutionException If the output file could not be created.
* @throws MojoFailureException If the output file has not been set.
*/
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("[MAVEN-CORE-IT-LOG] Using output file path: " + pathname);
if (pathname == null || pathname.length() <= 0) {
throw new MojoFailureException("Path name for output file has not been specified");
}
File outputFile = new File(pathname);
if (!outputFile.isAbsolute()) {
outputFile = new File(project.getBasedir(), pathname).getAbsoluteFile();
}
getLog().info("[MAVEN-CORE-IT-LOG] Creating output file: " + outputFile);
try {
outputFile.getParentFile().mkdirs();
outputFile.createNewFile();
} catch (IOException e) {
throw new MojoExecutionException("Output file could not be created: " + pathname, e);
}
getLog().info("[MAVEN-CORE-IT-LOG] Created output file: " + outputFile);
}
}
| DescriptorMojo |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/procedure/StoreProcedureStatementsClosedTest.java | {
"start": 3216,
"end": 3594
} | class ____ {
@Id
private long id;
private String name;
public SimpleEntity() {
}
public SimpleEntity(String name) {
this.name = name;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
| SimpleEntity |
java | google__guice | extensions/testlib/test/com/google/inject/testing/fieldbinder/BoundFieldModuleTest.java | {
"start": 32141,
"end": 32213
} | interface ____ {}
public void testBoundFieldModuleWithPermits() {
| Foo |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemIsolatedClassloader.java | {
"start": 1398,
"end": 1490
} | class ____ extends AbstractS3ATestBase {
private static | ITestS3AFileSystemIsolatedClassloader |
java | grpc__grpc-java | binder/src/main/java/io/grpc/binder/internal/IntentNameResolver.java | {
"start": 10971,
"end": 12550
} | class ____ extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
// Get off the main thread and into the correct SynchronizationContext.
syncContext.executeLater(IntentNameResolver.this::resolve);
offloadExecutor.execute(syncContext::drain);
}
}
@SuppressLint("UnprotectedReceiver") // All of these are protected system broadcasts.
private void registerReceiver() {
checkState(receiver == null, "Already registered!");
receiver = new PackageChangeReceiver();
IntentFilter filter = new IntentFilter();
filter.addDataScheme("package");
filter.addAction(Intent.ACTION_PACKAGE_ADDED);
filter.addAction(Intent.ACTION_PACKAGE_CHANGED);
filter.addAction(Intent.ACTION_PACKAGE_REMOVED);
filter.addAction(Intent.ACTION_PACKAGE_REPLACED);
targetUserContext.registerReceiver(receiver, filter);
if (Build.VERSION.SDK_INT >= 24) {
// Clients running in direct boot mode must refresh() when the user is unlocked because
// that's when `directBootAware=false` services become visible in queryIntentServices()
// results. ACTION_BOOT_COMPLETED would work too but it's delivered with lower priority.
targetUserContext.registerReceiver(receiver, new IntentFilter(Intent.ACTION_USER_UNLOCKED));
}
}
private void maybeUnregisterReceiver() {
if (receiver != null) { // NameResolver API contract appears to allow shutdown without start().
targetUserContext.unregisterReceiver(receiver);
receiver = null;
}
}
}
| PackageChangeReceiver |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/codec/support/BaseDefaultCodecs.java | {
"start": 4912,
"end": 34564
} | class ____ implements CodecConfigurer.DefaultCodecs, CodecConfigurer.DefaultCodecConfig {
static final boolean JACKSON_PRESENT;
static final boolean JACKSON_2_PRESENT;
static final boolean GSON_PRESENT;
private static final boolean JACKSON_SMILE_PRESENT;
private static final boolean JACKSON_2_SMILE_PRESENT;
private static final boolean JACKSON_CBOR_PRESENT;
private static final boolean JACKSON_2_CBOR_PRESENT;
private static final boolean JAXB_2_PRESENT;
private static final boolean PROTOBUF_PRESENT;
static final boolean NETTY_BYTE_BUF_PRESENT;
static final boolean KOTLIN_SERIALIZATION_CBOR_PRESENT;
static final boolean KOTLIN_SERIALIZATION_JSON_PRESENT;
static final boolean KOTLIN_SERIALIZATION_PROTOBUF_PRESENT;
static {
ClassLoader classLoader = BaseCodecConfigurer.class.getClassLoader();
JACKSON_PRESENT = ClassUtils.isPresent("tools.jackson.databind.ObjectMapper", classLoader);
JACKSON_2_PRESENT = ClassUtils.isPresent("com.fasterxml.jackson.databind.ObjectMapper", classLoader) &&
ClassUtils.isPresent("com.fasterxml.jackson.core.JsonGenerator", classLoader);
GSON_PRESENT = ClassUtils.isPresent("com.google.gson.Gson", classLoader);
JACKSON_SMILE_PRESENT = JACKSON_PRESENT && ClassUtils.isPresent("tools.jackson.dataformat.smile.SmileMapper", classLoader);
JACKSON_2_SMILE_PRESENT = JACKSON_2_PRESENT && ClassUtils.isPresent("com.fasterxml.jackson.dataformat.smile.SmileFactory", classLoader);
JACKSON_CBOR_PRESENT = JACKSON_PRESENT && ClassUtils.isPresent("tools.jackson.dataformat.cbor.CBORMapper", classLoader);
JACKSON_2_CBOR_PRESENT = JACKSON_2_PRESENT && ClassUtils.isPresent("com.fasterxml.jackson.dataformat.cbor.databind.CBORMapper", classLoader);
JAXB_2_PRESENT = ClassUtils.isPresent("jakarta.xml.bind.Binder", classLoader);
PROTOBUF_PRESENT = ClassUtils.isPresent("com.google.protobuf.Message", classLoader);
NETTY_BYTE_BUF_PRESENT = ClassUtils.isPresent("io.netty.buffer.ByteBuf", classLoader);
KOTLIN_SERIALIZATION_CBOR_PRESENT = ClassUtils.isPresent("kotlinx.serialization.cbor.Cbor", classLoader);
KOTLIN_SERIALIZATION_JSON_PRESENT = ClassUtils.isPresent("kotlinx.serialization.json.Json", classLoader);
KOTLIN_SERIALIZATION_PROTOBUF_PRESENT = ClassUtils.isPresent("kotlinx.serialization.protobuf.ProtoBuf", classLoader);
}
// Custom codec overrides supplied via the CodecConfigurer callbacks below.
// A null value means "not customized"; the corresponding getXxx() accessor
// lazily creates a default instance on first access.
private @Nullable Decoder<?> jacksonJsonDecoder;
private @Nullable Encoder<?> jacksonJsonEncoder;
private @Nullable Decoder<?> gsonDecoder;
private @Nullable Encoder<?> gsonEncoder;
private @Nullable Encoder<?> jacksonSmileEncoder;
private @Nullable Decoder<?> jacksonSmileDecoder;
private @Nullable Encoder<?> jacksonCborEncoder;
private @Nullable Decoder<?> jacksonCborDecoder;
private @Nullable Decoder<?> protobufDecoder;
private @Nullable Encoder<?> protobufEncoder;
private @Nullable Decoder<?> jaxb2Decoder;
private @Nullable Encoder<?> jaxb2Encoder;
private @Nullable Decoder<?> kotlinSerializationCborDecoder;
private @Nullable Encoder<?> kotlinSerializationCborEncoder;
private @Nullable Decoder<?> kotlinSerializationJsonDecoder;
private @Nullable Encoder<?> kotlinSerializationJsonEncoder;
private @Nullable Decoder<?> kotlinSerializationProtobufDecoder;
private @Nullable Encoder<?> kotlinSerializationProtobufEncoder;

// Multipart handling: explicitly configured codecs, a fallback supplier for
// part writers, and an optional custom multipart reader.
private @Nullable DefaultMultipartCodecs multipartCodecs;
private @Nullable Supplier<List<HttpMessageWriter<?>>> partWritersSupplier;
private @Nullable HttpMessageReader<?> multipartReader;

// Cross-cutting settings applied to every registered codec via initCodec(..).
private @Nullable Consumer<Object> codecConsumer;
private @Nullable Integer maxInMemorySize;
private @Nullable Boolean enableLoggingRequestDetails;

// Whether the default codecs should be registered at all.
private boolean registerDefaults = true;

// The default reader and writer instances to use
private final List<HttpMessageReader<?>> typedReaders = new ArrayList<>();
private final List<HttpMessageReader<?>> objectReaders = new ArrayList<>();
private final List<HttpMessageWriter<?>> typedWriters = new ArrayList<>();
private final List<HttpMessageWriter<?>> objectWriters = new ArrayList<>();
/**
 * Create a new instance and populate the default reader and writer lists.
 */
BaseDefaultCodecs() {
    initReaders();
    initWriters();
}
/**
 * Reset and initialize typed readers and object readers.
 * <p>Each init method clears its list and repopulates it; the lists remain
 * empty when default registration is disabled.
 * @since 5.3.3
 */
protected void initReaders() {
    initTypedReaders();
    initObjectReaders();
}

/**
 * Reset and initialize typed writers and object writers.
 * @since 5.3.3
 */
protected void initWriters() {
    initTypedWriters();
    initObjectWriters();
}
/**
 * Create a deep copy of the given {@link BaseDefaultCodecs}.
 * <p>Note: the reader and writer lists themselves are copied, but the codec
 * instances they contain are shared with {@code other}; only
 * {@code multipartCodecs} is cloned.
 */
protected BaseDefaultCodecs(BaseDefaultCodecs other) {
    this.jacksonJsonDecoder = other.jacksonJsonDecoder;
    this.jacksonJsonEncoder = other.jacksonJsonEncoder;
    this.gsonDecoder = other.gsonDecoder;
    this.gsonEncoder = other.gsonEncoder;
    this.jacksonSmileDecoder = other.jacksonSmileDecoder;
    this.jacksonSmileEncoder = other.jacksonSmileEncoder;
    this.jacksonCborDecoder = other.jacksonCborDecoder;
    this.jacksonCborEncoder = other.jacksonCborEncoder;
    this.protobufDecoder = other.protobufDecoder;
    this.protobufEncoder = other.protobufEncoder;
    this.jaxb2Decoder = other.jaxb2Decoder;
    this.jaxb2Encoder = other.jaxb2Encoder;
    this.kotlinSerializationCborDecoder = other.kotlinSerializationCborDecoder;
    this.kotlinSerializationCborEncoder = other.kotlinSerializationCborEncoder;
    this.kotlinSerializationJsonDecoder = other.kotlinSerializationJsonDecoder;
    this.kotlinSerializationJsonEncoder = other.kotlinSerializationJsonEncoder;
    this.kotlinSerializationProtobufDecoder = other.kotlinSerializationProtobufDecoder;
    this.kotlinSerializationProtobufEncoder = other.kotlinSerializationProtobufEncoder;
    this.multipartCodecs = other.multipartCodecs != null ?
            new DefaultMultipartCodecs(other.multipartCodecs) : null;
    // NOTE(review): partWritersSupplier is intentionally(?) not copied here —
    // presumably the owning configurer re-sets it via setPartWritersSupplier;
    // verify against BaseCodecConfigurer.
    this.multipartReader = other.multipartReader;
    this.codecConsumer = other.codecConsumer;
    this.maxInMemorySize = other.maxInMemorySize;
    this.enableLoggingRequestDetails = other.enableLoggingRequestDetails;
    this.registerDefaults = other.registerDefaults;
    this.typedReaders.addAll(other.typedReaders);
    this.objectReaders.addAll(other.objectReaders);
    this.typedWriters.addAll(other.typedWriters);
    this.objectWriters.addAll(other.objectWriters);
}
// CodecConfigurer override callbacks: each setter records the custom codec and
// re-initializes the affected reader/writer list(s) so the override takes
// effect immediately. Encoder setters for multipart-capable formats also
// rebuild the typed writers, whose multipart writer derives its part writers
// from the general writers (see getPartWriters()).

@Override
public void jacksonJsonDecoder(Decoder<?> decoder) {
    this.jacksonJsonDecoder = decoder;
    initObjectReaders();
}

@Override
public void jacksonJsonEncoder(Encoder<?> encoder) {
    this.jacksonJsonEncoder = encoder;
    initObjectWriters();
    initTypedWriters();
}

@Override
public void gsonDecoder(Decoder<?> decoder) {
    this.gsonDecoder = decoder;
    initObjectReaders();
}

@Override
public void gsonEncoder(Encoder<?> encoder) {
    this.gsonEncoder = encoder;
    initObjectWriters();
    initTypedWriters();
}

@Override
public void jacksonSmileDecoder(Decoder<?> decoder) {
    this.jacksonSmileDecoder = decoder;
    initObjectReaders();
}

@Override
public void jacksonSmileEncoder(Encoder<?> encoder) {
    this.jacksonSmileEncoder = encoder;
    initObjectWriters();
    initTypedWriters();
}

@Override
public void jacksonCborDecoder(Decoder<?> decoder) {
    this.jacksonCborDecoder = decoder;
    initObjectReaders();
}

@Override
public void jacksonCborEncoder(Encoder<?> encoder) {
    this.jacksonCborEncoder = encoder;
    initObjectWriters();
    initTypedWriters();
}

// Protobuf codecs are registered among the typed readers/writers.
@Override
public void protobufDecoder(Decoder<?> decoder) {
    this.protobufDecoder = decoder;
    initTypedReaders();
}

@Override
public void protobufEncoder(Encoder<?> encoder) {
    this.protobufEncoder = encoder;
    initTypedWriters();
}

@Override
public void jaxb2Decoder(Decoder<?> decoder) {
    this.jaxb2Decoder = decoder;
    initObjectReaders();
}

@Override
public void jaxb2Encoder(Encoder<?> encoder) {
    this.jaxb2Encoder = encoder;
    initObjectWriters();
}

@Override
public void kotlinSerializationCborDecoder(Decoder<?> decoder) {
    this.kotlinSerializationCborDecoder = decoder;
    initObjectReaders();
}

@Override
public void kotlinSerializationCborEncoder(Encoder<?> encoder) {
    this.kotlinSerializationCborEncoder = encoder;
    initObjectWriters();
}

@Override
public void kotlinSerializationJsonDecoder(Decoder<?> decoder) {
    this.kotlinSerializationJsonDecoder = decoder;
    initObjectReaders();
}

@Override
public void kotlinSerializationJsonEncoder(Encoder<?> encoder) {
    this.kotlinSerializationJsonEncoder = encoder;
    initObjectWriters();
}

@Override
public void kotlinSerializationProtobufDecoder(Decoder<?> decoder) {
    this.kotlinSerializationProtobufDecoder = decoder;
    initObjectReaders();
}

@Override
public void kotlinSerializationProtobufEncoder(Encoder<?> encoder) {
    this.kotlinSerializationProtobufEncoder = encoder;
    initObjectWriters();
}
/**
 * Register a consumer to be applied to every default codec instance.
 * <p>Consumers accumulate: a new consumer is chained after any previously
 * registered one, and all codecs are re-initialized so the chain is applied.
 */
@Override
public void configureDefaultCodec(Consumer<Object> codecConsumer) {
    Consumer<Object> existing = this.codecConsumer;
    this.codecConsumer = (existing != null ? existing.andThen(codecConsumer) : codecConsumer);
    initReaders();
    initWriters();
}
/**
 * Set the byte-count limit for buffering input in memory.
 * <p>Readers are re-initialized only when the value actually changes
 * ({@code null} means the limit has not been set yet).
 */
@Override
public void maxInMemorySize(int byteCount) {
    Integer current = this.maxInMemorySize;
    if (current == null || current.intValue() != byteCount) {
        this.maxInMemorySize = byteCount;
        initReaders();
    }
}
// Returns the configured in-memory size limit, or null if not set.
@Override
public @Nullable Integer maxInMemorySize() {
    return this.maxInMemorySize;
}
/**
 * Toggle logging of potentially sensitive request details.
 * <p>Readers and writers are rebuilt only when the flag actually changes
 * ({@code null} means it has not been set yet).
 */
@Override
public void enableLoggingRequestDetails(boolean enable) {
    Boolean current = this.enableLoggingRequestDetails;
    if (current == null || current.booleanValue() != enable) {
        this.enableLoggingRequestDetails = enable;
        initReaders();
        initWriters();
    }
}
/**
 * Return the registry for multipart codec customizations, creating it
 * lazily on first access.
 */
@Override
public CodecConfigurer.MultipartCodecs multipartCodecs() {
    DefaultMultipartCodecs codecs = this.multipartCodecs;
    if (codecs == null) {
        codecs = new DefaultMultipartCodecs();
        this.multipartCodecs = codecs;
    }
    return codecs;
}
// Replace the default multipart reader; multipart readers live among the
// typed readers, so rebuild that list.
@Override
public void multipartReader(HttpMessageReader<?> multipartReader) {
    this.multipartReader = multipartReader;
    initTypedReaders();
}

/**
 * Set a supplier for part writers to use when
 * {@link #multipartCodecs()} are not explicitly configured.
 * That's the same set of writers as for general except for the multipart
 * writer itself.
 */
void setPartWritersSupplier(Supplier<List<HttpMessageWriter<?>>> supplier) {
    this.partWritersSupplier = supplier;
    // The multipart writer is among the typed writers and consumes this supplier.
    initTypedWriters();
}
// Returns the logging-details flag, or null if never set.
@Override
public @Nullable Boolean isEnableLoggingRequestDetails() {
    return this.enableLoggingRequestDetails;
}

/**
 * Delegate method used from {@link BaseCodecConfigurer#registerDefaults}.
 * <p>Rebuilds all reader/writer lists when the flag changes; with defaults
 * disabled the init methods leave the lists empty.
 */
void registerDefaults(boolean registerDefaults) {
    if (this.registerDefaults != registerDefaults) {
        this.registerDefaults = registerDefaults;
        initReaders();
        initWriters();
    }
}
/**
 * Return readers that support specific types.
 * <p>The returned list is the live internal list, not a copy.
 */
final List<HttpMessageReader<?>> getTypedReaders() {
    return this.typedReaders;
}
/**
 * Reset and initialize typed readers.
 * <p>Registration order is significant; each codec is passed through
 * {@link #addCodec} so that cross-cutting config (max in-memory size,
 * logging details, codec consumer) is applied.
 * @since 5.3.3
 */
protected void initTypedReaders() {
    this.typedReaders.clear();
    if (!this.registerDefaults) {
        return;
    }
    addCodec(this.typedReaders, new DecoderHttpMessageReader<>(new ByteArrayDecoder()));
    addCodec(this.typedReaders, new DecoderHttpMessageReader<>(new ByteBufferDecoder()));
    addCodec(this.typedReaders, new DecoderHttpMessageReader<>(new DataBufferDecoder()));
    if (NETTY_BYTE_BUF_PRESENT) {
        addCodec(this.typedReaders, new DecoderHttpMessageReader<>(new NettyByteBufDecoder()));
    }
    addCodec(this.typedReaders, new ResourceHttpMessageReader(new ResourceDecoder()));
    addCodec(this.typedReaders, new DecoderHttpMessageReader<>(StringDecoder.textPlainOnly()));
    if (PROTOBUF_PRESENT) {
        // NOTE(review): unchecked cast assumes any custom decoder supplied via
        // protobufDecoder(..) is a ProtobufDecoder; otherwise this throws CCE.
        addCodec(this.typedReaders, new DecoderHttpMessageReader<>(this.protobufDecoder != null ?
                (ProtobufDecoder) this.protobufDecoder : new ProtobufDecoder()));
    }
    addCodec(this.typedReaders, new FormHttpMessageReader());
    // A custom multipart reader replaces both the default part reader and the
    // aggregating multipart reader.
    if (this.multipartReader != null) {
        addCodec(this.typedReaders, this.multipartReader);
    }
    else {
        DefaultPartHttpMessageReader partReader = new DefaultPartHttpMessageReader();
        addCodec(this.typedReaders, partReader);
        addCodec(this.typedReaders, new MultipartHttpMessageReader(partReader));
    }
    addCodec(this.typedReaders, new PartEventHttpMessageReader());
    // client vs server..
    extendTypedReaders(this.typedReaders);
}
/**
 * Initialize a codec and add it to the List.
 * <p>Applies the cross-cutting configuration (see {@link #initCodec}) before
 * registration.
 * @since 5.1.13
 */
protected <T> void addCodec(List<T> codecs, T codec) {
    initCodec(codec);
    codecs.add(codec);
}
/**
 * Apply {@link #maxInMemorySize()} and {@link #enableLoggingRequestDetails},
 * if configured by the application, to the given codec, including any
 * codec it contains.
 * <p>Reader/writer wrappers are first unwrapped to the underlying
 * decoder/encoder; the configured {@code codecConsumer} is then applied, and
 * finally the method recurses into nested codecs (multipart part readers,
 * form writers, SSE decoders/encoders).
 */
@SuppressWarnings("removal")
private void initCodec(@Nullable Object codec) {
    // Unwrap reader/writer adapters so the settings reach the actual codec.
    if (codec instanceof DecoderHttpMessageReader<?> decoderHttpMessageReader) {
        codec = decoderHttpMessageReader.getDecoder();
    }
    else if (codec instanceof EncoderHttpMessageWriter<?> encoderHttpMessageWriter) {
        codec = encoderHttpMessageWriter.getEncoder();
    }
    if (codec == null) {
        return;
    }
    // Apply the in-memory size limit to every codec type that supports it.
    Integer size = this.maxInMemorySize;
    if (size != null) {
        if (codec instanceof AbstractDataBufferDecoder<?> abstractDataBufferDecoder) {
            abstractDataBufferDecoder.setMaxInMemorySize(size);
        }
        // Pattern variables in the following if-blocks cannot be named the same as instance fields
        // due to lacking support in Checkstyle: https://github.com/checkstyle/checkstyle/issues/10969
        if (PROTOBUF_PRESENT) {
            if (codec instanceof ProtobufDecoder protobufDec) {
                protobufDec.setMaxMessageSize(size);
            }
        }
        if (KOTLIN_SERIALIZATION_CBOR_PRESENT) {
            if (codec instanceof KotlinSerializationCborDecoder kotlinSerializationCborDec) {
                kotlinSerializationCborDec.setMaxInMemorySize(size);
            }
        }
        if (KOTLIN_SERIALIZATION_JSON_PRESENT) {
            if (codec instanceof KotlinSerializationJsonDecoder kotlinSerializationJsonDec) {
                kotlinSerializationJsonDec.setMaxInMemorySize(size);
            }
        }
        if (KOTLIN_SERIALIZATION_PROTOBUF_PRESENT) {
            if (codec instanceof KotlinSerializationProtobufDecoder kotlinSerializationProtobufDec) {
                kotlinSerializationProtobufDec.setMaxInMemorySize(size);
            }
        }
        if (JACKSON_PRESENT) {
            if (codec instanceof AbstractJacksonDecoder<?> abstractJacksonDecoder) {
                abstractJacksonDecoder.setMaxInMemorySize(size);
            }
        }
        if (JACKSON_2_PRESENT) {
            if (codec instanceof AbstractJackson2Decoder abstractJackson2Decoder) {
                abstractJackson2Decoder.setMaxInMemorySize(size);
            }
        }
        if (JAXB_2_PRESENT) {
            if (codec instanceof Jaxb2XmlDecoder jaxb2XmlDecoder) {
                jaxb2XmlDecoder.setMaxInMemorySize(size);
            }
        }
        if (codec instanceof FormHttpMessageReader formHttpMessageReader) {
            formHttpMessageReader.setMaxInMemorySize(size);
        }
        if (codec instanceof ServerSentEventHttpMessageReader serverSentEventHttpMessageReader) {
            serverSentEventHttpMessageReader.setMaxInMemorySize(size);
        }
        if (codec instanceof DefaultPartHttpMessageReader defaultPartHttpMessageReader) {
            defaultPartHttpMessageReader.setMaxInMemorySize(size);
        }
        if (codec instanceof PartEventHttpMessageReader partEventHttpMessageReader) {
            partEventHttpMessageReader.setMaxInMemorySize(size);
        }
    }
    // Propagate the logging-details flag to codecs that log request content.
    Boolean enable = this.enableLoggingRequestDetails;
    if (enable != null) {
        if (codec instanceof FormHttpMessageReader formHttpMessageReader) {
            formHttpMessageReader.setEnableLoggingRequestDetails(enable);
        }
        if (codec instanceof MultipartHttpMessageReader multipartHttpMessageReader) {
            multipartHttpMessageReader.setEnableLoggingRequestDetails(enable);
        }
        if (codec instanceof DefaultPartHttpMessageReader defaultPartHttpMessageReader) {
            defaultPartHttpMessageReader.setEnableLoggingRequestDetails(enable);
        }
        if (codec instanceof PartEventHttpMessageReader partEventHttpMessageReader) {
            partEventHttpMessageReader.setEnableLoggingRequestDetails(enable);
        }
        if (codec instanceof FormHttpMessageWriter formHttpMessageWriter) {
            formHttpMessageWriter.setEnableLoggingRequestDetails(enable);
        }
        if (codec instanceof MultipartHttpMessageWriter multipartHttpMessageWriter) {
            multipartHttpMessageWriter.setEnableLoggingRequestDetails(enable);
        }
    }
    // User-registered callback sees the unwrapped codec.
    if (this.codecConsumer != null) {
        this.codecConsumer.accept(codec);
    }
    // Recurse for nested codecs
    if (codec instanceof MultipartHttpMessageReader multipartHttpMessageReader) {
        initCodec(multipartHttpMessageReader.getPartReader());
    }
    else if (codec instanceof MultipartHttpMessageWriter multipartHttpMessageWriter) {
        initCodec(multipartHttpMessageWriter.getFormWriter());
    }
    else if (codec instanceof ServerSentEventHttpMessageReader serverSentEventHttpMessageReader) {
        initCodec(serverSentEventHttpMessageReader.getDecoder());
    }
    else if (codec instanceof ServerSentEventHttpMessageWriter serverSentEventHttpMessageWriter) {
        initCodec(serverSentEventHttpMessageWriter.getEncoder());
    }
}
/**
 * Hook for client or server specific typed readers.
 * <p>Default implementation is empty; subclasses append to the given list.
 */
protected void extendTypedReaders(List<HttpMessageReader<?>> typedReaders) {
}

/**
 * Return Object readers (JSON, XML, SSE).
 * <p>The returned list is the live internal list, not a copy.
 */
final List<HttpMessageReader<?>> getObjectReaders() {
    return this.objectReaders;
}
/**
 * Reset and initialize object readers.
 * <p>Registration order is significant: Kotlin Serialization JSON first,
 * then Jackson (preferred over Gson when both are present), Smile, CBOR,
 * JAXB and Kotlin Serialization Protobuf.
 * @since 5.3.3
 */
protected void initObjectReaders() {
    this.objectReaders.clear();
    if (!this.registerDefaults) {
        return;
    }
    if (KOTLIN_SERIALIZATION_JSON_PRESENT) {
        addCodec(this.objectReaders, new DecoderHttpMessageReader<>(getKotlinSerializationJsonDecoder()));
    }
    // Jackson takes precedence over Gson for JSON.
    if (JACKSON_PRESENT || JACKSON_2_PRESENT) {
        addCodec(this.objectReaders, new DecoderHttpMessageReader<>(getJacksonJsonDecoder()));
    }
    else if (GSON_PRESENT) {
        addCodec(this.objectReaders, new DecoderHttpMessageReader<>(getGsonDecoder()));
    }
    if (JACKSON_SMILE_PRESENT || JACKSON_2_SMILE_PRESENT) {
        addCodec(this.objectReaders, new DecoderHttpMessageReader<>(getJacksonSmileDecoder()));
    }
    if (KOTLIN_SERIALIZATION_CBOR_PRESENT) {
        addCodec(this.objectReaders, new DecoderHttpMessageReader<>(getKotlinSerializationCborDecoder()));
    }
    if (JACKSON_CBOR_PRESENT || JACKSON_2_CBOR_PRESENT) {
        addCodec(this.objectReaders, new DecoderHttpMessageReader<>(getJacksonCborDecoder()));
    }
    if (JAXB_2_PRESENT) {
        // NOTE(review): unchecked cast assumes any custom decoder supplied via
        // jaxb2Decoder(..) is a Jaxb2XmlDecoder.
        addCodec(this.objectReaders, new DecoderHttpMessageReader<>(this.jaxb2Decoder != null ?
                (Jaxb2XmlDecoder) this.jaxb2Decoder : new Jaxb2XmlDecoder()));
    }
    if (KOTLIN_SERIALIZATION_PROTOBUF_PRESENT) {
        addCodec(this.objectReaders,
                new DecoderHttpMessageReader<>(getKotlinSerializationProtobufDecoder()));
    }
    // client vs server..
    extendObjectReaders(this.objectReaders);
}
/**
 * Hook for client or server specific Object readers.
 * <p>Default implementation is empty; subclasses append to the given list.
 */
protected void extendObjectReaders(List<HttpMessageReader<?>> objectReaders) {
}
/**
 * Return readers that need to be at the end, after all others.
 * <p>Currently a single catch-all String reader for all mime types; empty
 * when default registration is disabled.
 */
final List<HttpMessageReader<?>> getCatchAllReaders() {
    if (!this.registerDefaults) {
        return Collections.emptyList();
    }
    List<HttpMessageReader<?>> catchAllReaders = new ArrayList<>();
    addCodec(catchAllReaders, new DecoderHttpMessageReader<>(StringDecoder.allMimeTypes()));
    return catchAllReaders;
}
/**
 * Return all writers that support specific types.
 * <p>The returned list is the live internal list, not a copy.
 */
final List<HttpMessageWriter<?>> getTypedWriters() {
    return this.typedWriters;
}

/**
 * Reset and initialize typed writers.
 * <p>Delegates to {@link #getBaseTypedWriters()} and then applies the
 * client/server specific extension hook.
 * @since 5.3.3
 */
protected void initTypedWriters() {
    this.typedWriters.clear();
    if (!this.registerDefaults) {
        return;
    }
    this.typedWriters.addAll(getBaseTypedWriters());
    extendTypedWriters(this.typedWriters);
}
/**
 * Return "base" typed writers only, i.e. common to client and server.
 * <p>Each writer is passed through {@link #addCodec} so cross-cutting config
 * is applied. Registration order is significant.
 */
final List<HttpMessageWriter<?>> getBaseTypedWriters() {
    if (!this.registerDefaults) {
        return Collections.emptyList();
    }
    List<HttpMessageWriter<?>> writers = new ArrayList<>();
    addCodec(writers, new EncoderHttpMessageWriter<>(new ByteArrayEncoder()));
    addCodec(writers, new EncoderHttpMessageWriter<>(new ByteBufferEncoder()));
    addCodec(writers, new EncoderHttpMessageWriter<>(new DataBufferEncoder()));
    if (NETTY_BYTE_BUF_PRESENT) {
        addCodec(writers, new EncoderHttpMessageWriter<>(new NettyByteBufEncoder()));
    }
    addCodec(writers, new ResourceHttpMessageWriter());
    addCodec(writers, new EncoderHttpMessageWriter<>(CharSequenceEncoder.textPlainOnly()));
    if (PROTOBUF_PRESENT) {
        // NOTE(review): unchecked cast assumes any custom encoder supplied via
        // protobufEncoder(..) is a ProtobufEncoder.
        addCodec(writers, new ProtobufHttpMessageWriter(this.protobufEncoder != null ?
                (ProtobufEncoder) this.protobufEncoder : new ProtobufEncoder()));
    }
    // The multipart writer resolves its part writers lazily via getPartWriters().
    addCodec(writers, new MultipartHttpMessageWriter(this::getPartWriters, new FormHttpMessageWriter()));
    addCodec(writers, new PartEventHttpMessageWriter());
    addCodec(writers, new PartHttpMessageWriter());
    return writers;
}
/**
 * Resolve the writers used for multipart parts.
 * <p>Explicitly configured multipart codecs take precedence over the
 * fallback supplier; with neither configured, an empty list is returned.
 */
private List<HttpMessageWriter<?>> getPartWriters() {
    DefaultMultipartCodecs codecs = this.multipartCodecs;
    if (codecs != null) {
        return codecs.getWriters();
    }
    Supplier<List<HttpMessageWriter<?>>> supplier = this.partWritersSupplier;
    return (supplier != null ? supplier.get() : Collections.emptyList());
}
/**
 * Hook for client or server specific typed writers.
 * <p>Default implementation is empty; subclasses append to the given list.
 */
protected void extendTypedWriters(List<HttpMessageWriter<?>> typedWriters) {
}

/**
 * Return Object writers (JSON, XML, SSE).
 * <p>The returned list is the live internal list, not a copy.
 */
final List<HttpMessageWriter<?>> getObjectWriters() {
    return this.objectWriters;
}
/**
 * Reset and initialize object writers.
 * <p>Delegates to {@link #getBaseObjectWriters()} and then applies the
 * client/server specific extension hook.
 * @since 5.3.3
 */
protected void initObjectWriters() {
    this.objectWriters.clear();
    if (!this.registerDefaults) {
        return;
    }
    this.objectWriters.addAll(getBaseObjectWriters());
    extendObjectWriters(this.objectWriters);
}
/**
 * Return "base" object writers only, i.e. common to client and server.
 * <p>Mirrors the decoder ordering of {@code initObjectReaders()}.
 * NOTE(review): unlike {@code getBaseTypedWriters()}, there is no
 * {@code registerDefaults} guard here — callers such as
 * {@code initObjectWriters()} check the flag first; verify any direct callers
 * do the same.
 */
final List<HttpMessageWriter<?>> getBaseObjectWriters() {
    List<HttpMessageWriter<?>> writers = new ArrayList<>();
    if (KOTLIN_SERIALIZATION_JSON_PRESENT) {
        addCodec(writers, new EncoderHttpMessageWriter<>(getKotlinSerializationJsonEncoder()));
    }
    // Jackson takes precedence over Gson for JSON.
    if (JACKSON_PRESENT || JACKSON_2_PRESENT) {
        addCodec(writers, new EncoderHttpMessageWriter<>(getJacksonJsonEncoder()));
    }
    else if (GSON_PRESENT) {
        addCodec(writers, new EncoderHttpMessageWriter<>(getGsonEncoder()));
    }
    if (JACKSON_SMILE_PRESENT || JACKSON_2_SMILE_PRESENT) {
        addCodec(writers, new EncoderHttpMessageWriter<>(getJacksonSmileEncoder()));
    }
    if (KOTLIN_SERIALIZATION_CBOR_PRESENT) {
        addCodec(writers, new EncoderHttpMessageWriter<>(getKotlinSerializationCborEncoder()));
    }
    if (JACKSON_CBOR_PRESENT || JACKSON_2_CBOR_PRESENT) {
        addCodec(writers, new EncoderHttpMessageWriter<>(getJacksonCborEncoder()));
    }
    if (JAXB_2_PRESENT) {
        // NOTE(review): unchecked cast assumes any custom encoder supplied via
        // jaxb2Encoder(..) is a Jaxb2XmlEncoder.
        addCodec(writers, new EncoderHttpMessageWriter<>(this.jaxb2Encoder != null ?
                (Jaxb2XmlEncoder) this.jaxb2Encoder : new Jaxb2XmlEncoder()));
    }
    if (KOTLIN_SERIALIZATION_PROTOBUF_PRESENT) {
        addCodec(writers, new EncoderHttpMessageWriter<>(getKotlinSerializationProtobufEncoder()));
    }
    return writers;
}
/**
 * Hook for client or server specific Object writers.
 * <p>Default implementation is empty; subclasses append to the given list.
 */
protected void extendObjectWriters(List<HttpMessageWriter<?>> objectWriters) {
}

/**
 * Return writers that need to be at the end, after all others.
 * <p>Currently a single catch-all CharSequence writer for all mime types.
 * NOTE(review): this uses {@code result.add(..)} rather than
 * {@code addCodec(..)}, so the codec consumer and cross-cutting config are
 * not applied to the catch-all writer — confirm this is intentional.
 */
List<HttpMessageWriter<?>> getCatchAllWriters() {
    if (!this.registerDefaults) {
        return Collections.emptyList();
    }
    List<HttpMessageWriter<?>> result = new ArrayList<>();
    result.add(new EncoderHttpMessageWriter<>(CharSequenceEncoder.allMimeTypes()));
    return result;
}
// Apply the default cross-cutting configuration to all custom codecs that
// opted in, then invoke any registered default-config consumers.
void applyDefaultConfig(BaseCodecConfigurer.DefaultCustomCodecs customCodecs) {
    applyDefaultConfig(customCodecs.getTypedReaders());
    applyDefaultConfig(customCodecs.getObjectReaders());
    applyDefaultConfig(customCodecs.getTypedWriters());
    applyDefaultConfig(customCodecs.getObjectWriters());
    customCodecs.getDefaultConfigConsumers().forEach(consumer -> consumer.accept(this));
}
/**
 * Apply default configuration to each codec key whose value is {@code true}
 * (i.e. custom codecs that opted into the default config).
 */
private void applyDefaultConfig(Map<?, Boolean> readers) {
    for (Map.Entry<?, Boolean> entry : readers.entrySet()) {
        if (entry.getValue()) {
            initCodec(entry.getKey());
        }
    }
}
// Accessors for use in subclasses...

/**
 * Get or lazily create the Jackson JSON decoder, preferring Jackson 3
 * over Jackson 2 when both are on the classpath.
 * @throws IllegalStateException if neither Jackson generation is present
 */
@SuppressWarnings("removal")
protected Decoder<?> getJacksonJsonDecoder() {
    if (this.jacksonJsonDecoder == null) {
        if (JACKSON_PRESENT) {
            this.jacksonJsonDecoder = new JacksonJsonDecoder();
        }
        else if (JACKSON_2_PRESENT) {
            this.jacksonJsonDecoder = new Jackson2JsonDecoder();
        }
        else {
            throw new IllegalStateException("Jackson not present");
        }
    }
    return this.jacksonJsonDecoder;
}

/**
 * Get or initialize a Jackson JSON decoder.
 * @deprecated in favor of {@link #getJacksonJsonDecoder()}
 */
@Deprecated(since = "7.0", forRemoval = true)
protected Decoder<?> getJackson2JsonDecoder() {
    return getJacksonJsonDecoder();
}
/**
 * Get or lazily create the Jackson JSON encoder, preferring Jackson 3
 * over Jackson 2 when both are on the classpath.
 * @throws IllegalStateException if neither Jackson generation is present
 */
@SuppressWarnings("removal")
protected Encoder<?> getJacksonJsonEncoder() {
    if (this.jacksonJsonEncoder == null) {
        if (JACKSON_PRESENT) {
            this.jacksonJsonEncoder = new JacksonJsonEncoder();
        }
        else if (JACKSON_2_PRESENT) {
            this.jacksonJsonEncoder = new Jackson2JsonEncoder();
        }
        else {
            throw new IllegalStateException("Jackson not present");
        }
    }
    return this.jacksonJsonEncoder;
}

/**
 * Get or initialize a Jackson JSON encoder.
 * @deprecated in favor of {@link #getJacksonJsonEncoder()}
 */
@Deprecated(since = "7.0", forRemoval = true)
protected Encoder<?> getJackson2JsonEncoder() {
    return getJacksonJsonEncoder();
}
/**
 * Get or lazily create the Gson decoder.
 */
protected Decoder<?> getGsonDecoder() {
    Decoder<?> decoder = this.gsonDecoder;
    if (decoder == null) {
        decoder = new GsonDecoder();
        this.gsonDecoder = decoder;
    }
    return decoder;
}
/**
 * Get or lazily create the Gson encoder.
 */
protected Encoder<?> getGsonEncoder() {
    Encoder<?> encoder = this.gsonEncoder;
    if (encoder == null) {
        encoder = new GsonEncoder();
        this.gsonEncoder = encoder;
    }
    return encoder;
}
/**
 * Get or lazily create the Jackson Smile decoder, preferring Jackson 3
 * over Jackson 2 when both are on the classpath.
 * @throws IllegalStateException if no Smile support is present
 */
@SuppressWarnings("removal")
protected Decoder<?> getJacksonSmileDecoder() {
    if (this.jacksonSmileDecoder == null) {
        if (JACKSON_SMILE_PRESENT) {
            this.jacksonSmileDecoder = new JacksonSmileDecoder();
        }
        else if (JACKSON_2_SMILE_PRESENT) {
            this.jacksonSmileDecoder = new Jackson2SmileDecoder();
        }
        else {
            throw new IllegalStateException("Jackson Smile support not present");
        }
    }
    return this.jacksonSmileDecoder;
}

/**
 * Get or lazily create the Jackson Smile encoder, preferring Jackson 3
 * over Jackson 2 when both are on the classpath.
 * @throws IllegalStateException if no Smile support is present
 */
@SuppressWarnings("removal")
protected Encoder<?> getJacksonSmileEncoder() {
    if (this.jacksonSmileEncoder == null) {
        if (JACKSON_SMILE_PRESENT) {
            this.jacksonSmileEncoder = new JacksonSmileEncoder();
        }
        else if (JACKSON_2_SMILE_PRESENT) {
            this.jacksonSmileEncoder = new Jackson2SmileEncoder();
        }
        else {
            throw new IllegalStateException("Jackson Smile support not present");
        }
    }
    return this.jacksonSmileEncoder;
}
/**
 * Get or lazily create the Jackson CBOR decoder, preferring Jackson 3
 * over Jackson 2 when both are on the classpath.
 * @throws IllegalStateException if no CBOR support is present
 */
@SuppressWarnings("removal")
protected Decoder<?> getJacksonCborDecoder() {
    if (this.jacksonCborDecoder == null) {
        if (JACKSON_CBOR_PRESENT) {
            this.jacksonCborDecoder = new JacksonCborDecoder();
        }
        else if (JACKSON_2_CBOR_PRESENT) {
            this.jacksonCborDecoder = new Jackson2CborDecoder();
        }
        else {
            throw new IllegalStateException("Jackson CBOR support not present");
        }
    }
    return this.jacksonCborDecoder;
}

/**
 * Get or lazily create the Jackson CBOR encoder, preferring Jackson 3
 * over Jackson 2 when both are on the classpath.
 * @throws IllegalStateException if no CBOR support is present
 */
@SuppressWarnings("removal")
protected Encoder<?> getJacksonCborEncoder() {
    if (this.jacksonCborEncoder == null) {
        if (JACKSON_CBOR_PRESENT) {
            this.jacksonCborEncoder = new JacksonCborEncoder();
        }
        else if (JACKSON_2_CBOR_PRESENT) {
            this.jacksonCborEncoder = new Jackson2CborEncoder();
        }
        else {
            throw new IllegalStateException("Jackson CBOR support not present");
        }
    }
    return this.jacksonCborEncoder;
}
/**
 * Get or lazily create the Kotlin Serialization JSON decoder.
 * <p>When another general-purpose JSON decoder is available (a custom Jackson
 * decoder, or Jackson/Gson on the classpath), the no-arg constructor is used;
 * otherwise the decoder is created with a predicate accepting all types —
 * presumably so it can serve as the sole JSON codec.
 */
protected Decoder<?> getKotlinSerializationJsonDecoder() {
    if (this.kotlinSerializationJsonDecoder == null) {
        this.kotlinSerializationJsonDecoder = (this.jacksonJsonDecoder != null || JACKSON_PRESENT || JACKSON_2_PRESENT || GSON_PRESENT ?
                new KotlinSerializationJsonDecoder() : new KotlinSerializationJsonDecoder(type -> true));
    }
    return this.kotlinSerializationJsonDecoder;
}
/**
 * Get or lazily create the Kotlin Serialization JSON encoder.
 * <p>When another general-purpose JSON encoder is available (a custom Jackson
 * encoder, or Jackson/Gson on the classpath), the no-arg constructor is used;
 * otherwise the encoder is created with a predicate accepting all types —
 * presumably so it can serve as the sole JSON codec.
 */
protected Encoder<?> getKotlinSerializationJsonEncoder() {
    if (this.kotlinSerializationJsonEncoder == null) {
        // Fix: check the configured JSON *encoder* (the decoder getter checks the
        // decoder field); this previously inspected jacksonJsonDecoder, an
        // apparent copy-paste slip from getKotlinSerializationJsonDecoder().
        this.kotlinSerializationJsonEncoder = (this.jacksonJsonEncoder != null || JACKSON_PRESENT || JACKSON_2_PRESENT || GSON_PRESENT ?
                new KotlinSerializationJsonEncoder() : new KotlinSerializationJsonEncoder(type -> true));
    }
    return this.kotlinSerializationJsonEncoder;
}
/**
 * Get or lazily create the Kotlin Serialization CBOR decoder.
 * <p>When another CBOR decoder is available (a custom Jackson CBOR decoder,
 * or Jackson CBOR support on the classpath), the no-arg constructor is used;
 * otherwise the decoder is created with a predicate accepting all types.
 */
protected Decoder<?> getKotlinSerializationCborDecoder() {
    if (this.kotlinSerializationCborDecoder == null) {
        this.kotlinSerializationCborDecoder = (this.jacksonCborDecoder != null || JACKSON_CBOR_PRESENT ?
                new KotlinSerializationCborDecoder() : new KotlinSerializationCborDecoder(type -> true));
    }
    return this.kotlinSerializationCborDecoder;
}
/**
 * Get or lazily create the Kotlin Serialization CBOR encoder.
 * <p>When another CBOR encoder is available (a custom Jackson CBOR encoder,
 * or Jackson CBOR support on the classpath), the no-arg constructor is used;
 * otherwise the encoder is created with a predicate accepting all types.
 */
protected Encoder<?> getKotlinSerializationCborEncoder() {
    if (this.kotlinSerializationCborEncoder == null) {
        // Fix: check the configured CBOR *encoder* (the decoder getter checks the
        // decoder field); this previously inspected jacksonCborDecoder, an
        // apparent copy-paste slip from getKotlinSerializationCborDecoder().
        this.kotlinSerializationCborEncoder = (this.jacksonCborEncoder != null || JACKSON_CBOR_PRESENT ?
                new KotlinSerializationCborEncoder() : new KotlinSerializationCborEncoder(type -> true));
    }
    return this.kotlinSerializationCborEncoder;
}
/**
 * Get or lazily create the Kotlin Serialization Protobuf decoder.
 * <p>Always created with a predicate accepting all types — unlike the
 * JSON/CBOR variants there is no competing default Protobuf object decoder.
 */
protected Decoder<?> getKotlinSerializationProtobufDecoder() {
    if (this.kotlinSerializationProtobufDecoder == null) {
        this.kotlinSerializationProtobufDecoder = new KotlinSerializationProtobufDecoder(type -> true);
    }
    return this.kotlinSerializationProtobufDecoder;
}

/**
 * Get or lazily create the Kotlin Serialization Protobuf encoder.
 * <p>Always created with a predicate accepting all types (see decoder note).
 */
protected Encoder<?> getKotlinSerializationProtobufEncoder() {
    if (this.kotlinSerializationProtobufEncoder == null) {
        this.kotlinSerializationProtobufEncoder = new KotlinSerializationProtobufEncoder(type -> true);
    }
    return this.kotlinSerializationProtobufEncoder;
}
/**
* Default implementation of {@link CodecConfigurer.MultipartCodecs}.
*/
protected | BaseDefaultCodecs |
java | elastic__elasticsearch | x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/otlp/docbuilder/MetricDocumentBuilder.java | {
"start": 1233,
"end": 1408
} | class ____ an Elasticsearch document representation of a metric data point group.
* It also handles dynamic templates for metrics based on their attributes.
*/
public | constructs |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/util/ExceptionCollector.java | {
"start": 1259,
"end": 3966
} | class ____ {
private final List<Throwable> exceptions = new ArrayList<>();
/**
* Execute the supplied {@link Executable} and track any exception thrown.
* @param executable the {@code Executable} to execute
* @see #getExceptions()
* @see #assertEmpty()
*/
public void execute(Executable executable) {
try {
executable.execute();
}
catch (Throwable ex) {
this.exceptions.add(ex);
}
}
/**
* Get the list of exceptions encountered in {@link #execute(Executable)}.
* @return an unmodifiable copy of the list of exceptions, potentially empty
* @see #assertEmpty()
*/
public List<Throwable> getExceptions() {
return Collections.unmodifiableList(this.exceptions);
}
/**
* Assert that this {@code ExceptionCollector} does not contain any
* {@linkplain #getExceptions() exceptions}.
* <p>If this collector is empty, this method is effectively a no-op.
* <p>If this collector contains a single {@link Error} or {@link Exception},
* this method rethrows the error or exception.
* <p>If this collector contains a single {@link Throwable}, this method throws
* an {@link AssertionError} with the error message of the {@code Throwable}
* and with the {@code Throwable} as the {@linkplain Throwable#getCause() cause}.
* <p>If this collector contains multiple exceptions, this method throws an
* {@code AssertionError} whose message is "Multiple Exceptions (#):"
* followed by a new line with the error message of each exception separated
* by a new line, with {@code #} replaced with the number of exceptions present.
* In addition, each exception will be added to the {@code AssertionError} as
* a {@link Throwable#addSuppressed(Throwable) suppressed exception}.
* @see #execute(Executable)
* @see #getExceptions()
*/
public void assertEmpty() throws Exception {
if (this.exceptions.isEmpty()) {
return;
}
if (this.exceptions.size() == 1) {
Throwable exception = this.exceptions.get(0);
if (exception instanceof Error error) {
throw error;
}
if (exception instanceof Exception ex) {
throw ex;
}
AssertionError assertionError = new AssertionError(exception.getMessage());
assertionError.initCause(exception);
throw assertionError;
}
StringBuilder message = new StringBuilder();
message.append("Multiple Exceptions (").append(this.exceptions.size()).append("):");
for (Throwable exception : this.exceptions) {
message.append('\n');
message.append(exception.getMessage());
}
AssertionError assertionError = new AssertionError(message);
this.exceptions.forEach(assertionError::addSuppressed);
throw assertionError;
}
/**
* {@code Executable} is a functional | ExceptionCollector |
java | quarkusio__quarkus | extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/OpenApiBuildTimeExcludedClassTestCase.java | {
"start": 660,
"end": 2619
} | class ____ {
static String quarkusProfile;
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(IfBuildProfileTest.class,
IfBuildProfileBar.class,
IfBuildPropertyBarBazIsTrue.class,
IfBuildProperyFooBarIsTrue.class,
UnlessBuildProfileBar.class,
UnlessBuildProfileTest.class,
UnlessBuildPropertyBarBazIsFalse.class,
UnlessBuildProperyFooBarIsFalse.class)
.addAsResource(
new StringAsset("%test.foobar=true\n"
+ "%test.barbaz=false\n"
+ "foobar=false\n"
+ "barbaz=true\n"),
"application.properties"));
@Test
void testAutoSecurityRequirement() {
RestAssured.given()
.header("Accept", "application/json")
.when()
.get("/q/openapi")
.then()
.log().body()
.body("paths", aMapWithSize(4))
.body("paths", hasKey("/test-profile-enabled"))
.body("paths", not(hasKey("/test-profile-not-enabled")))
.body("paths", hasKey("/bar-profile-not-enabled"))
.body("paths", not(hasKey("/bar-profile-enabled")))
.body("paths", hasKey("/foobar-property-true"))
.body("paths", hasKey("/foobar-property-not-false"))
.body("paths", not(hasKey("/barbaz-property-true")))
.body("paths", not(hasKey("/barbaz-property-not-false")));
}
@Path("/test-profile-enabled")
@IfBuildProfile("test")
public static | OpenApiBuildTimeExcludedClassTestCase |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/SqlDialect.java | {
"start": 1259,
"end": 1550
} | enum ____ {
/** Flink's default SQL behavior. */
DEFAULT,
/**
* SQL dialect that allows some Apache Hive specific grammar.
*
* <p>Note: We might never support all of the Hive grammar. See the documentation for supported
* features.
*/
HIVE
}
| SqlDialect |
java | apache__camel | components/camel-fhir/camel-fhir-component/src/generated/java/org/apache/camel/component/fhir/FhirValidateEndpointConfiguration.java | {
"start": 1095,
"end": 2749
} | class ____ extends FhirConfiguration {
@UriParam
@ApiParam(optional = true, apiMethods = {@ApiMethod(methodName = "resource", description="See ExtraParameters for a full list of parameters that can be passed, may be NULL"), @ApiMethod(methodName = "resource", description="See ExtraParameters for a full list of parameters that can be passed, may be NULL")})
private java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> extraParameters;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "resource", description="The IBaseResource to validate")})
private org.hl7.fhir.instance.model.api.IBaseResource resource;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "resource", description="Raw resource to validate")})
private String resourceAsString;
public java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> getExtraParameters() {
return extraParameters;
}
public void setExtraParameters(java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> extraParameters) {
this.extraParameters = extraParameters;
}
public org.hl7.fhir.instance.model.api.IBaseResource getResource() {
return resource;
}
public void setResource(org.hl7.fhir.instance.model.api.IBaseResource resource) {
this.resource = resource;
}
public String getResourceAsString() {
return resourceAsString;
}
public void setResourceAsString(String resourceAsString) {
this.resourceAsString = resourceAsString;
}
}
| FhirValidateEndpointConfiguration |
java | apache__avro | lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java | {
"start": 9252,
"end": 9477
} | class ____ not be null");
}
mValueClass = valueClass;
return this;
}
/**
* Sets the writer schema of the value records when using Avro data.
*
* <p>
* The value | may |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/ByteArrayResource.java | {
"start": 1559,
"end": 4018
} | class ____ extends AbstractResource {
private final byte[] byteArray;
private final String description;
/**
* Create a new {@code ByteArrayResource}.
* @param byteArray the byte array to wrap
*/
public ByteArrayResource(byte[] byteArray) {
this(byteArray, "resource loaded from byte array");
}
/**
* Create a new {@code ByteArrayResource} with a description.
* @param byteArray the byte array to wrap
* @param description where the byte array comes from
*/
public ByteArrayResource(byte[] byteArray, @Nullable String description) {
Assert.notNull(byteArray, "Byte array must not be null");
this.byteArray = byteArray;
this.description = (description != null ? description : "");
}
/**
* Return the underlying byte array.
*/
public final byte[] getByteArray() {
return this.byteArray;
}
/**
* This implementation always returns {@code true}.
*/
@Override
public boolean exists() {
return true;
}
/**
* This implementation returns the length of the underlying byte array.
*/
@Override
public long contentLength() {
return this.byteArray.length;
}
/**
* This implementation returns a ByteArrayInputStream for the
* underlying byte array.
* @see java.io.ByteArrayInputStream
*/
@Override
public InputStream getInputStream() throws IOException {
return new ByteArrayInputStream(this.byteArray);
}
@Override
public byte[] getContentAsByteArray() throws IOException {
int length = this.byteArray.length;
byte[] result = new byte[length];
System.arraycopy(this.byteArray, 0, result, 0, length);
return result;
}
@Override
public String getContentAsString(Charset charset) throws IOException {
return new String(this.byteArray, charset);
}
/**
* This implementation returns a description that includes the passed-in
* {@code description}, if any.
*/
@Override
public String getDescription() {
return "Byte array resource [" + this.description + "]";
}
/**
* This implementation compares the underlying byte array.
* @see java.util.Arrays#equals(byte[], byte[])
*/
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof ByteArrayResource that &&
Arrays.equals(this.byteArray, that.byteArray)));
}
/**
* This implementation returns the hash code based on the
* underlying byte array.
*/
@Override
public int hashCode() {
return Arrays.hashCode(this.byteArray);
}
}
| ByteArrayResource |
java | apache__camel | components/camel-bean-validator/src/test/java/org/apache/camel/component/bean/validator/BeanValidatorRouteTest.java | {
"start": 1948,
"end": 13367
} | class ____ extends CamelTestSupport {
private Locale origLocale;
@BeforeEach
public void setLanguage() {
origLocale = Locale.getDefault();
Locale.setDefault(Locale.US);
}
@AfterEach
public void restoreLanguage() {
Locale.setDefault(origLocale);
}
@ParameterizedTest
@MethodSource("provideValidCars")
void validateShouldSuccessWithImpliciteDefaultGroup(Object cars) {
Exchange exchange = template.request("bean-validator://x", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody(cars);
}
});
assertNotNull(exchange);
}
@ParameterizedTest
@MethodSource("provideValidCars")
void validateShouldSuccessWithExpliciteDefaultGroup(Object cars) {
Exchange exchange = template.request("bean-validator://x?group=jakarta.validation.groups.Default", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody(cars);
}
});
assertNotNull(exchange);
}
@ParameterizedTest
@MethodSource("provideInvalidCarsWithoutLicensePlate")
void validateShouldFailWithImpliciteDefaultGroup(Object cars, int numberOfViolations) {
final String url = "bean-validator://x";
try {
template.requestBody(url, cars);
fail("should throw exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(BeanValidationException.class, e.getCause());
BeanValidationException exception = (BeanValidationException) e.getCause();
Set<ConstraintViolation<Object>> constraintViolations = exception.getConstraintViolations();
assertEquals(numberOfViolations, constraintViolations.size());
constraintViolations.forEach(cv -> {
assertEquals("licensePlate", cv.getPropertyPath().toString());
assertNull(cv.getInvalidValue());
assertEquals("must not be null", cv.getMessage());
});
}
setLicensePlates(cars, "D-A");
Exchange exchange = template.request(url, new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody(cars);
}
});
assertNotNull(exchange);
}
@ParameterizedTest
@MethodSource("provideInvalidCarsWithoutLicensePlate")
void validateShouldFailWithExpliciteDefaultGroup(Object cars, int numberOfViolations) {
final String url = "bean-validator://x?group=jakarta.validation.groups.Default";
try {
template.requestBody(url, cars);
fail("should throw exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(BeanValidationException.class, e.getCause());
BeanValidationException exception = (BeanValidationException) e.getCause();
Set<ConstraintViolation<Object>> constraintViolations = exception.getConstraintViolations();
assertEquals(numberOfViolations, constraintViolations.size());
constraintViolations.forEach(cv -> {
assertEquals("licensePlate", cv.getPropertyPath().toString());
assertNull(cv.getInvalidValue());
assertEquals("must not be null", cv.getMessage());
});
}
setLicensePlates(cars, "D-A");
Exchange exchange = template.request(url, new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody(cars);
}
});
assertNotNull(exchange);
}
@ParameterizedTest
@MethodSource("provideInvalidCarsWithShortLicensePlate")
void validateShouldFailWithOptionalChecksGroup(Object cars, int numberOfViolations) {
final String url = "bean-validator://x?group=org.apache.camel.component.bean.validator.OptionalChecks";
try {
template.requestBody(url, cars);
fail("should throw exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(BeanValidationException.class, e.getCause());
BeanValidationException exception = (BeanValidationException) e.getCause();
Set<ConstraintViolation<Object>> constraintViolations = exception.getConstraintViolations();
assertEquals(numberOfViolations, constraintViolations.size());
constraintViolations.forEach(cv -> {
assertEquals("licensePlate", cv.getPropertyPath().toString());
assertEquals("D-A", cv.getInvalidValue());
assertEquals("size must be between 5 and 14", cv.getMessage());
});
}
setLicensePlates(cars, "DD-AB-123");
Exchange exchange = template.request(url, new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody(cars);
}
});
assertNotNull(exchange);
}
@ParameterizedTest
@MethodSource("provideInvalidCarsWithoutManufacturer")
void validateShouldFailWithOrderedChecksGroup(Object cars, int numberOfViolations) {
final String url = "bean-validator://x?group=org.apache.camel.component.bean.validator.OrderedChecks";
try {
template.requestBody(url, cars);
fail("should throw exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(BeanValidationException.class, e.getCause());
BeanValidationException exception = (BeanValidationException) e.getCause();
Set<ConstraintViolation<Object>> constraintViolations = exception.getConstraintViolations();
assertEquals(numberOfViolations, constraintViolations.size());
constraintViolations.forEach(cv -> {
assertEquals("manufacturer", cv.getPropertyPath().toString());
assertNull(cv.getInvalidValue());
assertEquals("must not be null", cv.getMessage());
});
}
setManufacturer(cars, "BMW");
try {
template.requestBody(url, cars);
fail("should throw exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(BeanValidationException.class, e.getCause());
BeanValidationException exception = (BeanValidationException) e.getCause();
Set<ConstraintViolation<Object>> constraintViolations = exception.getConstraintViolations();
assertEquals(numberOfViolations, constraintViolations.size());
constraintViolations.forEach(cv -> {
assertEquals("licensePlate", cv.getPropertyPath().toString());
assertEquals("D-A", cv.getInvalidValue());
assertEquals("size must be between 5 and 14", cv.getMessage());
});
}
setLicensePlates(cars, "DD-AB-123");
Exchange exchange = template.request(url, new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody(cars);
}
});
assertNotNull(exchange);
}
@ParameterizedTest
@MethodSource("provideCarsWithRedefinedDefaultGroup")
void validateShouldSuccessWithRedefinedDefaultGroup(Object cars) {
final String url = "bean-validator://x";
Exchange exchange = template.request(url, new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody(cars);
}
});
assertNotNull(exchange);
}
@ParameterizedTest
@MethodSource("provideCarsWithRedefinedDefaultGroupAndShortLicencePlate")
void validateShouldFailWithRedefinedDefaultGroup(Object cars, int numberOfViolations) {
final String url = "bean-validator://x";
try {
template.requestBody(url, cars);
fail("should throw exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(BeanValidationException.class, e.getCause());
BeanValidationException exception = (BeanValidationException) e.getCause();
Set<ConstraintViolation<Object>> constraintViolations = exception.getConstraintViolations();
assertEquals(numberOfViolations, constraintViolations.size());
constraintViolations.forEach(cv -> {
assertEquals("licensePlate", cv.getPropertyPath().toString());
assertEquals("D-A", cv.getInvalidValue());
assertEquals("size must be between 5 and 14", cv.getMessage());
});
}
}
Car createCar(String manufacturer, String licencePlate) {
return new CarWithAnnotations(manufacturer, licencePlate);
}
private Stream<Arguments> provideValidCars() {
return Stream.of(
Arguments.of(createCar("BMW", "DD-AB-123")),
Arguments.of(Arrays.asList(
createCar("BMW", "DD-AB-123"),
createCar("VW", "XX-YZ-789"))));
}
private Stream<Arguments> provideInvalidCarsWithoutLicensePlate() {
return Stream.of(
Arguments.of(createCar("BMW", null), 1),
Arguments.of(Arrays.asList(
createCar("BMW", null),
createCar("VW", null)), 2));
}
private Stream<Arguments> provideInvalidCarsWithShortLicensePlate() {
return Stream.of(
Arguments.of(createCar("BMW", "D-A"), 1),
Arguments.of(Arrays.asList(
createCar("BMW", "D-A"),
createCar("VW", "D-A")), 2));
}
private Stream<Arguments> provideInvalidCarsWithoutManufacturer() {
return Stream.of(
Arguments.of(createCar(null, "D-A"), 1),
Arguments.of(Arrays.asList(
createCar(null, "D-A"),
createCar(null, "D-A")), 2));
}
private Stream<Arguments> provideCarsWithRedefinedDefaultGroup() {
return Stream.of(
Arguments.of(new CarWithRedefinedDefaultGroup(null, "DD-AB-123")),
Arguments.of(Arrays.asList(
new CarWithRedefinedDefaultGroup(null, "DD-AB-123")),
new CarWithRedefinedDefaultGroup(null, "XX-YZ-789")));
}
private Stream<Arguments> provideCarsWithRedefinedDefaultGroupAndShortLicencePlate() {
return Stream.of(
Arguments.of(new CarWithRedefinedDefaultGroup(null, "D-A"), 1),
Arguments.of(Arrays.asList(
new CarWithRedefinedDefaultGroup(null, "D-A"),
new CarWithRedefinedDefaultGroup(null, "D-A")), 2));
}
private void setLicensePlates(Object cars, String licensePlate) {
if (cars instanceof Car car) {
car.setLicensePlate(licensePlate);
} else {
((Iterable) cars).forEach(car -> ((Car) car).setLicensePlate(licensePlate));
}
}
private void setManufacturer(Object cars, String manufacturer) {
if (cars instanceof Car car) {
car.setManufacturer(manufacturer);
} else {
((Iterable) cars).forEach(car -> ((Car) car).setManufacturer(manufacturer));
}
}
}
| BeanValidatorRouteTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java | {
"start": 1947,
"end": 2121
} | class ____ for maintaining the various DataNode statistics
* and publishing them through the metrics interfaces.
* This also registers the JMX MBean for RPC.
* <p>
* This | is |
java | grpc__grpc-java | examples/src/main/java/io/grpc/examples/nameresolve/NameResolveServer.java | {
"start": 2758,
"end": 3269
} | class ____ extends GreeterGrpc.GreeterImplBase {
int port;
public GreeterImpl(int port) {
this.port = port;
}
@Override
public void sayHello(HelloRequest req, StreamObserver<HelloReply> responseObserver) {
HelloReply reply = HelloReply.newBuilder().setMessage("Hello " + req.getName() + " from server<" + this.port + ">").build();
responseObserver.onNext(reply);
responseObserver.onCompleted();
}
}
}
| GreeterImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/jpa/internal/util/PersistenceUtilHelper.java | {
"start": 2043,
"end": 5181
} | class ____ {
private PersistenceUtilHelper() {
}
/**
* Determine if the given object reference represents loaded state. The reference may be to an entity or a
* persistent collection.
* <p>
* Return is defined as follows:<ol>
* <li>
* If the reference is a {@link HibernateProxy}, we return {@link LoadState#LOADED} if
* {@link LazyInitializer#isUninitialized()} returns {@code false}; else we return
* {@link LoadState#NOT_LOADED}
* </li>
* <li>
* If the reference is an enhanced (by Hibernate) entity, we return {@link LoadState#LOADED} if
* {@link LazyAttributeLoadingInterceptor#hasAnyUninitializedAttributes()} returns {@code false};
* otherwise we return {@link LoadState#NOT_LOADED}
* </li>
* <li>
* If the reference is a {@link PersistentCollection}, we return {@link LoadState#LOADED} if
* {@link PersistentCollection#wasInitialized()} returns {@code true}; else
* we return {@link LoadState#NOT_LOADED}
* </li>
* <li>
* In all other cases we return {@link LoadState#UNKNOWN}
* </li>
* </ol>
*
*
* @param reference The object reference to check.
*
* @return The appropriate LoadState (see above)
*/
public static LoadState getLoadState(Object reference) {
final LazyInitializer lazyInitializer = extractLazyInitializer( reference );
if ( lazyInitializer != null ) {
return !lazyInitializer.isUninitialized() ? LOADED : NOT_LOADED;
}
else if ( isPersistentAttributeInterceptable( reference ) ) {
return isInitialized( asPersistentAttributeInterceptable( reference ) ) ? LOADED : NOT_LOADED;
}
else if ( reference instanceof LazyInitializable lazyInitializable) {
return lazyInitializable.wasInitialized() ? LOADED : NOT_LOADED;
}
else {
return UNKNOWN;
}
}
private static boolean isInitialized(PersistentAttributeInterceptable interceptable) {
final BytecodeLazyAttributeInterceptor interceptor = extractInterceptor( interceptable );
return interceptor == null || !interceptor.hasAnyUninitializedAttributes();
}
private static BytecodeLazyAttributeInterceptor extractInterceptor(PersistentAttributeInterceptable interceptable) {
return (BytecodeLazyAttributeInterceptor) interceptable.$$_hibernate_getInterceptor();
}
/**
* Is the given attribute (by name) loaded? This form must take care to not access the attribute (trigger
* initialization).
*
* @param entity The entity
* @param attributeName The name of the attribute to check
* @param cache The cache we maintain of attribute resolutions
*
* @return The LoadState
*/
public static LoadState isLoadedWithoutReference(Object entity, String attributeName, MetadataCache cache) {
final boolean sureFromUs;
final LazyInitializer lazyInitializer = extractLazyInitializer( entity );
if ( lazyInitializer != null ) {
if ( lazyInitializer.isUninitialized() ) {
// we have an uninitialized proxy, the attribute cannot be loaded
return NOT_LOADED;
}
else {
// swap the proxy with target (for proper | PersistenceUtilHelper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/collection/multisession/MultipleSessionCollectionTest.java | {
"start": 12989,
"end": 13052
} | class ____ {
@Id
@GeneratedValue
private Long id;
}
}
| Child |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/RecipientListOnPrepareTest.java | {
"start": 1115,
"end": 2168
} | class ____ extends ContextTestSupport {
@Test
public void testRecipientListOnPrepare() throws Exception {
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:a").message(0).body(String.class).isEqualTo("1 Tony the Tiger");
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:b").message(0).body(String.class).isEqualTo("1 Tiger");
template.sendBodyAndHeader("direct:start", new Animal(1, "Tiger"), "foo", "direct:a,direct:b");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").recipientList(header("foo")).onPrepare(new AnimalDeepClonePrepare());
from("direct:a").process(new ProcessorA()).to("mock:a");
from("direct:b").process(new ProcessorB()).to("mock:b");
}
};
}
public static | RecipientListOnPrepareTest |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ExceptionHandlerExceptionResolverTests.java | {
"start": 23440,
"end": 23742
} | class ____ {
@ExceptionHandler({IllegalStateException.class, IllegalAccessException.class})
public String handleException(Exception ex) {
return "AnotherTestExceptionResolver: " + ClassUtils.getShortName(ex.getClass());
}
}
@RestControllerAdvice
@Order(3)
static | AnotherTestExceptionResolver |
java | micronaut-projects__micronaut-core | http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/FilterProxyTest.java | {
"start": 1814,
"end": 2799
} | class ____ {
public static final String SPEC_NAME = "FilterProxyTest";
public static final String PROP_MICRONAUT_SERVER_CORS_ENABLED = "micronaut.server.cors.enabled";
@Test
void testFiltersAreRunCorrectly() throws IOException {
Map<String, Object> configuration = Map.of(
PROP_MICRONAUT_SERVER_CORS_ENABLED, StringUtils.TRUE
);
try (ServerUnderTest server = ServerUnderTestProviderUtils.getServerUnderTestProvider().getServer(SPEC_NAME, configuration)) {
HttpRequest<?> request = HttpRequest.GET("/filter-test/redirection");
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("OK")
.headers(Collections.singletonMap("X-Test-Filter", StringUtils.TRUE))
.build());
}
}
@Controller("/ok")
@Requires(property = "spec.name", value = SPEC_NAME)
static | FilterProxyTest |
java | apache__camel | tests/camel-itest/src/test/java/org/apache/camel/itest/tx/JmsToHttpTXWithRollbackTest.java | {
"start": 1609,
"end": 2812
} | class ____ {
@RegisterExtension
public static JmsServiceExtension jmsServiceExtension = JmsServiceExtension.createExtension();
// use uri to refer to our mock
@EndpointInject("mock:JmsToHttpWithRollbackRoute")
MockEndpoint mock;
// use the spring id to refer to the endpoint we should send data to
// notice using this id we can setup the actual endpoint in spring XML
// and we can even use spring ${ } property in the spring XML
@EndpointInject("ref:data")
private ProducerTemplate template;
// the ok response to expect
private String ok = "<?xml version=\"1.0\"?><reply><status>ok</status></reply>";
@Test
void testSendToTXJmsWithRollback() throws Exception {
// we assume 2 rollbacks
mock.expectedMessageCount(2);
// use requestBody to force a InOut message exchange pattern ( = request/reply)
// will send and wait for a response
Object out = template.requestBody("<?xml version=\"1.0\"?><request><status id=\"123\"/></request>");
// compare response
assertEquals(ok, out);
// assert the mock is correct
mock.assertIsSatisfied();
}
}
| JmsToHttpTXWithRollbackTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedTypeParameterTest.java | {
"start": 3155,
"end": 3358
} | class ____ {
private <T> boolean contains(java.util.Set<T> set, T elem) {
return set.contains(elem);
}
}
""")
.doTest();
}
}
| Test |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/packagescan/resource/PathMatchingResourcePatternResolver.java | {
"start": 5473,
"end": 5653
} | class ____ resources with
* the same name, via the "{@code classpath*:}" prefix. For example,
* "{@code classpath*:META-INF/beans.xml}" will find all "beans.xml"
* files in the | path |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/ClearCachesApplicationListener.java | {
"start": 1020,
"end": 1653
} | class ____ implements ApplicationListener<ContextRefreshedEvent> {
@Override
public void onApplicationEvent(ContextRefreshedEvent event) {
ReflectionUtils.clearCache();
clearClassLoaderCaches(Thread.currentThread().getContextClassLoader());
}
private void clearClassLoaderCaches(@Nullable ClassLoader classLoader) {
if (classLoader == null) {
return;
}
try {
Method clearCacheMethod = classLoader.getClass().getDeclaredMethod("clearCache");
clearCacheMethod.invoke(classLoader);
}
catch (Exception ex) {
// Ignore
}
clearClassLoaderCaches(classLoader.getParent());
}
}
| ClearCachesApplicationListener |
java | grpc__grpc-java | interop-testing/src/test/java/io/grpc/testing/integration/RetryTest.java | {
"start": 20492,
"end": 22121
} | class ____ extends ClientStreamTracer.Factory {
@Override
public ClientStreamTracer newClientStreamTracer(StreamInfo info, Metadata headers) {
return new TransparentRetryTriggeringTracer();
}
}
CallOptions callOptions = CallOptions.DEFAULT
.withWaitForReady()
.withStreamTracerFactory(new TransparentRetryTracerFactory());
while (true) {
ClientCall<String, Integer> call = channel.newCall(clientStreamingMethod, callOptions);
call.start(mockCallListener, new Metadata());
assertRpcStartedRecorded(); // original attempt
MetricsRecord record = clientStatsRecorder.pollRecord(5, SECONDS);
assertThat(record.getMetricAsLongOrFail(DeprecatedCensusConstants.RPC_CLIENT_FINISHED_COUNT))
.isEqualTo(1);
TagValue statusTag = record.tags.get(RpcMeasureConstants.GRPC_CLIENT_STATUS);
if (statusTag.asString().equals(Code.UNAVAILABLE.toString())) {
break;
} else {
// Due to race condition, GOAWAY is not received/processed before the stream is closed due
// to connection error. Rerun the test.
assertThat(statusTag.asString()).isEqualTo(Code.UNKNOWN.toString());
assertRetryStatsRecorded(0, 0, 0);
originalAttemptFailed.set(false);
}
}
assertRpcStartedRecorded(); // retry attempt
ServerCall<String, Integer> serverCall = serverCalls.poll(5, SECONDS);
serverCall.close(Status.INVALID_ARGUMENT, new Metadata());
assertRpcStatusRecorded(Code.INVALID_ARGUMENT, 0, 0);
assertRetryStatsRecorded(0, 1, 0);
}
private static | TransparentRetryTracerFactory |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/type/GenericsBoundedTest.java | {
"start": 926,
"end": 1083
} | class ____ extends Range<Double> {
public DoubleRange() { }
public DoubleRange(Double s, Double e) { super(s, e); }
}
static | DoubleRange |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/REPLACE_Syntax_Test.java | {
"start": 982,
"end": 2438
} | class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "REPLACE INTO T SELECT * FROM T;";
SQLStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList();
String text = SQLUtils.toSQLString(stmtList, JdbcConstants.MYSQL);
assertEquals("REPLACE INTO T\n\tSELECT *\n\tFROM T;", text);
}
public void test_1() throws Exception {
String sql = "REPLACE DELAYED INTO `online_users` SET `session_id`='3580cc4e61117c0785372c426eddd11c', `user_id` = 'XXX', `page` = '/', `lastview` = NOW();";
SQLStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList();
String text = SQLUtils.toSQLString(stmtList, JdbcConstants.MYSQL);
assertEquals("REPLACE DELAYED INTO `online_users` (`session_id`, `user_id`, `page`, `lastview`)\nVALUES ('3580cc4e61117c0785372c426eddd11c', 'XXX', '/', NOW());",
text);
}
public void test_2() throws Exception {
String sql = " replace into t(col1,col2)values(?,?)";
SQLStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList();
String text = SQLUtils.toSQLString(stmtList, JdbcConstants.MYSQL);
assertEquals("REPLACE INTO t (col1, col2)\nVALUES (?, ?)", text);
}
}
| REPLACE_Syntax_Test |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/mapper/DynamicComponentMapperSupport.java | {
"start": 511,
"end": 776
} | interface ____ {
/**
* Mark the property mapper that it wraps a dynamic-component.
*/
void markAsDynamicComponentMap();
/**
* Returns whether the property mapper wraps a dynamic-component.
*/
boolean isDynamicComponentMap();
}
| DynamicComponentMapperSupport |
java | apache__camel | components/camel-elytron/src/main/java/org/apache/camel/component/elytron/ElytronSercurityConfiguration.java | {
"start": 985,
"end": 1095
} | interface ____ to be provided as `securityConfiguration` parameter into camel-undertow.
*/
@Deprecated
public | has |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/UriFsPathParam.java | {
"start": 899,
"end": 1499
} | class ____ extends StringParam {
/** Parameter name. */
public static final String NAME = "path";
private static final Domain DOMAIN = new Domain(NAME, null);
/**
* Constructor.
* @param str a string representation of the parameter value.
*/
public UriFsPathParam(String str) {
super(DOMAIN, str);
}
@Override
public String getName() {
return NAME;
}
/** @return the absolute path. */
public final String getAbsolutePath() {
final String path = getValue(); //The first / has been stripped out.
return path == null? null: "/" + path;
}
}
| UriFsPathParam |
java | apache__camel | components/camel-zookeeper/src/test/java/org/apache/camel/component/zookeeper/operations/integration/ExistenceChangedOperationIT.java | {
"start": 1341,
"end": 2438
} | class ____ extends ZooKeeperITSupport {
@Test
public void getStatsWhenNodeIsCreated() throws Exception {
String path = "/doesNotYetExist";
ExistenceChangedOperation future = setupMonitor(path);
client.create(path, "This is a test");
assertEquals(path, future.get().getResult());
assertNotNull(future.get().getStatistics());
}
@Test
public void getsNotifiedWhenNodeIsDeleted() throws Exception {
String path = "/soonToBeDeleted";
client.create(path, "This is a test");
ExistenceChangedOperation future = setupMonitor(path);
client.delete(path);
assertEquals(path, future.get().getResult());
assertNull(future.get().getStatistics());
}
private ExistenceChangedOperation setupMonitor(String path) throws KeeperException, InterruptedException {
ZooKeeper connection = getConnection();
ExistenceChangedOperation future = new ExistenceChangedOperation(connection, path);
connection.exists(path, future);
return future;
}
}
| ExistenceChangedOperationIT |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/floatarray/FloatArrayAssert_startsWith_Test.java | {
"start": 1123,
"end": 1952
} | class ____ extends FloatArrayAssertBaseTest {
@Override
protected FloatArrayAssert invoke_api_method() {
return assertions.startsWith(6f, 8f);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertStartsWith(getInfo(assertions), getActual(assertions), arrayOf(6f, 8f));
}
@Test
void should_pass_with_precision_specified_as_last_argument() {
// GIVEN
float[] actual = arrayOf(1.0f, 2.0f, 3.0f);
// THEN
assertThat(actual).startsWith(arrayOf(1.01f, 2.0f), withPrecision(0.1f));
}
@Test
void should_pass_with_precision_specified_in_comparator() {
// GIVEN
float[] actual = arrayOf(1.0f, 2.0f, 3.0f);
// THEN
assertThat(actual).usingComparatorWithPrecision(0.1f)
.startsWith(1.01f, 2.0f);
}
}
| FloatArrayAssert_startsWith_Test |
java | google__guava | android/guava-tests/test/com/google/common/graph/EndpointPairTest.java | {
"start": 1182,
"end": 9894
} | class ____ {
private static final Integer N0 = 0;
private static final Integer N1 = 1;
private static final Integer N2 = 2;
private static final Integer N3 = 3;
private static final Integer N4 = 4;
private static final String E12 = "1-2";
private static final String E12_A = "1-2a";
private static final String E21 = "2-1";
private static final String E13 = "1-3";
private static final String E44 = "4-4";
// Test for EndpointPair class
@Test
public void testOrderedEndpointPair() {
EndpointPair<String> ordered = EndpointPair.ordered("source", "target");
assertThat(ordered.isOrdered()).isTrue();
assertThat(ordered).containsExactly("source", "target").inOrder();
assertThat(ordered.source()).isEqualTo("source");
assertThat(ordered.target()).isEqualTo("target");
assertThat(ordered.nodeU()).isEqualTo("source");
assertThat(ordered.nodeV()).isEqualTo("target");
assertThat(ordered.adjacentNode("source")).isEqualTo("target");
assertThat(ordered.adjacentNode("target")).isEqualTo("source");
assertThat(ordered.toString()).isEqualTo("<source -> target>");
}
@Test
public void testUnorderedEndpointPair() {
EndpointPair<String> unordered = EndpointPair.unordered("chicken", "egg");
assertThat(unordered.isOrdered()).isFalse();
assertThat(unordered).containsExactly("chicken", "egg");
assertThat(ImmutableSet.of(unordered.nodeU(), unordered.nodeV()))
.containsExactly("chicken", "egg");
assertThat(unordered.adjacentNode(unordered.nodeU())).isEqualTo(unordered.nodeV());
assertThat(unordered.adjacentNode(unordered.nodeV())).isEqualTo(unordered.nodeU());
assertThat(unordered.toString()).contains("chicken");
assertThat(unordered.toString()).contains("egg");
}
@Test
public void testSelfLoop() {
EndpointPair<String> unordered = EndpointPair.unordered("node", "node");
assertThat(unordered.isOrdered()).isFalse();
assertThat(unordered).containsExactly("node", "node");
assertThat(unordered.nodeU()).isEqualTo("node");
assertThat(unordered.nodeV()).isEqualTo("node");
assertThat(unordered.adjacentNode("node")).isEqualTo("node");
assertThat(unordered.toString()).isEqualTo("[node, node]");
}
@Test
public void testAdjacentNode_nodeNotIncident() {
ImmutableList<MutableNetwork<Integer, String>> testNetworks =
ImmutableList.of(
NetworkBuilder.directed().<Integer, String>build(),
NetworkBuilder.undirected().<Integer, String>build());
for (MutableNetwork<Integer, String> network : testNetworks) {
network.addEdge(1, 2, "1-2");
EndpointPair<Integer> endpointPair = network.incidentNodes("1-2");
assertThrows(IllegalArgumentException.class, () -> endpointPair.adjacentNode(3));
}
}
@Test
public void testEquals() {
EndpointPair<String> ordered = EndpointPair.ordered("a", "b");
EndpointPair<String> orderedMirror = EndpointPair.ordered("b", "a");
EndpointPair<String> unordered = EndpointPair.unordered("a", "b");
EndpointPair<String> unorderedMirror = EndpointPair.unordered("b", "a");
new EqualsTester()
.addEqualityGroup(ordered)
.addEqualityGroup(orderedMirror)
.addEqualityGroup(unordered, unorderedMirror)
.testEquals();
}
// Tests for Graph.edges() and Network.asGraph().edges() methods
// TODO(user): Move these to a more appropriate location in the test suite.
@Test
public void endpointPair_directedGraph() {
MutableGraph<Integer> directedGraph = GraphBuilder.directed().allowsSelfLoops(true).build();
directedGraph.addNode(N0);
directedGraph.putEdge(N1, N2);
directedGraph.putEdge(N2, N1);
directedGraph.putEdge(N1, N3);
directedGraph.putEdge(N4, N4);
containsExactlySanityCheck(
directedGraph.edges(),
EndpointPair.ordered(N1, N2),
EndpointPair.ordered(N2, N1),
EndpointPair.ordered(N1, N3),
EndpointPair.ordered(N4, N4));
}
@Test
public void endpointPair_undirectedGraph() {
MutableGraph<Integer> undirectedGraph = GraphBuilder.undirected().allowsSelfLoops(true).build();
undirectedGraph.addNode(N0);
undirectedGraph.putEdge(N1, N2);
undirectedGraph.putEdge(N2, N1); // does nothing
undirectedGraph.putEdge(N1, N3);
undirectedGraph.putEdge(N4, N4);
containsExactlySanityCheck(
undirectedGraph.edges(),
EndpointPair.unordered(N1, N2),
EndpointPair.unordered(N1, N3),
EndpointPair.unordered(N4, N4));
}
@Test
public void endpointPair_directedNetwork() {
  // asGraph().edges() of a directed network exposes the same ordered endpoint pairs that
  // the underlying network's edges connect, including the self-loop.
  MutableNetwork<Integer, String> network =
      NetworkBuilder.directed().allowsSelfLoops(true).build();
  network.addNode(N0); // isolated node
  network.addEdge(N1, N2, E12);
  network.addEdge(N2, N1, E21);
  network.addEdge(N1, N3, E13);
  network.addEdge(N4, N4, E44); // self-loop
  containsExactlySanityCheck(
      network.asGraph().edges(),
      EndpointPair.ordered(N1, N2),
      EndpointPair.ordered(N2, N1),
      EndpointPair.ordered(N1, N3),
      EndpointPair.ordered(N4, N4));
}
@Test
public void endpointPair_undirectedNetwork() {
  // asGraph().edges() collapses parallel edges of an undirected network into a single
  // unordered endpoint pair.
  MutableNetwork<Integer, String> undirectedNetwork =
      NetworkBuilder.undirected().allowsParallelEdges(true).allowsSelfLoops(true).build();
  undirectedNetwork.addNode(N0); // isolated node: contributes no edges
  undirectedNetwork.addEdge(N1, N2, E12);
  undirectedNetwork.addEdge(N2, N1, E12_A); // adds parallel edge, won't be in Graph edges
  undirectedNetwork.addEdge(N1, N3, E13);
  undirectedNetwork.addEdge(N4, N4, E44); // self-loop
  containsExactlySanityCheck(
      undirectedNetwork.asGraph().edges(),
      EndpointPair.unordered(N1, N2),
      EndpointPair.unordered(N1, N3),
      EndpointPair.unordered(N4, N4));
}
@Test
public void endpointPair_unmodifiableView() {
  // edges() returns a live view: it reflects mutations made to the graph after the view
  // was obtained, but rejects direct modification of the view itself.
  MutableGraph<Integer> directedGraph = GraphBuilder.directed().build();
  Set<EndpointPair<Integer>> edges = directedGraph.edges();
  directedGraph.putEdge(N1, N2);
  containsExactlySanityCheck(edges, EndpointPair.ordered(N1, N2));
  directedGraph.putEdge(N2, N1);
  containsExactlySanityCheck(edges, EndpointPair.ordered(N1, N2), EndpointPair.ordered(N2, N1));
  directedGraph.removeEdge(N1, N2);
  directedGraph.removeEdge(N2, N1);
  containsExactlySanityCheck(edges); // removals are visible through the view too
  assertThrows(
      UnsupportedOperationException.class, () -> edges.add(EndpointPair.ordered(N1, N2)));
}
@Test
public void endpointPair_undirected_contains() {
  // For an undirected graph, edges().contains(...) matches unordered pairs regardless of
  // endpoint order, and never matches ordered pairs.
  MutableGraph<Integer> graph = GraphBuilder.undirected().allowsSelfLoops(true).build();
  graph.putEdge(N1, N1);
  graph.putEdge(N1, N2);
  Set<EndpointPair<Integer>> edgeSet = graph.edges();
  assertThat(edgeSet).hasSize(2);
  assertThat(edgeSet).contains(EndpointPair.unordered(N1, N1));
  assertThat(edgeSet).contains(EndpointPair.unordered(N1, N2));
  assertThat(edgeSet).contains(EndpointPair.unordered(N2, N1)); // equal to unordered(N1, N2)
  // ordered endpoints not compatible with undirected graph
  assertThat(edgeSet).doesNotContain(EndpointPair.ordered(N1, N2));
  assertThat(edgeSet).doesNotContain(EndpointPair.unordered(N2, N2)); // edge not present
  assertThat(edgeSet).doesNotContain(EndpointPair.unordered(N3, N4)); // nodes not in graph
}
@Test
public void endpointPair_directed_contains() {
  // For a directed graph, edges().contains(...) matches only ordered pairs in the exact
  // (source, target) orientation; unordered pairs are undefined for directed graphs.
  MutableGraph<Integer> directedGraph = GraphBuilder.directed().allowsSelfLoops(true).build();
  directedGraph.putEdge(N1, N1);
  directedGraph.putEdge(N1, N2);
  Set<EndpointPair<Integer>> edges = directedGraph.edges();
  assertThat(edges).hasSize(2);
  assertThat(edges).contains(EndpointPair.ordered(N1, N1));
  assertThat(edges).contains(EndpointPair.ordered(N1, N2));
  // unordered endpoints not OK for directed graph (undefined behavior)
  assertThat(edges).doesNotContain(EndpointPair.unordered(N1, N2));
  assertThat(edges).doesNotContain(EndpointPair.ordered(N2, N1)); // wrong order
  assertThat(edges).doesNotContain(EndpointPair.ordered(N2, N2)); // edge not present
  assertThat(edges).doesNotContain(EndpointPair.ordered(N3, N4)); // nodes not in graph
}
// We are testing our implementations of methods on Collection.
@SuppressWarnings({"CollectionSizeTruth", "CollectionContainsTruth"})
private static void containsExactlySanityCheck(Collection<?> collection, Object... expected) {
  // Exercise size(), contains(), and iterator() directly (rather than via Truth's
  // collection assertions) so the collection's own implementations are what is tested.
  assertThat(collection.size()).isEqualTo(expected.length);
  for (Object element : expected) {
    assertThat(collection.contains(element)).isTrue();
  }
  assertThat(ImmutableList.copyOf(collection.iterator())).containsExactlyElementsIn(expected);
}
}
| EndpointPairTest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedClassIntegrationTests.java | {
"start": 28104,
"end": 36588
} | class ____ annotated with @TestInstance(Lifecycle.PER_CLASS).",
annotationName, className, lifecycleMethodName);
assertThat(issue.source()) //
.containsInstanceOf(org.junit.platform.engine.support.descriptor.MethodSource.class);
}
@Test
void lifecycleMethodsMustNotBePrivate() {
var results = discoverTestsForClass(PrivateLifecycleMethodTestCase.class);
var issue = getOnlyElement(results.getDiscoveryIssues());
assertThat(issue.severity()) //
.isEqualTo(Severity.ERROR);
assertThat(issue.message()) //
.isEqualTo(
"@BeforeParameterizedClassInvocation method 'private static void %s.beforeParameterizedClassInvocation()' must not be private.",
PrivateLifecycleMethodTestCase.class.getName());
assertThat(issue.source()) //
.containsInstanceOf(org.junit.platform.engine.support.descriptor.MethodSource.class);
}
@Test
void lifecycleMethodsMustNotDeclareReturnType() {
var results = discoverTestsForClass(NonVoidLifecycleMethodTestCase.class);
var issue = getOnlyElement(results.getDiscoveryIssues());
assertThat(issue.severity()) //
.isEqualTo(Severity.ERROR);
assertThat(issue.message()) //
.isEqualTo(
"@BeforeParameterizedClassInvocation method 'static int %s.beforeParameterizedClassInvocation()' must not return a value.",
NonVoidLifecycleMethodTestCase.class.getName());
assertThat(issue.source()) //
.containsInstanceOf(org.junit.platform.engine.support.descriptor.MethodSource.class);
}
@Test
void lifecycleMethodsFromSuperclassAreWrappedAroundLifecycleMethodsFromTestClass() {
var results = executeTestsForClass(LifecycleMethodsFromSuperclassTestCase.class);
results.allEvents().assertStatistics(stats -> stats.started(4).succeeded(4));
assertThat(allReportEntries(results).map(it -> it.get("value"))) //
.containsExactly("zzz_before", "aaa_before", "test", "aaa_after", "zzz_after");
}
@Test
void exceptionsInLifecycleMethodsArePropagated() {
var results = executeTestsForClass(LifecycleMethodsErrorHandlingTestCase.class);
results.allEvents().assertStatistics(stats -> stats.started(3).failed(1).succeeded(2));
results.containerEvents().assertThatEvents() //
.haveExactly(1, finishedWithFailure( //
message("zzz_before"), //
suppressed(0, message("aaa_after")), //
suppressed(1, message("zzz_after"))));
assertThat(allReportEntries(results).map(it -> it.get("value"))) //
.containsExactly("zzz_before", "aaa_after", "zzz_after");
}
@ParameterizedTest
@ValueSource(classes = { LifecycleMethodArgumentInjectionWithConstructorInjectionTestCase.class,
LifecycleMethodArgumentInjectionWithFieldInjectionTestCase.class })
void supportsInjectingArgumentsIntoLifecycleMethods(Class<?> classTemplateClass) {
var results = executeTestsForClass(classTemplateClass);
results.allEvents().assertStatistics(stats -> stats.started(5).succeeded(5));
}
@ParameterizedTest
@ValueSource(classes = { CustomConverterAnnotationsWithLifecycleMethodsAndConstructorInjectionTestCase.class,
CustomConverterAnnotationsWithLifecycleMethodsAndFieldInjectionTestCase.class })
void convertersHaveAccessToTheirAnnotations(Class<?> classTemplateClass) {
var results = executeTestsForClass(classTemplateClass);
results.allEvents().assertStatistics(stats -> stats.started(4).succeeded(4));
}
@ParameterizedTest
@ValueSource(classes = { ValidLifecycleMethodInjectionWithConstructorInjectionTestCase.class,
ValidLifecycleMethodInjectionWithFieldInjectionTestCase.class })
void supportsMixedInjectionsForLifecycleMethods(Class<?> classTemplateClass) {
var results = executeTestsForClass(classTemplateClass);
results.allEvents().assertStatistics(stats -> stats.started(4).succeeded(4));
}
@Test
void failsForLifecycleMethodWithInvalidParameters() {
var results = executeTestsForClass(LifecycleMethodWithInvalidParametersTestCase.class);
var expectedMessage = withPlatformSpecificLineSeparator(
"""
2 configuration errors:
- parameter 'value' with index 0 is incompatible with the parameter declared on the parameterized class: \
expected type 'int' but found 'long'
- parameter 'anotherValue' with index 1 must not be annotated with @ConvertWith""");
var failedResult = getFirstTestExecutionResult(results.containerEvents().failed());
assertThat(failedResult.getThrowable().orElseThrow()) //
.hasMessage(
"Invalid @BeforeParameterizedClassInvocation lifecycle method declaration: static void %s.before(long,int)".formatted(
LifecycleMethodWithInvalidParametersTestCase.class.getName())) //
.cause().hasMessage(expectedMessage);
}
@Test
void failsForLifecycleMethodWithInvalidParameterOrder() {
var results = executeTestsForClass(LifecycleMethodWithInvalidParameterOrderTestCase.class);
results.containerEvents().assertThatEvents() //
.haveExactly(1, finishedWithFailure(message(
("@BeforeParameterizedClassInvocation method [static void %s.before(%s,int,%s)] declares formal parameters in an invalid order: "
+ "argument aggregators must be declared after any indexed arguments and before any arguments resolved by another ParameterResolver.").formatted(
LifecycleMethodWithInvalidParameterOrderTestCase.class.getName(),
ArgumentsAccessor.class.getName(), ArgumentsAccessor.class.getName()))));
}
@Test
void failsForLifecycleMethodWithParameterAfterAggregator() {
var results = executeTestsForClass(LifecycleMethodWithParameterAfterAggregatorTestCase.class);
results.containerEvents().assertThatEvents() //
.haveExactly(1, finishedWithFailure(
message(it -> it.contains("No ParameterResolver registered for parameter [int value]"))));
}
@Test
void lifecycleMethodsMustNotBeDeclaredInRegularTestClasses() {
var testClassName = RegularClassWithLifecycleMethodsTestCase.class.getName();
var results = discoverTestsForClass(RegularClassWithLifecycleMethodsTestCase.class);
assertThat(results.getDiscoveryIssues()).hasSize(2);
var issues = results.getDiscoveryIssues().stream() //
.sorted(comparing(DiscoveryIssue::message)) //
.toList();
assertThat(issues) //
.extracting(DiscoveryIssue::severity) //
.containsOnly(Severity.ERROR);
assertThat(issues) //
.extracting(DiscoveryIssue::source) //
.extracting(Optional::orElseThrow) //
.allMatch(org.junit.platform.engine.support.descriptor.MethodSource.class::isInstance);
assertThat(issues.getFirst().message()) //
.isEqualTo(
"@AfterParameterizedClassInvocation method 'static void %s.after()' must not be declared in test class '%s' because it is not annotated with @ParameterizedClass.",
testClassName, testClassName);
assertThat(issues.getLast().message()) //
.isEqualTo(
"@BeforeParameterizedClassInvocation method 'static void %s.before()' must not be declared in test class '%s' because it is not annotated with @ParameterizedClass.",
testClassName, testClassName);
}
}
private static String withPlatformSpecificLineSeparator(String textBlock) {
return textBlock.replace("\n", System.lineSeparator());
}
// -------------------------------------------------------------------
private static Stream<String> invocationDisplayNames(EngineExecutionResults results) {
return results.containerEvents() //
.started() //
.filter(uniqueId(lastSegmentType(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE))::matches) //
.map(Event::getTestDescriptor) //
.map(TestDescriptor::getDisplayName);
}
private static Stream<Map<String, String>> allReportEntries(EngineExecutionResults results) {
return results.allEvents().reportingEntryPublished() //
.map(e -> e.getRequiredPayload(ReportEntry.class)) //
.map(ReportEntry::getKeyValuePairs);
}
private static Condition<UniqueId> lastSegmentType(@SuppressWarnings("SameParameterValue") String segmentType) {
return new Condition<>(it -> segmentType.equals(it.getLastSegment().getType()), "last segment type is '%s'",
segmentType);
}
private static TestExecutionResult getFirstTestExecutionResult(Events events) {
return events.stream() //
.findFirst() //
.flatMap(Event::getPayload) //
.map(TestExecutionResult.class::cast) //
.orElseThrow();
}
@SuppressWarnings("JUnitMalformedDeclaration")
@ParameterizedClassWithNegativeAndPositiveValue
static | is |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java | {
"start": 1633,
"end": 8686
} | class ____ extends ESTestCase {
/**
* test version lookup actually works
*/
public void testSimple() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
// to have deleted docs
.setMergePolicy(NoMergePolicy.INSTANCE)
);
Document doc = new Document();
doc.add(new StringField(IdFieldMapper.NAME, "6", Field.Store.YES));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.NAME, randomNonNegativeLong()));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, randomLongBetween(1, Long.MAX_VALUE)));
writer.addDocument(doc);
writer.addDocument(new Document());
DirectoryReader reader = DirectoryReader.open(writer);
LeafReaderContext segment = reader.leaves().get(0);
PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false);
// found doc
DocIdAndVersion result = lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment);
assertNotNull(result);
assertEquals(87, result.version);
assertEquals(0, result.docId);
// not found doc
assertNull(lookup.lookupVersion(new BytesRef("7"), randomBoolean(), segment));
// deleted doc
writer.deleteDocuments(new Term(IdFieldMapper.NAME, "6"));
reader.close();
reader = DirectoryReader.open(writer);
segment = reader.leaves().get(0);
lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false);
assertNull(lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment));
reader.close();
writer.close();
dir.close();
}
/**
* test version lookup with two documents matching the ID
*/
public void testTwoDocuments() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER).setMergePolicy(NoMergePolicy.INSTANCE));
Document doc = new Document();
doc.add(new StringField(IdFieldMapper.NAME, "6", Field.Store.YES));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.NAME, randomNonNegativeLong()));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, randomLongBetween(1, Long.MAX_VALUE)));
writer.addDocument(doc);
writer.addDocument(doc);
writer.addDocument(new Document());
DirectoryReader reader = DirectoryReader.open(writer);
LeafReaderContext segment = reader.leaves().get(0);
PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false);
// return the last doc when there are duplicates
DocIdAndVersion result = lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment);
assertNotNull(result);
assertEquals(87, result.version);
assertEquals(1, result.docId);
// delete the first doc only
assertTrue(writer.tryDeleteDocument(reader, 0) >= 0);
reader.close();
reader = DirectoryReader.open(writer);
segment = reader.leaves().get(0);
lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false);
result = lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment);
assertNotNull(result);
assertEquals(87, result.version);
assertEquals(1, result.docId);
// delete both docs
assertTrue(writer.tryDeleteDocument(reader, 1) >= 0);
reader.close();
reader = DirectoryReader.open(writer);
segment = reader.leaves().get(0);
lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false);
assertNull(lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment));
reader.close();
writer.close();
dir.close();
}
public void testLoadTimestampRange() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER).setMergePolicy(NoMergePolicy.INSTANCE));
Document doc = new Document();
doc.add(new StringField(IdFieldMapper.NAME, "6", Field.Store.YES));
doc.add(new LongPoint(DataStream.TIMESTAMP_FIELD_NAME, 1_000));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.NAME, randomNonNegativeLong()));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, randomLongBetween(1, Long.MAX_VALUE)));
writer.addDocument(doc);
doc = new Document();
doc.add(new StringField(IdFieldMapper.NAME, "8", Field.Store.YES));
doc.add(new LongPoint(DataStream.TIMESTAMP_FIELD_NAME, 1_000_000));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 1));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.NAME, randomNonNegativeLong()));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, randomLongBetween(1, Long.MAX_VALUE)));
writer.addDocument(doc);
DirectoryReader reader = DirectoryReader.open(writer);
LeafReaderContext segment = reader.leaves().get(0);
PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), true);
assertTrue(lookup.loadedTimestampRange);
assertEquals(lookup.minTimestamp, 1_000L);
assertEquals(lookup.maxTimestamp, 1_000_000L);
lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false);
assertFalse(lookup.loadedTimestampRange);
assertEquals(lookup.minTimestamp, 0L);
assertEquals(lookup.maxTimestamp, Long.MAX_VALUE);
reader.close();
writer.close();
dir.close();
}
public void testLoadTimestampRangeWithDeleteTombstone() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER).setMergePolicy(NoMergePolicy.INSTANCE));
var randomSeqNoIndexOptions = randomFrom(SeqNoFieldMapper.SeqNoIndexOptions.values());
writer.addDocument(ParsedDocument.deleteTombstone(randomSeqNoIndexOptions, "_id").docs().get(0));
DirectoryReader reader = DirectoryReader.open(writer);
LeafReaderContext segment = reader.leaves().get(0);
PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), true);
assertTrue(lookup.loadedTimestampRange);
assertEquals(lookup.minTimestamp, 0L);
assertEquals(lookup.maxTimestamp, Long.MAX_VALUE);
reader.close();
writer.close();
dir.close();
}
}
| VersionLookupTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java | {
"start": 18199,
"end": 18546
} | class ____ extends DisiWrapper {
final int filterOrd;
FilterMatchingDisiWrapper(Scorer scorer, int ord) {
super(scorer, false);
this.filterOrd = ord;
}
boolean checkDocForMatch(int doc) throws IOException {
return true;
}
}
private static | FilterMatchingDisiWrapper |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/method/MethodAndClassConfig.java | {
"start": 916,
"end": 1188
} | class ____ {
private String value;
public String getValue() {
return this.value;
}
public void setValue(String value) {
this.value = value;
}
@TestConfigurationProperties("conflict")
public Foo foo() {
return new Foo();
}
public static | MethodAndClassConfig |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/processor/StateRestoreCallback.java | {
"start": 1023,
"end": 1103
} | interface ____ {
void restore(byte[] key, byte[] value);
}
| StateRestoreCallback |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/creator/JSONCreatorTest_float.java | {
"start": 298,
"end": 1152
} | class ____ extends TestCase {
public void test_create() throws Exception {
Entity entity = new Entity(123.45F, "菜姐");
String text = JSON.toJSONString(entity);
Entity entity2 = JSON.parseObject(text, Entity.class);
Assert.assertTrue(entity.getId() == entity2.getId());
Assert.assertEquals(entity.getName(), entity2.getName());
}
public void test_create_2() throws Exception {
Entity entity = new Entity(123.45F, "菜姐");
String text = JSON.toJSONString(entity);
ParserConfig config = new ParserConfig();
Entity entity2 = JSON.parseObject(text, Entity.class, config, 0);
Assert.assertTrue(entity.getId() == entity2.getId());
Assert.assertEquals(entity.getName(), entity2.getName());
}
public static | JSONCreatorTest_float |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/StatementSet.java | {
"start": 1253,
"end": 3063
} | interface ____ extends Explainable<StatementSet>, Compilable, Executable {
/** Adds a {@link TablePipeline}. */
StatementSet add(TablePipeline tablePipeline);
/** Adds an {@code INSERT INTO} SQL statement. */
StatementSet addInsertSql(String statement);
/**
* Shorthand for {@code statementSet.add(table.insertInto(targetPath))}.
*
* @see #add(TablePipeline)
* @see Table#insertInto(String)
*/
StatementSet addInsert(String targetPath, Table table);
/**
* Shorthand for {@code statementSet.add(table.insertInto(targetPath, overwrite))}.
*
* @see #add(TablePipeline)
* @see Table#insertInto(String, boolean)
*/
StatementSet addInsert(String targetPath, Table table, boolean overwrite);
/**
* Shorthand for {@code statementSet.add(table.insertInto(targetDescriptor))}.
*
* @see #add(TablePipeline)
* @see Table#insertInto(TableDescriptor)
*/
StatementSet addInsert(TableDescriptor targetDescriptor, Table table);
/**
* Shorthand for {@code statementSet.add(table.insertInto(targetDescriptor, overwrite))}.
*
* @see #add(TablePipeline)
* @see Table#insertInto(TableDescriptor, boolean)
*/
StatementSet addInsert(TableDescriptor targetDescriptor, Table table, boolean overwrite);
/**
* {@inheritDoc}
*
* <p>This method executes all statements as one job.
*
* <p>The added statements will be cleared after calling this method.
*/
@Override
TableResult execute();
/**
* {@inheritDoc}
*
* <p>This method compiles all statements into a {@link CompiledPlan} that can be executed as
* one job.
*/
@Override
@Experimental
CompiledPlan compilePlan() throws TableException;
}
| StatementSet |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/graal/GraalVM.java | {
"start": 4298,
"end": 11518
} | class ____ implements Comparable<Version> {
public static final Version VERSION_23_0_0 = new Version("GraalVM 23.0.0", "23.0.0", "17", Distribution.GRAALVM);
public static final Version VERSION_23_1_0 = new Version("GraalVM 23.1.0", "23.1.0", "21", Distribution.GRAALVM);
public static final Version VERSION_24_2_0 = new Version("GraalVM 24.2.0", "24.2.0", "24", Distribution.GRAALVM);
public static final Version VERSION_25_0_0 = new Version("GraalVM 25.0.0", "25.0.0", "25", Distribution.GRAALVM);
// Temporarily work around https://github.com/quarkusio/quarkus/issues/36246,
// till we have a consensus on how to move forward in
// https://github.com/quarkusio/quarkus/issues/34161
protected static final Map<String, String> GRAAL_MAPPING = Map.of(
"21", "23.1",
"22", "24.0",
"23", "24.1",
"24", "24.2");
// Mapping of community major.minor pair to the JDK major version based on
// GRAAL_MAPPING
private static final Map<String, String> MANDREL_JDK_REV_MAP;
static {
Map<String, String> reverseMap = new HashMap<>(GRAAL_MAPPING.size());
for (Entry<String, String> entry : GRAAL_MAPPING.entrySet()) {
reverseMap.put(entry.getValue(), entry.getKey());
}
MANDREL_JDK_REV_MAP = Collections.unmodifiableMap(reverseMap);
}
/**
* The minimum version of GraalVM supported by Quarkus.
* Versions prior to this are expected to cause major issues.
*/
public static final Version MINIMUM = VERSION_23_0_0;
/**
* The current version of GraalVM supported by Quarkus.
* This version is the one actively being tested and is expected to give the best experience.
*/
public static final Version CURRENT = VERSION_23_1_0;
/**
* The minimum version of GraalVM officially supported by Quarkus.
* Versions prior to this are expected to work but are not given the same level of testing or priority.
*/
public static final Version MINIMUM_SUPPORTED = CURRENT;
private static final String DEFAULT_JDK_VERSION = "21";
protected final String fullVersion;
public final Runtime.Version javaVersion;
protected final Distribution distribution;
private int[] versions;
private String suffix;
Version(String fullVersion, String version, Distribution distro) {
this(fullVersion, version,
distro == Distribution.MANDREL || distro == Distribution.LIBERICA ? communityJDKvers(version)
: DEFAULT_JDK_VERSION,
distro);
}
Version(String fullVersion, String version, String javaVersion, Distribution distro) {
this(fullVersion, version, Runtime.Version.parse(javaVersion), distro);
}
protected Version(String fullVersion, String version, Runtime.Version javaVersion, Distribution distro) {
this.fullVersion = fullVersion;
breakdownVersion(version);
this.javaVersion = javaVersion;
this.distribution = distro;
}
private void breakdownVersion(String version) {
int dash = version.indexOf('-');
if (dash != -1) {
this.suffix = version.substring(dash + 1);
version = version.substring(0, dash);
}
this.versions = Arrays.stream(version.split("\\.")).mapToInt(Integer::parseInt).toArray();
}
/*
* Reconstruct the JDK version from the given GraalVM community version (Mandrel or Liberica)
*/
private static String communityJDKvers(String communityVersion) {
try {
String[] parts = communityVersion.split("\\.", 4);
int major = Integer.parseInt(parts[0]);
int minor = Integer.parseInt(parts[1]);
if ((major == 23 && minor > 0) ||
major > 23) {
String mandrelMajorMinor = String.format("%s.%s", parts[0], parts[1]);
// If we don't find a reverse mapping we use a JDK version >= 25, thus
// the feature version is the first part of the quadruple.
String feature = MANDREL_JDK_REV_MAP.getOrDefault(mandrelMajorMinor, parts[0]);
// Heuristic: The update version of Mandrel and the JDK match.
// Interim is usually 0 for the JDK version.
// Skip trailing zeroes, as they are not supported by java.lang.Runtime.Version.parse.
if ("0".equals(parts[2])) {
return feature;
}
return String.format("%s.%s.%s", feature, "0", parts[2]);
}
} catch (Throwable e) {
// fall-through do default
log.warnf("Failed to parse JDK version from GraalVM version: %s. Defaulting to currently supported version %s ",
communityVersion,
DEFAULT_JDK_VERSION);
}
return DEFAULT_JDK_VERSION;
}
@Override
public int compareTo(Version o) {
return compareTo(o.versions);
}
public int compareTo(int[] versions) {
int i = 0;
for (; i < this.versions.length; i++) {
if (i >= versions.length) {
if (this.versions[i] != 0) {
return 1;
}
} else if (this.versions[i] != versions[i]) {
return this.versions[i] - versions[i];
}
}
for (; i < versions.length; i++) {
if (versions[i] != 0) {
return -1;
}
}
return 0;
}
/**
* Returns the Mandrel/GraalVM version as a string. e.g. 21.3.0-rc1
*/
public String getVersionAsString() {
String version = Arrays.stream(versions).mapToObj(Integer::toString).collect(Collectors.joining("."));
if (suffix != null) {
return version + "-" + suffix;
}
return version;
}
public String getMajorMinorAsString() {
if (versions.length >= 2) {
return versions[0] + "." + versions[1];
}
return versions[0] + ".0";
}
@Override
public String toString() {
return "Version{" +
"version="
+ getVersionAsString() +
", fullVersion=" + fullVersion +
", distribution=" + distribution +
", javaVersion=" + javaVersion +
'}';
}
public static Version getCurrent() {
String vendorVersion = System.getProperty("org.graalvm.vendorversion");
return VersionParseHelper.parse(vendorVersion);
}
}
public | Version |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SplunkEndpointBuilderFactory.java | {
"start": 66687,
"end": 67004
} | class ____ extends AbstractEndpointBuilder implements SplunkEndpointBuilder, AdvancedSplunkEndpointBuilder {
public SplunkEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new SplunkEndpointBuilderImpl(path);
}
} | SplunkEndpointBuilderImpl |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/metrics/MetricsCapabilityBuildItem.java | {
"start": 1063,
"end": 2361
} | interface ____<String> {
boolean isSupported(String value);
}
final String path;
final MetricsCapability<String> metricsCapability;
public MetricsCapabilityBuildItem(MetricsCapability<String> metricsCapability) {
this(metricsCapability, null);
}
public MetricsCapabilityBuildItem(MetricsCapability<String> metricsCapability, String path) {
this.metricsCapability = metricsCapability;
this.path = path;
}
/**
* Test for a known metrics system to allow selective initialization of metrics
* based using a known API. Avoid using deployment module artifacts. Ensure that
* metrics API dependencies remain optional / compile-time only.
*
* @return true if this factory supports the named metrics system. Arbitrary
* strings are allowed. Constants are present for a few.
* @see io.quarkus.runtime.metrics.MetricsFactory#MICROMETER
* @see io.quarkus.runtime.metrics.MetricsFactory#MP_METRICS
*/
public boolean metricsSupported(String name) {
return metricsCapability.isSupported(name);
}
/**
* @return the configured Metrics Endpoint (if an endpoint is enabled) or null
*/
public String metricsEndpoint() {
return path;
}
}
| MetricsCapability |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java | {
"start": 2019,
"end": 42035
} | class ____ extends ESTestCase {
private IngestDocument ingestDocument;
private List<SimulateProcessorResult> resultList;
@Before
public void init() {
ingestDocument = TestIngestDocument.emptyIngestDocument();
resultList = new ArrayList<>();
}
public void testActualProcessor() throws Exception {
TestProcessor actualProcessor = new TestProcessor(ingestDocument -> {});
TrackingResultProcessor trackingProcessor = new TrackingResultProcessor(false, actualProcessor, null, resultList, true);
trackingProcessor.execute(ingestDocument, (result, e) -> {});
SimulateProcessorResult expectedResult = new SimulateProcessorResult(
actualProcessor.getType(),
actualProcessor.getTag(),
actualProcessor.getDescription(),
ingestDocument,
null
);
assertThat(actualProcessor.getInvokedCounter(), equalTo(1));
assertThat(resultList.size(), equalTo(1));
assertIngestDocument(resultList.get(0).getIngestDocument(), expectedResult.getIngestDocument());
assertThat(resultList.get(0).getFailure(), nullValue());
assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag()));
}
public void testActualCompoundProcessorWithoutOnFailure() throws Exception {
RuntimeException exception = new RuntimeException("processor failed");
TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; });
CompoundProcessor actualProcessor = new CompoundProcessor(testProcessor);
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
Exception[] holder = new Exception[1];
trackingProcessor.execute(ingestDocument, (result, e) -> holder[0] = e);
assertThat(((IngestProcessorException) holder[0]).getRootCause().getMessage(), equalTo(exception.getMessage()));
SimulateProcessorResult expectedFirstResult = new SimulateProcessorResult(
testProcessor.getType(),
testProcessor.getTag(),
actualProcessor.getDescription(),
ingestDocument,
null
);
assertThat(testProcessor.getInvokedCounter(), equalTo(1));
assertThat(resultList.size(), equalTo(1));
assertThat(resultList.get(0).getIngestDocument(), nullValue());
assertThat(resultList.get(0).getFailure(), equalTo(exception));
assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFirstResult.getProcessorTag()));
}
public void testActualCompoundProcessorWithOnFailure() throws Exception {
    // Nested compound processors where both the main chain and the inner on_failure chain
    // fail, so each of the two failing invocations is followed by a successful on_failure
    // invocation — four tracked results in total, alternating failure/success.
    RuntimeException exception = new RuntimeException("fail");
    TestProcessor failProcessor = new TestProcessor("fail", "test", null, exception);
    TestProcessor onFailureProcessor = new TestProcessor("success", "test", null, ingestDocument -> {});
    CompoundProcessor actualProcessor = new CompoundProcessor(
        false,
        List.of(new CompoundProcessor(false, List.of(failProcessor, onFailureProcessor), List.of(onFailureProcessor, failProcessor))),
        List.of(onFailureProcessor)
    );
    CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
    trackingProcessor.execute(ingestDocument, (result, e) -> {});

    SimulateProcessorResult expectedFailResult = new SimulateProcessorResult(
        failProcessor.getType(),
        failProcessor.getTag(),
        failProcessor.getDescription(),
        ingestDocument,
        null
    );
    // Fix: the success result is built from onFailureProcessor, so its description must
    // come from onFailureProcessor (previously failProcessor.getDescription(), a
    // copy/paste slip — type and tag were already taken from onFailureProcessor).
    SimulateProcessorResult expectedSuccessResult = new SimulateProcessorResult(
        onFailureProcessor.getType(),
        onFailureProcessor.getTag(),
        onFailureProcessor.getDescription(),
        ingestDocument,
        null
    );

    // Each processor runs once in the main chain and once in the on_failure chain.
    assertThat(failProcessor.getInvokedCounter(), equalTo(2));
    assertThat(onFailureProcessor.getInvokedCounter(), equalTo(2));
    assertThat(resultList.size(), equalTo(4));

    // 1st result: the main-chain failure (no document, failure recorded).
    assertThat(resultList.get(0).getIngestDocument(), nullValue());
    assertThat(resultList.get(0).getFailure(), equalTo(exception));
    assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag()));

    // 2nd result: on_failure handler ran successfully; ingest metadata carries the
    // on_failure message/type/tag of the processor that failed.
    Map<String, Object> metadata = resultList.get(1).getIngestDocument().getIngestMetadata();
    assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail"));
    assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test"));
    assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail"));
    assertThat(resultList.get(1).getFailure(), nullValue());
    assertThat(resultList.get(1).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag()));

    // 3rd result: the inner on_failure chain's failProcessor fails again.
    assertThat(resultList.get(2).getIngestDocument(), nullValue());
    assertThat(resultList.get(2).getFailure(), equalTo(exception));
    assertThat(resultList.get(2).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag()));

    // 4th result: the outer on_failure handler recovers, again recording failure metadata.
    metadata = resultList.get(3).getIngestDocument().getIngestMetadata();
    assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail"));
    assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test"));
    assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail"));
    assertThat(resultList.get(3).getFailure(), nullValue());
    assertThat(resultList.get(3).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag()));
}
public void testActualCompoundProcessorWithOnFailureAndTrueCondition() throws Exception {
// A conditional processor whose script evaluates to true wraps a failing processor;
// the failure is then handled by an on_failure processor. Verifies that the tracked
// failure result also carries the conditional (script name, true) that gated it.
String scriptName = "conditionalScript";
// MockScriptEngine returns true for this script, so the wrapped processor executes.
ScriptService scriptService = new ScriptService(
Settings.builder().build(),
Map.of(Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Map.of(scriptName, ctx -> true), Map.of())),
new HashMap<>(ScriptModule.CORE_CONTEXTS),
() -> 1L,
TestProjectResolvers.singleProject(randomProjectIdOrDefault())
);
RuntimeException exception = new RuntimeException("fail");
TestProcessor failProcessor = new TestProcessor("fail", "test", null, exception);
ConditionalProcessor conditionalProcessor = new ConditionalProcessor(
randomAlphaOfLength(10),
null,
new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Map.of()),
scriptService,
failProcessor
);
TestProcessor onFailureProcessor = new TestProcessor("success", "test", null, ingestDocument -> {});
CompoundProcessor actualProcessor = new CompoundProcessor(false, List.of(conditionalProcessor), List.of(onFailureProcessor));
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
trackingProcessor.execute(ingestDocument, (result, e) -> {});
SimulateProcessorResult expectedFailResult = new SimulateProcessorResult(
failProcessor.getType(),
failProcessor.getTag(),
failProcessor.getDescription(),
ingestDocument,
null
);
SimulateProcessorResult expectedSuccessResult = new SimulateProcessorResult(
onFailureProcessor.getType(),
onFailureProcessor.getTag(),
onFailureProcessor.getDescription(),
ingestDocument,
null
);
// Condition was true, so both the failing processor and its on_failure handler ran once.
assertThat(failProcessor.getInvokedCounter(), equalTo(1));
assertThat(onFailureProcessor.getInvokedCounter(), equalTo(1));
assertThat(resultList.size(), equalTo(2));
// 1st result: the failure, annotated with the conditional that evaluated to true.
assertThat(resultList.get(0).getIngestDocument(), nullValue());
assertThat(resultList.get(0).getFailure(), equalTo(exception));
assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag()));
assertThat(resultList.get(0).getConditionalWithResult().v1(), equalTo(scriptName));
assertThat(resultList.get(0).getConditionalWithResult().v2(), is(Boolean.TRUE));
// 2nd result: on_failure handler succeeded; ingest metadata records the original failure.
Map<String, Object> metadata = resultList.get(1).getIngestDocument().getIngestMetadata();
assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail"));
assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test"));
assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail"));
assertThat(resultList.get(1).getFailure(), nullValue());
assertThat(resultList.get(1).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag()));
}
public void testActualCompoundProcessorWithIgnoreFailure() throws Exception {
    // With ignore_failure=true the pipeline proceeds past a throwing processor, but the
    // tracked result still records both the document and the exception instance.
    RuntimeException exception = new RuntimeException("processor failed");
    TestProcessor throwing = new TestProcessor(doc -> { throw exception; });
    CompoundProcessor ignoringCompound = new CompoundProcessor(true, List.of(throwing), List.of());
    CompoundProcessor tracking = decorate(ignoringCompound, null, resultList);
    tracking.execute(ingestDocument, (result, e) -> {});

    SimulateProcessorResult expected = new SimulateProcessorResult(
        throwing.getType(),
        throwing.getTag(),
        throwing.getDescription(),
        ingestDocument,
        null
    );

    assertThat(throwing.getInvokedCounter(), equalTo(1));
    assertThat(resultList.size(), equalTo(1));
    SimulateProcessorResult tracked = resultList.get(0);
    assertIngestDocument(tracked.getIngestDocument(), expected.getIngestDocument());
    assertThat(tracked.getFailure(), sameInstance(exception));
    assertThat(tracked.getProcessorTag(), equalTo(expected.getProcessorTag()));
}
public void testActualCompoundProcessorWithFalseConditional() throws Exception {
// Three processors in a chain; the middle one is gated by a conditional that evaluates
// to false, so only key1 and key3 are set and the middle result records (script, false).
String key1 = randomAlphaOfLength(10);
String key2 = randomAlphaOfLength(10);
String key3 = randomAlphaOfLength(10);
String scriptName = "conditionalScript";
// MockScriptEngine returns false for this script, so the gated processor is skipped.
ScriptService scriptService = new ScriptService(
Settings.builder().build(),
Map.of(
Script.DEFAULT_SCRIPT_LANG,
new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Map.of(scriptName, ctx -> false), Map.of())
),
new HashMap<>(ScriptModule.CORE_CONTEXTS),
() -> 1L,
TestProjectResolvers.singleProject(randomProjectIdOrDefault())
);
CompoundProcessor compoundProcessor = new CompoundProcessor(new TestProcessor(ingestDocument -> {
ingestDocument.setFieldValue(key1, randomInt());
}),
new ConditionalProcessor(
randomAlphaOfLength(10),
null,
new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Map.of()),
scriptService,
new TestProcessor(ingestDocument -> {
ingestDocument.setFieldValue(key2, randomInt());
})
),
new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })
);
CompoundProcessor trackingProcessor = decorate(compoundProcessor, null, resultList);
trackingProcessor.execute(ingestDocument, (result, e) -> {});
SimulateProcessorResult expectedResult = new SimulateProcessorResult(
compoundProcessor.getType(),
compoundProcessor.getTag(),
compoundProcessor.getDescription(),
ingestDocument,
null
);
// One tracked result per processor, including the skipped conditional one.
assertThat(resultList.size(), equalTo(3));
// After the first processor: only key1 is present.
assertTrue(resultList.get(0).getIngestDocument().hasField(key1));
assertFalse(resultList.get(0).getIngestDocument().hasField(key2));
assertFalse(resultList.get(0).getIngestDocument().hasField(key3));
// The skipped processor's result records the conditional and its false outcome.
assertThat(resultList.get(1).getConditionalWithResult().v1(), equalTo(scriptName));
assertThat(resultList.get(1).getConditionalWithResult().v2(), is(Boolean.FALSE));
// Final result: key2 was never set because its processor did not run.
assertTrue(resultList.get(2).getIngestDocument().hasField(key1));
assertFalse(resultList.get(2).getIngestDocument().hasField(key2));
assertTrue(resultList.get(2).getIngestDocument().hasField(key3));
assertIngestDocument(resultList.get(2).getIngestDocument(), expectedResult.getIngestDocument());
assertThat(resultList.get(2).getFailure(), nullValue());
assertThat(resultList.get(2).getProcessorTag(), nullValue());
}
public void testActualPipelineProcessor() throws Exception {
// A pipeline processor delegating to a pipeline of three field-setting processors.
// Expects one tracked result for the pipeline processor itself plus one per inner
// processor, with the document accumulating key1, key2, key3 in order.
String pipelineId = "pipeline1";
IngestService ingestService = createIngestService();
Map<String, Object> pipelineConfig = new HashMap<>();
pipelineConfig.put("name", pipelineId);
PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
String key1 = randomAlphaOfLength(10);
String key2 = randomAlphaOfLength(10);
String key3 = randomAlphaOfLength(10);
Pipeline pipeline = new Pipeline(pipelineId, null, null, null, new CompoundProcessor(new TestProcessor(ingestDocument -> {
ingestDocument.setFieldValue(key1, randomInt());
}), new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); }), new TestProcessor(ingestDocument -> {
ingestDocument.setFieldValue(key3, randomInt());
})));
when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
PipelineProcessor pipelineProcessor = factory.create(Map.of(), null, null, pipelineConfig, null);
CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
trackingProcessor.execute(ingestDocument, (result, e) -> {});
SimulateProcessorResult expectedResult = new SimulateProcessorResult(
actualProcessor.getType(),
actualProcessor.getTag(),
actualProcessor.getDescription(),
ingestDocument,
null
);
// The simulated run records which pipeline processed the document.
expectedResult.getIngestDocument().getIngestMetadata().put("pipeline", pipelineId);
verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId);
assertThat(resultList.size(), equalTo(4));
// 1st result: the pipeline processor itself (no conditional attached).
assertNull(resultList.get(0).getConditionalWithResult());
assertThat(resultList.get(0).getType(), equalTo("pipeline"))
;
// Results 2-3: fields appear cumulatively as each inner processor runs.
assertTrue(resultList.get(1).getIngestDocument().hasField(key1));
assertFalse(resultList.get(1).getIngestDocument().hasField(key2));
assertFalse(resultList.get(1).getIngestDocument().hasField(key3));
assertTrue(resultList.get(2).getIngestDocument().hasField(key1));
assertTrue(resultList.get(2).getIngestDocument().hasField(key2));
assertFalse(resultList.get(2).getIngestDocument().hasField(key3));
// Final result matches the fully-processed document.
assertIngestDocument(resultList.get(3).getIngestDocument(), expectedResult.getIngestDocument());
assertThat(resultList.get(3).getFailure(), nullValue());
assertThat(resultList.get(3).getProcessorTag(), nullValue());
}
public void testActualPipelineProcessorWithTrueConditional() throws Exception {
    // pipeline1 sets key1, then conditionally (script evaluates true) invokes pipeline2
    // (which sets key2), then sets key3. Verifies both pipelines are entered and the
    // conditional result (script name, true) is tracked on the nested pipeline result.
    String pipelineId1 = "pipeline1";
    String pipelineId2 = "pipeline2";
    IngestService ingestService = createIngestService();
    Map<String, Object> pipelineConfig0 = new HashMap<>();
    pipelineConfig0.put("name", pipelineId1);
    // Fix: removed unused local pipelineConfig1 (declared but never referenced).
    Map<String, Object> pipelineConfig2 = new HashMap<>();
    pipelineConfig2.put("name", pipelineId2);
    PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
    String key1 = randomAlphaOfLength(10);
    String key2 = randomAlphaOfLength(10);
    String key3 = randomAlphaOfLength(10);
    String scriptName = "conditionalScript";
    // MockScriptEngine returns true, so the nested pipeline processor executes.
    ScriptService scriptService = new ScriptService(
        Settings.builder().build(),
        Map.of(Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Map.of(scriptName, ctx -> true), Map.of())),
        new HashMap<>(ScriptModule.CORE_CONTEXTS),
        () -> 1L,
        TestProjectResolvers.singleProject(randomProjectIdOrDefault())
    );
    Pipeline pipeline1 = new Pipeline(pipelineId1, null, null, null, new CompoundProcessor(new TestProcessor(ingestDocument -> {
        ingestDocument.setFieldValue(key1, randomInt());
    }),
        new ConditionalProcessor(
            randomAlphaOfLength(10),
            null,
            new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Map.of()),
            scriptService,
            factory.create(Map.of(), "pipeline1", null, pipelineConfig2, null)
        ),
        new TestProcessor(ingestDocument -> {
            ingestDocument.setFieldValue(key3, randomInt());
        })
    ));
    Pipeline pipeline2 = new Pipeline(pipelineId2, null, null, null, new CompoundProcessor(new TestProcessor(ingestDocument -> {
        ingestDocument.setFieldValue(key2, randomInt());
    })));
    when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1);
    when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2);
    PipelineProcessor pipelineProcessor = factory.create(Map.of(), "pipeline0", null, pipelineConfig0, null);
    CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
    CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
    trackingProcessor.execute(ingestDocument, (result, e) -> {});
    SimulateProcessorResult expectedResult = new SimulateProcessorResult(
        actualProcessor.getType(),
        actualProcessor.getTag(),
        actualProcessor.getDescription(),
        ingestDocument,
        null
    );
    expectedResult.getIngestDocument().getIngestMetadata().put("pipeline", pipelineId1);
    verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId1);
    verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId2);
    assertThat(resultList.size(), equalTo(5));
    // 1st result: the outer pipeline processor (tag "pipeline0", no conditional).
    assertNull(resultList.get(0).getConditionalWithResult());
    assertThat(resultList.get(0).getType(), equalTo("pipeline"));
    assertThat(resultList.get(0).getProcessorTag(), equalTo("pipeline0"));
    // 2nd result: after pipeline1's first processor only key1 exists.
    assertTrue(resultList.get(1).getIngestDocument().hasField(key1));
    assertFalse(resultList.get(1).getIngestDocument().hasField(key2));
    assertFalse(resultList.get(1).getIngestDocument().hasField(key3));
    // 3rd result: the conditional pipeline processor, with its true evaluation tracked.
    assertThat(resultList.get(2).getConditionalWithResult().v1(), equalTo(scriptName));
    assertThat(resultList.get(2).getConditionalWithResult().v2(), is(Boolean.TRUE));
    assertThat(resultList.get(2).getType(), equalTo("pipeline"));
    assertThat(resultList.get(2).getProcessorTag(), equalTo("pipeline1"));
    // 4th result: pipeline2 set key2; key3 not yet set.
    assertTrue(resultList.get(3).getIngestDocument().hasField(key1));
    assertTrue(resultList.get(3).getIngestDocument().hasField(key2));
    assertFalse(resultList.get(3).getIngestDocument().hasField(key3));
    // Final result matches the fully-processed document.
    assertIngestDocument(resultList.get(4).getIngestDocument(), expectedResult.getIngestDocument());
    assertThat(resultList.get(4).getFailure(), nullValue());
    assertThat(resultList.get(4).getProcessorTag(), nullValue());
}
public void testActualPipelineProcessorWithFalseConditional() throws Exception {
    // pipeline1 sets key1, then a conditional (script evaluates false) would invoke
    // pipeline2 but is skipped, then sets key3. Verifies pipeline2 is never fetched and
    // the skipped result records the conditional (script name, false).
    String pipelineId1 = "pipeline1";
    String pipelineId2 = "pipeline2";
    IngestService ingestService = createIngestService();
    Map<String, Object> pipelineConfig0 = new HashMap<>();
    pipelineConfig0.put("name", pipelineId1);
    // Fix: removed unused local pipelineConfig1 (declared but never referenced).
    Map<String, Object> pipelineConfig2 = new HashMap<>();
    pipelineConfig2.put("name", pipelineId2);
    PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
    String key1 = randomAlphaOfLength(10);
    String key2 = randomAlphaOfLength(10);
    String key3 = randomAlphaOfLength(10);
    String scriptName = "conditionalScript";
    // MockScriptEngine returns false, so the nested pipeline processor is skipped.
    ScriptService scriptService = new ScriptService(
        Settings.builder().build(),
        Map.of(
            Script.DEFAULT_SCRIPT_LANG,
            new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Map.of(scriptName, ctx -> false), Map.of())
        ),
        new HashMap<>(ScriptModule.CORE_CONTEXTS),
        () -> 1L,
        TestProjectResolvers.singleProject(randomProjectIdOrDefault())
    );
    Pipeline pipeline1 = new Pipeline(pipelineId1, null, null, null, new CompoundProcessor(new TestProcessor(ingestDocument -> {
        ingestDocument.setFieldValue(key1, randomInt());
    }),
        new ConditionalProcessor(
            randomAlphaOfLength(10),
            null,
            new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Map.of()),
            scriptService,
            factory.create(Map.of(), null, null, pipelineConfig2, null)
        ),
        new TestProcessor(ingestDocument -> {
            ingestDocument.setFieldValue(key3, randomInt());
        })
    ));
    Pipeline pipeline2 = new Pipeline(pipelineId2, null, null, null, new CompoundProcessor(new TestProcessor(ingestDocument -> {
        ingestDocument.setFieldValue(key2, randomInt());
    })));
    when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1);
    when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2);
    PipelineProcessor pipelineProcessor = factory.create(Map.of(), null, null, pipelineConfig0, null);
    CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
    CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
    trackingProcessor.execute(ingestDocument, (result, e) -> {});
    SimulateProcessorResult expectedResult = new SimulateProcessorResult(
        actualProcessor.getType(),
        actualProcessor.getTag(),
        actualProcessor.getDescription(),
        ingestDocument,
        null
    );
    expectedResult.getIngestDocument().getIngestMetadata().put("pipeline", pipelineId1);
    verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId1);
    // The false conditional means pipeline2 must never be resolved.
    verify(ingestService, Mockito.never()).getPipeline(pipelineId2);
    assertThat(resultList.size(), equalTo(4));
    // 1st result: the outer pipeline processor (no conditional attached).
    assertNull(resultList.get(0).getConditionalWithResult());
    assertThat(resultList.get(0).getType(), equalTo("pipeline"));
    // 2nd result: only key1 set by pipeline1's first processor.
    assertTrue(resultList.get(1).getIngestDocument().hasField(key1));
    assertFalse(resultList.get(1).getIngestDocument().hasField(key2));
    assertFalse(resultList.get(1).getIngestDocument().hasField(key3));
    // 3rd result: the skipped nested pipeline, with the false conditional tracked.
    assertThat(resultList.get(2).getConditionalWithResult().v1(), equalTo(scriptName));
    assertThat(resultList.get(2).getConditionalWithResult().v2(), is(Boolean.FALSE));
    // Final result: key2 absent because pipeline2 never ran.
    assertIngestDocument(resultList.get(3).getIngestDocument(), expectedResult.getIngestDocument());
    assertThat(resultList.get(3).getFailure(), nullValue());
    assertThat(resultList.get(3).getProcessorTag(), nullValue());
}
public void testActualPipelineProcessorWithHandledFailure() throws Exception {
// A pipeline whose middle step fails but is recovered by an on_failure handler that
// sets key2; processing then continues to set key3. Five results are tracked: the
// pipeline processor, key1, the failure, the on_failure recovery, and key3.
RuntimeException exception = new RuntimeException("processor failed");
String pipelineId = "pipeline1";
IngestService ingestService = createIngestService();
Map<String, Object> pipelineConfig = new HashMap<>();
pipelineConfig.put("name", pipelineId);
PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
String key1 = randomAlphaOfLength(10);
String key2 = randomAlphaOfLength(10);
String key3 = randomAlphaOfLength(10);
Pipeline pipeline = new Pipeline(pipelineId, null, null, null, new CompoundProcessor(new TestProcessor(ingestDocument -> {
ingestDocument.setFieldValue(key1, randomInt());
}),
new CompoundProcessor(
false,
List.of(new TestProcessor(ingestDocument -> { throw exception; })),
List.of(new TestProcessor(ingestDocument -> {
ingestDocument.setFieldValue(key2, randomInt());
}))
),
new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })
));
when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
PipelineProcessor pipelineProcessor = factory.create(Map.of(), null, null, pipelineConfig, null);
CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
trackingProcessor.execute(ingestDocument, (result, e) -> {});
SimulateProcessorResult expectedResult = new SimulateProcessorResult(
actualProcessor.getType(),
actualProcessor.getTag(),
actualProcessor.getDescription(),
ingestDocument,
null
);
expectedResult.getIngestDocument().getIngestMetadata().put("pipeline", pipelineId);
verify(ingestService, Mockito.atLeast(2)).getPipeline(pipelineId);
assertThat(resultList.size(), equalTo(5));
// 1st result: the pipeline processor itself (no conditional attached).
assertNull(resultList.get(0).getConditionalWithResult());
assertThat(resultList.get(0).getType(), equalTo("pipeline"));
// 2nd result: only key1 set so far.
assertTrue(resultList.get(1).getIngestDocument().hasField(key1));
assertFalse(resultList.get(1).getIngestDocument().hasField(key2));
assertFalse(resultList.get(1).getIngestDocument().hasField(key3));
// failed processor
assertNull(resultList.get(2).getIngestDocument());
assertThat(resultList.get(2).getFailure().getMessage(), equalTo(exception.getMessage()));
// 4th result: on_failure handler recovered and set key2.
assertTrue(resultList.get(3).getIngestDocument().hasField(key1));
assertTrue(resultList.get(3).getIngestDocument().hasField(key2));
assertFalse(resultList.get(3).getIngestDocument().hasField(key3));
// Final result matches the fully-processed document.
assertIngestDocument(resultList.get(4).getIngestDocument(), expectedResult.getIngestDocument());
assertThat(resultList.get(4).getFailure(), nullValue());
assertThat(resultList.get(4).getProcessorTag(), nullValue());
}
public void testActualPipelineProcessorWithUnhandledFailure() throws Exception {
// A pipeline whose second processor throws with no on_failure handler: processing stops
// and the last tracked result carries the unhandled exception.
String pipelineId = "pipeline1";
IngestService ingestService = createIngestService();
Map<String, Object> pipelineConfig = new HashMap<>();
pipelineConfig.put("name", pipelineId);
PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
String key1 = randomAlphaOfLength(10);
// Deliberately not a GraphStructureException, to exercise the generic failure path.
IllegalStateException exception = new IllegalStateException("Not a pipeline cycle error");
Pipeline pipeline = new Pipeline(
pipelineId,
null,
null,
null,
new CompoundProcessor(
new TestProcessor(ingestDocument -> ingestDocument.setFieldValue(key1, randomInt())),
new TestProcessor(ingestDocument -> {
throw exception;
})
)
);
when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
PipelineProcessor pipelineProcessor = factory.create(Map.of(), null, null, pipelineConfig, null);
CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
trackingProcessor.execute(ingestDocument, (result, e) -> {});
SimulateProcessorResult expectedResult = new SimulateProcessorResult(
actualProcessor.getType(),
actualProcessor.getTag(),
actualProcessor.getDescription(),
ingestDocument,
null
);
expectedResult.getIngestDocument().getIngestMetadata().put("pipeline", pipelineId);
verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId);
// Three results: pipeline processor, successful key1 processor, then the failure.
assertThat(resultList.size(), equalTo(3));
assertNull(resultList.get(0).getConditionalWithResult());
assertThat(resultList.get(0).getType(), equalTo("pipeline"));
assertTrue(resultList.get(1).getIngestDocument().hasField(key1));
assertThat(resultList.get(2).getFailure(), equalTo(exception));
}
public void testActualPipelineProcessorWithCycle() throws Exception {
// pipeline1 invokes pipeline2, which invokes pipeline1 again: the cycle must be
// detected and surfaced as an IngestProcessorException caused by GraphStructureException.
String pipelineId1 = "pipeline1";
String pipelineId2 = "pipeline2";
IngestService ingestService = createIngestService();
Map<String, Object> pipelineConfig0 = new HashMap<>();
pipelineConfig0.put("name", pipelineId1);
Map<String, Object> pipelineConfig1 = new HashMap<>();
pipelineConfig1.put("name", pipelineId1);
Map<String, Object> pipelineConfig2 = new HashMap<>();
pipelineConfig2.put("name", pipelineId2);
PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
// pipeline1 -> pipeline2 -> pipeline1: a two-node cycle.
Pipeline pipeline1 = new Pipeline(
pipelineId1,
null,
null,
null,
new CompoundProcessor(factory.create(Map.of(), null, null, pipelineConfig2, null))
);
Pipeline pipeline2 = new Pipeline(
pipelineId2,
null,
null,
null,
new CompoundProcessor(factory.create(Map.of(), null, null, pipelineConfig1, null))
);
when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1);
when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2);
PipelineProcessor pipelineProcessor = factory.create(Map.of(), null, null, pipelineConfig0, null);
CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
Exception[] holder = new Exception[1];
trackingProcessor.execute(ingestDocument, (result, e) -> holder[0] = e);
// The completion handler receives the wrapped cycle-detection error.
IngestProcessorException exception = (IngestProcessorException) holder[0];
assertThat(exception.getCause(), instanceOf(GraphStructureException.class));
assertThat(exception.getMessage(), containsString("Cycle detected for pipeline: pipeline1"));
}
public void testActualPipelineProcessorNested() throws Exception {
/*
* This test creates a pipeline made up of many nested pipeline processors, ending in a processor that counts both how many times
* it is called for a given document (by updating a field on that document) and how many times it is called overall.
*/
IngestService ingestService = createIngestService();
PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
// Stay within the nesting limit so execution succeeds (contrast with the TooManyPipelines test).
int pipelineCount = randomIntBetween(2, IngestDocument.MAX_PIPELINES);
// Build a chain: pipeline0 -> pipeline1 -> ... -> pipeline(N-2), each delegating to the next.
for (int i = 0; i < pipelineCount - 1; i++) {
String pipelineId = "pipeline" + i;
String nextPipelineId = "pipeline" + (i + 1);
Map<String, Object> nextPipelineConfig = new HashMap<>();
nextPipelineConfig.put("name", nextPipelineId);
Pipeline pipeline = new Pipeline(
pipelineId,
null,
null,
null,
new CompoundProcessor(factory.create(Map.of(), null, null, nextPipelineConfig, null))
);
when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
}
// The last pipeline calls the CountCallsProcessor rather than yet another pipeline processor:
String lastPipelineId = "pipeline" + (pipelineCount - 1);
CountCallsProcessor countCallsProcessor = new CountCallsProcessor();
Pipeline lastPipeline = new Pipeline(lastPipelineId, null, null, null, new CompoundProcessor(countCallsProcessor));
when(ingestService.getPipeline(lastPipelineId)).thenReturn(lastPipeline);
String firstPipelineId = "pipeline0";
Map<String, Object> firstPipelineConfig = new HashMap<>();
firstPipelineConfig.put("name", firstPipelineId);
PipelineProcessor pipelineProcessor = factory.create(Map.of(), null, null, firstPipelineConfig, null);
CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
IngestDocument[] holder = new IngestDocument[1];
trackingProcessor.execute(ingestDocument, (result, e) -> holder[0] = result);
IngestDocument document = holder[0];
assertNotNull(document);
// Make sure that the final processor was called exactly once on this document:
assertThat(document.getFieldValue(countCallsProcessor.getCountFieldName(), Integer.class), equalTo(1));
// But it was called exactly one other time during the pipeline cycle check:
assertThat(countCallsProcessor.getTotalCount(), equalTo(2));
assertThat(resultList.size(), equalTo(pipelineCount + 1)); // one result per pipeline, plus the "count_calls" processor
for (int i = 0; i < resultList.size() - 1; i++) {
SimulateProcessorResult result = resultList.get(i);
assertThat(result.getType(), equalTo(pipelineProcessor.getType()));
}
assertThat(resultList.get(resultList.size() - 1).getType(), equalTo(countCallsProcessor.getType()));
}
public void testActualPipelineProcessorNestedTooManyPipelines() throws Exception {
/*
* This test creates a pipeline made up of many nested pipeline processors, ending in a processor that counts both how many times
* it is called for a given document (by updating a field on that document) and how many times it is called overall.
*/
IngestService ingestService = createIngestService();
PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
// Exceed the nesting limit so execution must fail before reaching the last pipeline.
int pipelineCount = randomIntBetween(IngestDocument.MAX_PIPELINES + 1, 500);
// Build a chain: pipeline0 -> pipeline1 -> ... -> pipeline(N-2), each delegating to the next.
for (int i = 0; i < pipelineCount - 1; i++) {
String pipelineId = "pipeline" + i;
String nextPipelineId = "pipeline" + (i + 1);
Map<String, Object> nextPipelineConfig = new HashMap<>();
nextPipelineConfig.put("name", nextPipelineId);
Pipeline pipeline = new Pipeline(
pipelineId,
null,
null,
null,
new CompoundProcessor(factory.create(Map.of(), null, null, nextPipelineConfig, null))
);
when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
}
// The last pipeline calls the CountCallsProcessor rather than yet another pipeline processor:
String lastPipelineId = "pipeline" + (pipelineCount - 1);
CountCallsProcessor countCallsProcessor = new CountCallsProcessor();
Pipeline lastPipeline = new Pipeline(lastPipelineId, null, null, null, new CompoundProcessor(countCallsProcessor));
when(ingestService.getPipeline(lastPipelineId)).thenReturn(lastPipeline);
String firstPipelineId = "pipeline0";
Map<String, Object> firstPipelineConfig = new HashMap<>();
firstPipelineConfig.put("name", firstPipelineId);
PipelineProcessor pipelineProcessor = factory.create(Map.of(), null, null, firstPipelineConfig, null);
CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
IngestDocument[] documentHolder = new IngestDocument[1];
Exception[] exceptionHolder = new Exception[1];
trackingProcessor.execute(ingestDocument, (result, e) -> {
documentHolder[0] = result;
exceptionHolder[0] = e;
});
IngestDocument document = documentHolder[0];
Exception exception = exceptionHolder[0];
// Execution fails with "Too many nested pipelines" and produces no document.
assertNull(document);
assertNotNull(exception);
assertThat(exception.getMessage(), containsString("Too many nested pipelines"));
// We expect that the last processor was never called:
assertThat(countCallsProcessor.getTotalCount(), equalTo(0));
}
public void testActualPipelineProcessorRepeatedInvocation() throws Exception {
// The same pipeline processor appears twice in a compound processor. Each invocation
// must be tracked separately, and each run overwrites key1 with a fresh random int.
String pipelineId = "pipeline1";
IngestService ingestService = createIngestService();
Map<String, Object> pipelineConfig = new HashMap<>();
pipelineConfig.put("name", pipelineId);
PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
String key1 = randomAlphaOfLength(10);
PipelineProcessor pipelineProcessor = factory.create(Map.of(), null, null, pipelineConfig, null);
Pipeline pipeline = new Pipeline(pipelineId, null, null, null, new CompoundProcessor(new TestProcessor(ingestDocument -> {
ingestDocument.setFieldValue(key1, randomInt());
})));
when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
// calls the same pipeline twice
CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor, pipelineProcessor);
CompoundProcessor trackingProcessor = decorate(actualProcessor, null, resultList);
trackingProcessor.execute(ingestDocument, (result, e) -> {});
SimulateProcessorResult expectedResult = new SimulateProcessorResult(
actualProcessor.getType(),
actualProcessor.getTag(),
actualProcessor.getDescription(),
ingestDocument,
null
);
expectedResult.getIngestDocument().getIngestMetadata().put("pipeline", pipelineId);
verify(ingestService, Mockito.atLeast(2)).getPipeline(pipelineId);
// Four results: (pipeline, inner processor) tracked once per invocation.
assertThat(resultList.size(), equalTo(4));
assertNull(resultList.get(0).getConditionalWithResult());
assertThat(resultList.get(0).getType(), equalTo("pipeline"));
// The first run's key1 differs from the final value because the second run overwrote it.
assertThat(
resultList.get(1).getIngestDocument().getFieldValue(key1, Integer.class),
not(equalTo(expectedResult.getIngestDocument().getFieldValue(key1, Integer.class)))
);
assertThat(resultList.get(1).getFailure(), nullValue());
assertThat(resultList.get(1).getProcessorTag(), nullValue());
assertNull(resultList.get(2).getConditionalWithResult());
assertThat(resultList.get(2).getType(), equalTo("pipeline"));
assertIngestDocument(resultList.get(3).getIngestDocument(), expectedResult.getIngestDocument());
assertThat(resultList.get(3).getFailure(), nullValue());
assertThat(resultList.get(3).getProcessorTag(), nullValue());
// each invocation updates key1 with a random int
assertNotEquals(
resultList.get(1).getIngestDocument().getSourceAndMetadata().get(key1),
resultList.get(3).getIngestDocument().getSourceAndMetadata().get(key1)
);
}
/*
* This test processor keeps track of how many times it has been called. It also creates/updates the "count" field on documents with
* the number of times this processor has been called for that document.
*/
private static final | TrackingResultProcessorTests |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/privatemember/PrivateProducerFieldTest.java | {
"start": 668,
"end": 790
} | class ____ {
public String name() {
return null;
}
}
@ApplicationScoped
static | Head |
java | apache__flink | flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/runtime/metadata/OperatorStateSpecV2.java | {
"start": 1188,
"end": 1464
} | class ____ an operator state maintained by {@link SavepointMetadataV2}. An operator
* state is either represented as an existing {@link OperatorState}, or a {@link
* org.apache.flink.state.api.StateBootstrapTransformation} that will be used to create it.
*/
@Internal
| specifies |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/LogDirNotFoundException.java | {
"start": 942,
"end": 1326
} | class ____ extends ApiException {
private static final long serialVersionUID = 1L;
public LogDirNotFoundException(String message) {
super(message);
}
public LogDirNotFoundException(String message, Throwable cause) {
super(message, cause);
}
public LogDirNotFoundException(Throwable cause) {
super(cause);
}
}
| LogDirNotFoundException |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/SentExchangeEventNotifierIssueTest.java | {
"start": 1340,
"end": 1489
} | class ____ extends ContextTestSupport {
private final MyNotifier notifier = new MyNotifier();
private static | SentExchangeEventNotifierIssueTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/JSONFieldTest2.java | {
"start": 444,
"end": 742
} | class ____ {
private int id;
@JSONField(serialize = false)
private boolean flag;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public boolean isFlag() {
return flag;
}
public void setFlag(boolean flag) {
this.flag = flag;
}
}
}
| VO |
java | apache__camel | test-infra/camel-test-infra-kafka/src/main/java/org/apache/camel/test/infra/kafka/services/ConfluentInfraService.java | {
"start": 2297,
"end": 3855
} | class ____ extends ConfluentContainer {
public TestInfraConfluentContainer(Network network, String name, boolean fixedPort) {
super(network, name);
if (fixedPort) {
addFixedExposedPort(9092, 9092);
}
}
}
return new TestInfraConfluentContainer(network, instanceName, ContainerEnvironmentUtil.isFixedPort(this.getClass()));
}
protected Integer getKafkaPort() {
return confluentContainer.getKafkaPort();
}
@Override
public String getBootstrapServers() {
return confluentContainer.getHost() + ":" + getKafkaPort();
}
@Override
public String brokers() {
return getBootstrapServers();
}
@Override
public void registerProperties() {
System.setProperty(KafkaProperties.KAFKA_BOOTSTRAP_SERVERS, getBootstrapServers());
}
@Override
public void initialize() {
confluentContainer.start();
registerProperties();
LOG.info("Kafka bootstrap server running at address {}", getBootstrapServers());
}
private boolean stopped() {
return !confluentContainer.isRunning();
}
@Override
public void shutdown() {
try {
LOG.info("Stopping Kafka container");
confluentContainer.stop();
} finally {
TestUtils.waitFor(this::stopped);
}
}
@Override
public ConfluentContainer getContainer() {
return confluentContainer;
}
}
| TestInfraConfluentContainer |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/url/UrlTestBase.java | {
"start": 1475,
"end": 9231
} | class ____ {
// ======================================================
// data column definition
// ======================================================
protected static final int KEY = 0;
protected static final int URL_KEY = 1;
protected static final int TESTVALUE1 = 4;
private static final Logger log = LoggerFactory.getLogger(UrlTestBase.class);
private static final int TYPE = 2;
private static final int DEFAULT = 3;
private static final int TESTVALUE2 = 5;
private static final int TESTVALUE3 = 6;
private static final int TESTVALUE4 = 7;
private static final int TESTVALUE5 = 8;
private static final int TESTVALUE6 = 9;
private static final int TESTVALUE7 = 10;
protected ApplicationConfig application;
protected RegistryConfig regConfForProvider;
protected RegistryConfig regConfForService;
protected ProviderConfig provConf;
protected ProtocolConfig protoConfForProvider;
protected ProtocolConfig protoConfForService;
protected MethodConfig methodConfForService;
protected ServiceConfig<DemoService> servConf;
protected Object servConfTable[][] = {
{"proxy", "proxy", "string", "javassist", "jdk", "javassist", "", "", "", ""},
{"actives", "actives", "int", 0, 90, "", "", "", "", ""},
{"executes", "executes", "int", 0, 90, "", "", "", "", ""},
{"deprecated", "deprecated", "boolean", false, true, "", "", "", "", ""},
{"dynamic", "dynamic", "boolean", true, false, "", "", "", "", ""},
{"accesslog", "accesslog", "string", "", "haominTest", "", "", "", "", ""},
{
"document",
"document",
"string",
"",
"http://dubbo.apache.org/zh-cn/docs/user/quick-start.html?testquery=你好你好",
"",
"",
"",
"",
""
},
{"weight", "weight", "int", 0, 90, "", "", "", "", ""},
// {"filter", "service.filter", "string", "", "", "", "", "", "", ""},
// {"listener", "listener", "string", "", "", "", "", "", "", ""},
};
protected Object regConfForServiceTable[][] = {
// {"timeout", "registry.timeout", "int", 5000, 9000, "", "", "", "", ""},
// {"file", "registry.file", "string", "", "regConfForServiceTable.log", "", "", "", "", ""},
// {"wait", "registry.wait", "int", 0, 9000, "", "", "", "", ""},
// {"transport", "registry.transporter", "string", "netty", "mina", "", "", "", "", ""},
// {"subscribe", "subscribe", "boolean", true, false, "", "", "", "", ""},
{"dynamic", "dynamic", "boolean", true, false, "", "", "", "", ""},
};
protected Object provConfTable[][] = {
{"cluster", "cluster", "string", "string", "failover", "failfast", "failsafe", "", "", ""},
{"async", "async", "boolean", false, true, "", "", "", "", ""},
{"loadbalance", "loadbalance", "string", "random", "leastactive", "", "", "", "", ""},
{"connections", "connections", "int", 0, 60, "", "", "", "", ""},
{"retries", "retries", "int", 2, 60, "", "", "", "", ""},
{"timeout", "timeout", "int", 5000, 60, "", "", "", "", ""},
// change by fengting listener 没有缺省值
// {"listener", "exporter.listener", "string", "", "", "", "", "", "", ""},
// {"filter", "service.filter", "string", "", "", "", "", "", "", ""},
};
protected Object methodConfForServiceTable[][] = {
{"actives", "sayName.actives", "int", 0, 90, "", "", "", "", ""},
{"executes", "sayName.executes", "int", 0, 90, "", "", "", "", ""},
{"deprecated", "sayName.deprecated", "boolean", false, true, "", "", "", "", ""},
{"async", "sayName.async", "boolean", false, true, "", "", "", "", ""},
{"timeout", "sayName.timeout", "int", 0, 90, "", "", "", "", ""},
};
protected DemoService demoService = new DemoServiceImpl();
private Object appConfForProviderTable[][] = {
{"", "", "", "", "", "", "", "", "", ""},
};
private Object appConfForServiceTable[][] = {
{"", "", "", "", "", "", "", "", "", ""},
};
private Object regConfForProviderTable[][] = {
{"", "", "", "", "", "", "", "", "", ""},
};
private Object protoConfForProviderTable[][] = {
{"", "", "", "", "", "", "", "", "", ""},
};
private Object protoConfForServiceTable[][] = {
{"", "", "", "", "", "", "", "", "", ""},
};
// ======================================================
// data table manipulation utils
// ======================================================
protected String genParamString(Object urlKey, Object value) {
return (String) urlKey + "=" + value.toString();
}
protected <T> void fillConfigs(T conf, Object[][] table, int column) {
for (Object[] row : table) {
fillConfig(conf, row, column);
}
}
protected <T> void fillConfig(T conf, Object[] row, int column) {
RpcConfigGetSetProxy proxy = new RpcConfigGetSetProxy(conf);
proxy.setValue((String) row[KEY], row[column]);
}
@SuppressWarnings("deprecation")
protected void initServConf() {
regConfForProvider = new RegistryConfig();
regConfForService = new RegistryConfig();
provConf = new ProviderConfig();
protoConfForProvider = new ProtocolConfig("mockprotocol");
protoConfForService = new ProtocolConfig("mockprotocol");
methodConfForService = new MethodConfig();
servConf = new ServiceConfig<DemoService>();
// provConf.setApplication(appConfForProvider);
application = new ApplicationConfig();
application.setMetadataServicePort(20881);
servConf.setApplication(application);
provConf.setRegistry(regConfForProvider);
servConf.setRegistry(regConfForService);
provConf.setProtocols(new ArrayList<>(Arrays.asList(protoConfForProvider)));
servConf.setProtocols(new ArrayList<>(Arrays.asList(protoConfForService)));
servConf.setMethods(Arrays.asList(new MethodConfig[] {methodConfForService}));
servConf.setProvider(provConf);
servConf.setRef(demoService);
servConf.setInterface(DemoService.class);
methodConfForService.setName("sayName");
regConfForService.setAddress("127.0.0.1:9090");
regConfForService.setProtocol("mockregistry");
application.setName("ConfigTests");
}
protected String getProviderParamString() {
return servConf.getExportedUrls().get(0).toString();
}
/**
* @param paramStringFromDb
* @param dataTable
* @param configName
* @param column
*/
protected void assertUrlStringWithLocalTable(
String paramStringFromDb, Object[][] dataTable, String configName, int column) {
final String FAILLOG_HEADER = "The following config items are not found in URLONE: ";
log.warn("Verifying service url for " + configName + "... ");
log.warn("Consumer url string: " + paramStringFromDb);
String failLog = FAILLOG_HEADER;
for (Object[] row : dataTable) {
String targetString = genParamString(row[URL_KEY], row[column]);
log.warn("Checking " + (String) row[KEY] + "for" + targetString);
if (paramStringFromDb.contains(targetString)) {
log.warn((String) row[KEY] + " --> " + targetString + " OK!");
} else {
failLog += targetString + ", ";
}
}
if (!failLog.equals(FAILLOG_HEADER)) {
fail(failLog);
}
}
}
| UrlTestBase |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/util/ContentCachingResponseWrapper.java | {
"start": 8960,
"end": 9489
} | class ____ extends PrintWriter {
public ResponsePrintWriter(String characterEncoding) throws UnsupportedEncodingException {
super(new OutputStreamWriter(content, characterEncoding));
}
@Override
public void write(char[] buf, int off, int len) {
super.write(buf, off, len);
super.flush();
}
@Override
public void write(String s, int off, int len) {
super.write(s, off, len);
super.flush();
}
@Override
public void write(int c) {
super.write(c);
super.flush();
}
}
}
| ResponsePrintWriter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterNamingTest.java | {
"start": 3052,
"end": 3466
} | class ____<BadNameT> {
public <T, FooT> void method(FooT f) {
BadNameT bad = null;
FooT d = f;
}
}
""")
.setFixChooser(FixChoosers.FIRST)
.doTest(TestMode.TEXT_MATCH);
}
@Test
public void refactoring_single() {
refactoring
.addInputLines(
"in/Test.java",
"""
| Test |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/session/MockitoSessionBuilder.java | {
"start": 1883,
"end": 2083
} | class ____ for initialization of fields annotated with Mockito annotations
* like {@link org.mockito.Mock}.
* <p>
* In most scenarios, you only need to init mocks on a single test | instances |
java | apache__spark | common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/CollationFactory.java | {
"start": 11523,
"end": 11676
} | class ____ {
/**
* Bit 30 in collation ID having value 0 for predefined and 1 for user-defined collation.
*/
private | CollationSpec |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/security/HttpAuthorizer.java | {
"start": 759,
"end": 1922
} | class ____ extends AbstractHttpAuthorizer {
HttpAuthorizer(IdentityProviderManager identityProviderManager,
AuthorizationController controller, Instance<HttpSecurityPolicy> installedPolicies,
BlockingSecurityExecutor blockingExecutor, BeanManager beanManager,
Event<AuthorizationFailureEvent> authZFailureEvent, Event<AuthorizationSuccessEvent> authZSuccessEvent,
@ConfigProperty(name = "quarkus.security.events.enabled") boolean securityEventsEnabled) {
super(identityProviderManager, controller, toList(installedPolicies), beanManager, blockingExecutor,
authZFailureEvent, authZSuccessEvent, securityEventsEnabled);
}
private static List<HttpSecurityPolicy> toList(Instance<HttpSecurityPolicy> installedPolicies) {
List<HttpSecurityPolicy> globalPolicies = new ArrayList<>();
for (HttpSecurityPolicy i : installedPolicies) {
if (i.name() == null && !(i instanceof AbstractPathMatchingHttpSecurityPolicy i1 && i1.hasNoPermissions())) {
globalPolicies.add(i);
}
}
return globalPolicies;
}
}
| HttpAuthorizer |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/registration/RetryingRegistrationTest.java | {
"start": 18025,
"end": 18442
} | class ____ extends RegistrationResponse.Success {
private static final long serialVersionUID = 5542698790917150604L;
private final String correlationId;
public TestRegistrationSuccess(String correlationId) {
this.correlationId = correlationId;
}
public String getCorrelationId() {
return correlationId;
}
}
static | TestRegistrationSuccess |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/Plan.java | {
"start": 13408,
"end": 13935
} | class ____ implements Visitor<Operator<?>> {
private final Set<Operator> visitedOperators = new HashSet<>();
private int maxDop = -1;
@Override
public boolean preVisit(Operator<?> visitable) {
if (!visitedOperators.add(visitable)) {
return false;
}
this.maxDop = Math.max(this.maxDop, visitable.getParallelism());
return true;
}
@Override
public void postVisit(Operator<?> visitable) {}
}
}
| MaxDopVisitor |
java | apache__dubbo | dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/model/RequestBody.java | {
"start": 997,
"end": 2901
} | class ____ extends Node<RequestBody> {
private String description;
private Map<String, MediaType> contents;
private boolean required;
public String getDescription() {
return description;
}
public RequestBody setDescription(String description) {
this.description = description;
return this;
}
public Map<String, MediaType> getContents() {
return contents;
}
public MediaType getContent(String content) {
return contents == null ? null : contents.get(content);
}
public MediaType getOrAddContent(String content) {
if (contents == null) {
contents = new LinkedHashMap<>();
}
return contents.computeIfAbsent(content, k -> new MediaType());
}
public RequestBody setContents(Map<String, MediaType> contents) {
this.contents = contents;
return this;
}
public RequestBody addContent(String name, MediaType content) {
if (contents == null) {
contents = new LinkedHashMap<>();
}
contents.put(name, content);
return this;
}
public RequestBody removeContent(String name) {
if (contents != null) {
contents.remove(name);
}
return this;
}
public boolean isRequired() {
return required;
}
public RequestBody setRequired(boolean required) {
this.required = required;
return this;
}
@Override
public RequestBody clone() {
RequestBody clone = super.clone();
clone.contents = clone(contents);
return clone;
}
@Override
public Map<String, Object> writeTo(Map<String, Object> node, Context context) {
write(node, "description", description);
write(node, "required", required);
write(node, "content", contents, context);
return node;
}
}
| RequestBody |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/util/uuid/IdGeneratorCreationContext.java | {
"start": 585,
"end": 2027
} | class ____ implements GeneratorCreationContext {
private final ServiceRegistry serviceRegistry;
private final MetadataImplementor domainModel;
private final RootClass entityMapping;
private final KeyValue identifier;
public IdGeneratorCreationContext(
ServiceRegistry serviceRegistry,
MetadataImplementor domainModel,
RootClass entityMapping) {
this.serviceRegistry = serviceRegistry;
this.domainModel = domainModel;
this.entityMapping = entityMapping;
this.identifier = entityMapping.getIdentifier();
assert entityMapping.getIdentifierProperty() != null;
}
public IdGeneratorCreationContext(MetadataImplementor domainModel, RootClass entityMapping) {
this(
domainModel.getMetadataBuildingOptions().getServiceRegistry(),
domainModel,
entityMapping
);
}
@Override
public RootClass getRootClass() {
return entityMapping;
}
@Override
public Database getDatabase() {
return domainModel.getDatabase();
}
@Override
public ServiceRegistry getServiceRegistry() {
return serviceRegistry;
}
@Override
public String getDefaultCatalog() {
return "";
}
@Override
public String getDefaultSchema() {
return "";
}
@Override
public PersistentClass getPersistentClass() {
return entityMapping;
}
@Override
public Property getProperty() {
return entityMapping.getIdentifierProperty();
}
@Override
public Value getValue() {
return identifier;
}
}
| IdGeneratorCreationContext |
java | google__truth | core/src/main/java/com/google/common/truth/Subject.java | {
"start": 54265,
"end": 54545
} | class ____ off,
* but our users may run their own tests that way.)
*
* <p>Since Truth can normally infer this on its own, this mechanism is not something that would
* normally be useful outside of core Truth. But to support running Truth's own tests run with
* | metadata |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/beans/factory/xml/XmlBeanFactoryTests.java | {
"start": 72536,
"end": 72669
} | class ____ {
/** Init method */
public void init2() throws IOException {
throw new IOException();
}
}
static | BadInitializer |
java | dropwizard__dropwizard | dropwizard-jetty/src/main/java/io/dropwizard/jetty/HttpConnectorFactory.java | {
"start": 21323,
"end": 21555
} | class ____ extends StringMethodDeserializer<HttpCompliance> {
public HttpComplianceDeserializer() {
super(HttpCompliance.class, HttpCompliance::valueOf);
}
}
private static | HttpComplianceDeserializer |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/app/BeanPropertiesDefinition.java | {
"start": 1213,
"end": 1587
} | class ____ {
@XmlElement(name = "property")
private List<BeanPropertyDefinition> properties;
public List<BeanPropertyDefinition> getProperties() {
return properties;
}
/**
* Bean properties
*/
public void setProperties(List<BeanPropertyDefinition> properties) {
this.properties = properties;
}
}
| BeanPropertiesDefinition |
java | quarkusio__quarkus | integration-tests/spring-web/src/main/java/io/quarkus/it/spring/web/UnhandledAnnotatedException.java | {
"start": 122,
"end": 265
} | class ____ extends Exception {
public UnhandledAnnotatedException(String message) {
super(message);
}
}
| UnhandledAnnotatedException |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/response/AzureAiStudioRerankResponseEntityTests.java | {
"start": 799,
"end": 3899
} | class ____ extends ESTestCase {
public void testResponse_WithDocuments() throws IOException {
final String responseJson = getResponseJsonWithDocuments();
final var parsedResults = getParsedResults(responseJson);
final var expectedResults = List.of(
new RankedDocsResults.RankedDoc(0, 0.1111111F, "test text one"),
new RankedDocsResults.RankedDoc(1, 0.2222222F, "test text two")
);
assertThat(parsedResults.getRankedDocs(), is(expectedResults));
}
public void testResponse_NoDocuments() throws IOException {
final String responseJson = getResponseJsonNoDocuments();
final var parsedResults = getParsedResults(responseJson);
final var expectedResults = List.of(
new RankedDocsResults.RankedDoc(0, 0.1111111F, null),
new RankedDocsResults.RankedDoc(1, 0.2222222F, null)
);
assertThat(parsedResults.getRankedDocs(), is(expectedResults));
}
private RankedDocsResults getParsedResults(String responseJson) throws IOException {
final var entity = new AzureAiStudioRerankResponseEntity();
return (RankedDocsResults) entity.apply(
mock(Request.class),
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
}
private String getResponseJsonWithDocuments() {
return """
{
"id": "222e59de-c712-40cb-ae87-ecd402d0d2f1",
"results": [
{
"document": {
"text": "test text one"
},
"index": 0,
"relevance_score": 0.1111111
},
{
"document": {
"text": "test text two"
},
"index": 1,
"relevance_score": 0.2222222
}
],
"meta": {
"api_version": {
"version": "1"
},
"billed_units": {
"search_units": 1
}
}
}
""";
}
private String getResponseJsonNoDocuments() {
return """
{
"id": "222e59de-c712-40cb-ae87-ecd402d0d2f1",
"results": [
{
"index": 0,
"relevance_score": 0.1111111
},
{
"index": 1,
"relevance_score": 0.2222222
}
],
"meta": {
"api_version": {
"version": "1"
},
"billed_units": {
"search_units": 1
}
}
}
""";
}
}
| AzureAiStudioRerankResponseEntityTests |
java | quarkusio__quarkus | extensions/security-webauthn/runtime/src/main/java/io/quarkus/security/webauthn/WebAuthnAuthenticatorStorage.java | {
"start": 631,
"end": 3020
} | class ____ {
@Inject
WebAuthnUserProvider userProvider;
@Inject
Vertx vertx;
public Uni<List<WebAuthnCredentialRecord>> findByUsername(String username) {
return runPotentiallyBlocking(() -> userProvider.findByUsername(username));
}
public Uni<WebAuthnCredentialRecord> findByCredID(String credID) {
return runPotentiallyBlocking(() -> userProvider.findByCredentialId(credID));
}
public Uni<Void> create(WebAuthnCredentialRecord credentialRecord) {
return runPotentiallyBlocking(() -> userProvider.store(credentialRecord));
}
public Uni<Void> update(String credID, long counter) {
return runPotentiallyBlocking(() -> userProvider.update(credID, counter));
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private <T> Uni<T> runPotentiallyBlocking(Supplier<Uni<? extends T>> supplier) {
if (BlockingOperationControl.isBlockingAllowed()
|| isNonBlocking(userProvider.getClass())) {
return (Uni<T>) supplier.get();
}
if (isRunOnVirtualThread(userProvider.getClass())) {
return Uni.createFrom().deferred(supplier).runSubscriptionOn(VirtualThreadsRecorder.getCurrent());
}
// run it in a worker thread
return vertx.executeBlocking(Uni.createFrom().deferred((Supplier) supplier));
}
private boolean isNonBlocking(Class<?> klass) {
do {
if (klass.isAnnotationPresent(NonBlocking.class))
return true;
if (klass.isAnnotationPresent(Blocking.class))
return false;
if (klass.isAnnotationPresent(RunOnVirtualThread.class))
return false;
klass = klass.getSuperclass();
} while (klass != null);
// no information, assumed non-blocking
return true;
}
private boolean isRunOnVirtualThread(Class<?> klass) {
do {
if (klass.isAnnotationPresent(RunOnVirtualThread.class))
return true;
if (klass.isAnnotationPresent(Blocking.class))
return false;
if (klass.isAnnotationPresent(NonBlocking.class))
return false;
klass = klass.getSuperclass();
} while (klass != null);
// no information, assumed non-blocking
return false;
}
}
| WebAuthnAuthenticatorStorage |
java | elastic__elasticsearch | x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java | {
"start": 1099,
"end": 12435
} | class ____ extends AbstractRemoteClusterSecurityTestCase {
protected static final String REMOTE_SEARCH_USER_NO_DLS_FLS = REMOTE_SEARCH_USER + "_no_dls_fls";
protected static final String REMOTE_SEARCH_USER_DLS_FLS = REMOTE_SEARCH_USER + "_dls_fls";
protected static final String REMOTE_SEARCH_USER_DLS = REMOTE_SEARCH_USER + "_dls";
protected static final String REMOTE_SEARCH_USER_FLS = REMOTE_SEARCH_USER + "_fls";
/**
* Creates remote search users where each has access to all remote clusters but with different DLS/FLS restrictions.
*
* @throws IOException in case of an I/O errors
*/
private void createRemoteSearchUsers() throws IOException {
createRemoteSearchUserAndRole(REMOTE_SEARCH_USER_NO_DLS_FLS, REMOTE_SEARCH_USER_NO_DLS_FLS + "_role", """
{
"cluster": ["manage_own_api_key"],
"remote_indices": [
{
"names": ["remote_index*"],
"privileges": ["read", "read_cross_cluster"],
"clusters": ["my_*_cluster*"]
}
]
}""");
createRemoteSearchUserAndRole(REMOTE_SEARCH_USER_DLS_FLS, REMOTE_SEARCH_USER_DLS_FLS + "_role", """
{
"cluster": ["manage_own_api_key"],
"remote_indices": [
{
"names": ["remote_index*"],
"privileges": ["read", "read_cross_cluster"],
"clusters": ["my_*_cluster*"],
"query": {
"bool": {
"should" : [
{ "term" : { "field1" : "value1" } },
{ "term" : { "field2" : "value2" } }
],
"minimum_should_match" : 1
}
},
"field_security": {"grant": [ "field2" ]}
},
{
"names": ["remote_index1", "remote_index2", "remote_index3"],
"privileges": ["read", "read_cross_cluster"],
"clusters": ["my_remote_cluster*"],
"query": {
"bool": {
"should" : [
{ "term" : { "field2" : "value1" } },
{ "term" : { "field1" : "value2" } }
],
"minimum_should_match" : 1
}
},
"field_security": {"grant": [ "field1" ]}
}
]
}""");
createRemoteSearchUserAndRole(REMOTE_SEARCH_USER_DLS, REMOTE_SEARCH_USER_DLS + "_role", """
{
"cluster": ["manage_own_api_key"],
"remote_indices": [
{
"names": ["remote_index*"],
"privileges": ["read", "read_cross_cluster"],
"clusters": ["my_*_cluster*"],
"query": {"bool": { "must_not": { "term" : {"field1" : "value1"}}}}
},
{
"names": ["remote_index*"],
"privileges": ["read", "read_cross_cluster"],
"clusters": ["my_*_cluster*"],
"query": {"bool": { "must_not": { "term" : {"field2" : "value1"}}}}
}
]
}""");
createRemoteSearchUserAndRole(REMOTE_SEARCH_USER_FLS, REMOTE_SEARCH_USER_FLS + "_role", """
{
"cluster": ["manage_own_api_key"],
"remote_indices": [
{
"names": ["remote_index*"],
"privileges": ["read", "read_cross_cluster"],
"clusters": ["my_*_cluster*"],
"field_security": {"grant": [ "field1", "field2" ], "except": ["field2"]}
},
{
"names": ["remote_index*"],
"privileges": ["read", "read_cross_cluster"],
"clusters": ["my_*_cluster*"],
"field_security": {"grant": [ "field3" ]}
}
]
}""");
}
private void createIndicesOnFulfillingCluster() throws IOException {
final Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
bulkRequest.setJsonEntity("""
{ "index": { "_index": "remote_index1" } }
{ "field1": "value1", "field2": "value1", "field3": "value1" }
{ "index": { "_index": "remote_index2" } }
{ "field1": "value2", "field2": "value2", "field3": "value2" }
{ "index": { "_index": "remote_index3" } }
{ "field1": "value3", "field2": "value3", "field3": "value3" }
{ "index": { "_index": "remote_index4" } }
{ "field1": "value4", "field2": "value4", "field3": "value4" }
{ "index": { "_index": "not-shared-index1" } }
{ "name": "foo" }
{ "index": { "_index": "not-shared-index2" } }
{ "name": "bar" }
{ "index": { "_index": "not-shared-index3" } }
{ "name": "baz" }\n""");
assertOK(performRequestAgainstFulfillingCluster(bulkRequest));
}
protected void setupRemoteClusterTestCase(String clusterAlias) throws Exception {
// Configures one of pre-defined remote clusters on querying cluster side where each remote cluster uses
// a different API key that has (or doesn't) various DLS/FLS restrictions.
configureRemoteCluster(clusterAlias);
// Insert some documents on the fulfilling cluster, so we can attempt to search it from the querying cluster.
createIndicesOnFulfillingCluster();
// Create remote search users (on querying cluster) with combinations of DLS/FLS restrictions.
createRemoteSearchUsers();
}
/**
* Asserts that a search result contains expected indices and that for every index we get the same expected fields.
*/
protected void assertSearchResponseContainsExpectedIndicesAndFields(
Response searchResponse,
String[] expectedRemoteIndices,
String[] expectedFields
) {
try {
assertOK(searchResponse);
var response = SearchResponseUtils.responseAsSearchResponse(searchResponse);
try {
final var searchResult = Arrays.stream(response.getHits().getHits())
.collect(Collectors.toMap(SearchHit::getIndex, SearchHit::getSourceAsMap));
assertThat(searchResult.keySet(), containsInAnyOrder(expectedRemoteIndices));
for (String remoteIndex : expectedRemoteIndices) {
assertThat(searchResult.get(remoteIndex).keySet(), containsInAnyOrder(expectedFields));
}
} finally {
response.decRef();
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
* Asserts that a search result contains expected indices and expected fields per index.
* @param searchResponse to check
* @param expectedRemoteIndicesAndFields map of expected indices and expected fields per single index
*/
protected void assertSearchResponseContainsExpectedIndicesAndFields(
Response searchResponse,
Map<String, Set<String>> expectedRemoteIndicesAndFields
) {
try {
assertOK(searchResponse);
var response = SearchResponseUtils.responseAsSearchResponse(searchResponse);
try {
final var searchResult = Arrays.stream(response.getHits().getHits())
.collect(Collectors.toMap(SearchHit::getIndex, SearchHit::getSourceAsMap));
assertThat(searchResult.keySet(), equalTo(expectedRemoteIndicesAndFields.keySet()));
for (String remoteIndex : expectedRemoteIndicesAndFields.keySet()) {
Set<String> expectedFields = expectedRemoteIndicesAndFields.get(remoteIndex);
assertThat(searchResult.get(remoteIndex).keySet(), equalTo(expectedFields));
}
} finally {
response.decRef();
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
protected void assertSearchResponseContainsEmptyResult(Response response) {
try {
assertOK(response);
SearchResponse searchResponse = SearchResponseUtils.responseAsSearchResponse(response);
try {
assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L));
} finally {
searchResponse.decRef();
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
protected Response performRequestWithUser(final Request request, final String username) throws IOException {
request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", basicAuthHeaderValue(username, PASS)));
return client().performRequest(request);
}
protected Tuple<String, String> createRemoteSearchApiKeyWithUser(String username, String roleDescriptorsJson) throws IOException {
final Request request = new Request("PUT", "/_security/api_key");
request.setJsonEntity(Strings.format("""
{
"name": "%s",
"role_descriptors": %s
}
}""", username + "_" + randomAlphaOfLength(5), roleDescriptorsJson));
final Response response = performRequestWithUser(request, username);
assertOK(response);
ObjectPath path = ObjectPath.createFromResponse(response);
final String apiKeyEncoded = path.evaluate("encoded");
final String apiKeyId = path.evaluate("id");
assertThat(apiKeyEncoded, notNullValue());
assertThat(apiKeyId, notNullValue());
return Tuple.tuple(apiKeyId, apiKeyEncoded);
}
protected static String createCrossClusterAccessApiKey(String accessJson, AtomicReference<Map<String, Object>> apiKeyRef) {
if (apiKeyRef.get() == null) {
apiKeyRef.set(createCrossClusterAccessApiKey(accessJson));
}
return (String) apiKeyRef.get().get("encoded");
}
private void createRemoteSearchUserAndRole(String username, String roleName, String roleJson) throws IOException {
final var putRoleRequest = new Request("PUT", "/_security/role/" + roleName);
putRoleRequest.setJsonEntity(roleJson);
assertOK(adminClient().performRequest(putRoleRequest));
final var putUserRequest = new Request("PUT", "/_security/user/" + username);
putUserRequest.setJsonEntity(Strings.format("""
{
"password": "%s",
"roles" : ["%s"]
}""", PASS, roleName));
assertOK(adminClient().performRequest(putUserRequest));
}
protected Response performRequestWithApiKey(final Request request, final String encoded) throws IOException {
request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encoded));
return client().performRequest(request);
}
}
| AbstractRemoteClusterSecurityDlsAndFlsRestIT |
java | spring-projects__spring-security | access/src/test/java/org/springframework/security/access/annotation/SecuredAnnotationSecurityMetadataSourceTests.java | {
"start": 6775,
"end": 6940
} | interface ____ extends BusinessService {
@Secured({ "ROLE_USER" })
Department someUserMethod3(Department dept);
}
@SuppressWarnings("serial")
| DepartmentService |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/NullAwareComparator.java | {
"start": 1384,
"end": 1536
} | class ____ to be used within a composite type comparator (such as {@link
* RowComparator}) that handles serialized comparison.
*/
@Internal
public | assumes |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/SimpleJmsRequestReplySharedReplyToTest.java | {
"start": 1385,
"end": 4098
} | class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testJmsRequestReplySharedReplyTo() {
assertEquals("Hello A", template.requestBody(
"activemq:queue:queue:SimpleJmsRequestReplySharedReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplySharedReplyToTest.bar&replyToType=Shared&replyToDestinationSelectorName=mySelector&replyToConsumerType=Simple",
"A"));
assertEquals("Hello B", template.requestBody(
"activemq:queue:queue:SimpleJmsRequestReplySharedReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplySharedReplyToTest.bar&replyToType=Shared&replyToDestinationSelectorName=mySelector&replyToConsumerType=Simple",
"B"));
assertEquals("Hello C", template.requestBody(
"activemq:queue:queue:SimpleJmsRequestReplySharedReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplySharedReplyToTest.bar&replyToType=Shared&replyToDestinationSelectorName=mySelector&replyToConsumerType=Simple",
"C"));
assertEquals("Hello D", template.requestBody(
"activemq:queue:queue:SimpleJmsRequestReplySharedReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplySharedReplyToTest.bar&replyToType=Shared&replyToDestinationSelectorName=mySelector&replyToConsumerType=Simple",
"D"));
assertEquals("Hello E", template.requestBody(
"activemq:queue:queue:SimpleJmsRequestReplySharedReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplySharedReplyToTest.bar&replyToType=Shared&replyToDestinationSelectorName=mySelector&replyToConsumerType=Simple",
"E"));
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("activemq:queue:queue:SimpleJmsRequestReplySharedReplyToTest.foo")
.transform(body().prepend("Hello "));
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
| SimpleJmsRequestReplySharedReplyToTest |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/RoutePolicySupport.java | {
"start": 1234,
"end": 1319
} | class ____ developing custom {@link RoutePolicy} implementations.
*/
public abstract | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/association/OneToOneAssociationTest.java | {
"start": 2008,
"end": 2193
} | class ____ {
@Id
Long id;
@OneToOne
User user;
User getUser() {
return user;
}
void setUser(User newUser) {
user = newUser;
}
}
@Entity
private static | Customer |
java | apache__maven | compat/maven-embedder/src/main/java/org/apache/maven/cli/CLIReportingUtils.java | {
"start": 1127,
"end": 1231
} | class ____ to report errors, statistics, application version info, etc.
*
*/
@Deprecated
public final | used |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/TruthContainsExactlyElementsInUsage.java | {
"start": 1972,
"end": 5121
} | class ____ extends BugChecker
implements MethodInvocationTreeMatcher {
private static final Matcher<ExpressionTree> CONTAINS_EXACTLY_ELEMENTS_IN_METHOD_MATCHER =
instanceMethod()
.onDescendantOf("com.google.common.truth.IterableSubject")
.named("containsExactlyElementsIn");
// Not including Sets for the rare occasion of duplicate element declarations inside the set. If
// refactored to containsExactly, the behavior is different.
private static final Matcher<ExpressionTree> NEW_ITERABLE_MATCHERS =
anyOf(
staticMethod().onClass("com.google.common.collect.Lists").named("newArrayList"),
staticMethod().onClass("com.google.common.collect.ImmutableList").named("of"),
staticMethod().onClass("java.util.Arrays").named("asList"),
staticMethod().onClass("java.util.Collections").named("singletonList"),
staticMethod().onClass("java.util.List").named("of"));
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (!CONTAINS_EXACTLY_ELEMENTS_IN_METHOD_MATCHER.matches(tree, state)) {
return Description.NO_MATCH;
}
// Avoids refactoring variables and method invocations that are not creating new iterables.
// The first param from containsExactlyElementsIn should always be an Iterable.
return getArgumentsFromNewIterableExpression(tree.getArguments().getFirst(), state)
.map(arguments -> describeMatch(tree, refactor(arguments, tree, state)))
.orElse(Description.NO_MATCH);
}
// Returns the arguments from the expression. If it is not a valid expression, returns empty.
private static Optional<ImmutableList<ExpressionTree>> getArgumentsFromNewIterableExpression(
ExpressionTree expression, VisitorState state) {
if (expression instanceof MethodInvocationTree paramMethodInvocationTree
&& NEW_ITERABLE_MATCHERS.matches(expression, state)) {
return Optional.of(ImmutableList.copyOf(paramMethodInvocationTree.getArguments()));
} else if (expression instanceof NewArrayTree newArrayTree) {
return Optional.of(ImmutableList.copyOf(newArrayTree.getInitializers()));
}
return Optional.empty();
}
private static SuggestedFix refactor(
ImmutableList<ExpressionTree> arguments, MethodInvocationTree tree, VisitorState state) {
// First we replace the containsExactlyElementsIn method with containsExactly.
SuggestedFix.Builder fix =
SuggestedFixes.renameMethodInvocation(tree, "containsExactly", state).toBuilder();
// Finally, we use the arguments from the new iterable to build the containsExactly arguments.
ExpressionTree expressionToReplace = tree.getArguments().getFirst();
if (!arguments.isEmpty()) {
fix.replace(getStartPosition(expressionToReplace), getStartPosition(arguments.getFirst()), "")
.replace(
state.getEndPosition(getLast(arguments)),
state.getEndPosition(expressionToReplace),
"");
} else {
fix.delete(expressionToReplace);
}
return fix.build();
}
}
| TruthContainsExactlyElementsInUsage |
java | qos-ch__slf4j | integration/src/test/java/org/slf4j/OldAPIVersionMismatchAssertionTest.java | {
"start": 1401,
"end": 2384
} | class ____ {
StringPrintStream sps = new StringPrintStream(System.err);
PrintStream old = System.err;
int diff = 1024 + new Random().nextInt(10000);
@Before
public void setUp() throws Exception {
System.setErr(sps);
}
@After
public void tearDown() throws Exception {
System.setErr(old);
}
@Test
public void test() throws Exception {
Logger logger = LoggerFactory.getLogger(this.getClass());
String msg = "hello world " + diff;
logger.info(msg);
OutputVerifier.dump(sps);
String s0 = (String) sps.stringList.get(0);
assertTrue(s0.matches("SLF4J: The requested version .* by your slf4j binding is not compatible with.*"));
String s1 = (String) sps.stringList.get(1);
assertTrue(s1.contains(LoggerFactory.VERSION_MISMATCH));
String s2 = (String) sps.stringList.get(2);
assertTrue(s2.contains(msg));
}
}
| OldAPIVersionMismatchAssertionTest |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/util/FieldUtilsTests.java | {
"start": 1497,
"end": 1638
} | class ____ {
private String protectedField = "x";
private Nested nested = new Nested();
}
@SuppressWarnings("unused")
static | TestClass |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java | {
"start": 2500,
"end": 28334
} | class ____ extends DocValuesConsumer {
IndexOutput data, meta;
final int maxDoc;
private byte[] termsDictBuffer;
private final int skipIndexIntervalSize;
ES87TSDBDocValuesConsumer(
SegmentWriteState state,
int skipIndexIntervalSize,
String dataCodec,
String dataExtension,
String metaCodec,
String metaExtension
) throws IOException {
this.termsDictBuffer = new byte[1 << 14];
boolean success = false;
try {
final String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
data = state.directory.createOutput(dataName, state.context);
CodecUtil.writeIndexHeader(
data,
dataCodec,
ES87TSDBDocValuesFormat.VERSION_CURRENT,
state.segmentInfo.getId(),
state.segmentSuffix
);
String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
meta = state.directory.createOutput(metaName, state.context);
CodecUtil.writeIndexHeader(
meta,
metaCodec,
ES87TSDBDocValuesFormat.VERSION_CURRENT,
state.segmentInfo.getId(),
state.segmentSuffix
);
maxDoc = state.segmentInfo.maxDoc();
this.skipIndexIntervalSize = skipIndexIntervalSize;
success = true;
} finally {
if (success == false) {
IOUtils.closeWhileHandlingException(this);
}
}
}
@Override
public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
meta.writeInt(field.number);
meta.writeByte(ES87TSDBDocValuesFormat.NUMERIC);
DocValuesProducer producer = new EmptyDocValuesProducer() {
@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
return DocValues.singleton(valuesProducer.getNumeric(field));
}
};
if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) {
writeSkipIndex(field, producer);
}
writeField(field, producer, -1);
}
private long[] writeField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException {
int numDocsWithValue = 0;
long numValues = 0;
SortedNumericDocValues values = valuesProducer.getSortedNumeric(field);
for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
numDocsWithValue++;
final int count = values.docValueCount();
numValues += count;
}
if (numDocsWithValue == 0) { // meta[-2, 0]: No documents with values
meta.writeLong(-2); // docsWithFieldOffset
meta.writeLong(0L); // docsWithFieldLength
meta.writeShort((short) -1); // jumpTableEntryCount
meta.writeByte((byte) -1); // denseRankPower
} else if (numDocsWithValue == maxDoc) { // meta[-1, 0]: All documents have values
meta.writeLong(-1); // docsWithFieldOffset
meta.writeLong(0L); // docsWithFieldLength
meta.writeShort((short) -1); // jumpTableEntryCount
meta.writeByte((byte) -1); // denseRankPower
} else { // meta[data.offset, data.length]: IndexedDISI structure for documents with values
long offset = data.getFilePointer();
meta.writeLong(offset); // docsWithFieldOffset
values = valuesProducer.getSortedNumeric(field);
final short jumpTableEntryCount = IndexedDISI.writeBitSet(values, data, IndexedDISI.DEFAULT_DENSE_RANK_POWER);
meta.writeLong(data.getFilePointer() - offset); // docsWithFieldLength
meta.writeShort(jumpTableEntryCount);
meta.writeByte(IndexedDISI.DEFAULT_DENSE_RANK_POWER);
}
meta.writeLong(numValues);
if (numValues > 0) {
// Special case for maxOrd of 1, signal -1 that no blocks will be written
meta.writeInt(maxOrd != 1 ? ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT : -1);
final ByteBuffersDataOutput indexOut = new ByteBuffersDataOutput();
final DirectMonotonicWriter indexWriter = DirectMonotonicWriter.getInstance(
meta,
new ByteBuffersIndexOutput(indexOut, "temp-dv-index", "temp-dv-index"),
1L + ((numValues - 1) >>> ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SHIFT),
ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT
);
final long valuesDataOffset = data.getFilePointer();
// Special case for maxOrd of 1, skip writing the blocks
if (maxOrd != 1) {
final long[] buffer = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE];
int bufferSize = 0;
final TSDBDocValuesEncoder encoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE);
values = valuesProducer.getSortedNumeric(field);
final int bitsPerOrd = maxOrd >= 0 ? PackedInts.bitsRequired(maxOrd - 1) : -1;
for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
final int count = values.docValueCount();
for (int i = 0; i < count; ++i) {
buffer[bufferSize++] = values.nextValue();
if (bufferSize == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE) {
indexWriter.add(data.getFilePointer() - valuesDataOffset);
if (maxOrd >= 0) {
encoder.encodeOrdinals(buffer, data, bitsPerOrd);
} else {
encoder.encode(buffer, data);
}
bufferSize = 0;
}
}
}
if (bufferSize > 0) {
indexWriter.add(data.getFilePointer() - valuesDataOffset);
// Fill unused slots in the block with zeroes rather than junk
Arrays.fill(buffer, bufferSize, ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE, 0L);
if (maxOrd >= 0) {
encoder.encodeOrdinals(buffer, data, bitsPerOrd);
} else {
encoder.encode(buffer, data);
}
}
}
final long valuesDataLength = data.getFilePointer() - valuesDataOffset;
if (maxOrd != 1) {
// Special case for maxOrd of 1, indexWriter isn't really used, so no need to invoke finish() method.
indexWriter.finish();
}
final long indexDataOffset = data.getFilePointer();
data.copyBytes(indexOut.toDataInput(), indexOut.size());
meta.writeLong(indexDataOffset);
meta.writeLong(data.getFilePointer() - indexDataOffset);
meta.writeLong(valuesDataOffset);
meta.writeLong(valuesDataLength);
}
return new long[] { numDocsWithValue, numValues };
}
@Override
public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
meta.writeInt(field.number);
meta.writeByte(ES87TSDBDocValuesFormat.BINARY);
BinaryDocValues values = valuesProducer.getBinary(field);
long start = data.getFilePointer();
meta.writeLong(start); // dataOffset
int numDocsWithField = 0;
int minLength = Integer.MAX_VALUE;
int maxLength = 0;
for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
numDocsWithField++;
BytesRef v = values.binaryValue();
int length = v.length;
data.writeBytes(v.bytes, v.offset, v.length);
minLength = Math.min(length, minLength);
maxLength = Math.max(length, maxLength);
}
assert numDocsWithField <= maxDoc;
meta.writeLong(data.getFilePointer() - start); // dataLength
if (numDocsWithField == 0) {
meta.writeLong(-2); // docsWithFieldOffset
meta.writeLong(0L); // docsWithFieldLength
meta.writeShort((short) -1); // jumpTableEntryCount
meta.writeByte((byte) -1); // denseRankPower
} else if (numDocsWithField == maxDoc) {
meta.writeLong(-1); // docsWithFieldOffset
meta.writeLong(0L); // docsWithFieldLength
meta.writeShort((short) -1); // jumpTableEntryCount
meta.writeByte((byte) -1); // denseRankPower
} else {
long offset = data.getFilePointer();
meta.writeLong(offset); // docsWithFieldOffset
values = valuesProducer.getBinary(field);
final short jumpTableEntryCount = IndexedDISI.writeBitSet(values, data, IndexedDISI.DEFAULT_DENSE_RANK_POWER);
meta.writeLong(data.getFilePointer() - offset); // docsWithFieldLength
meta.writeShort(jumpTableEntryCount);
meta.writeByte(IndexedDISI.DEFAULT_DENSE_RANK_POWER);
}
meta.writeInt(numDocsWithField);
meta.writeInt(minLength);
meta.writeInt(maxLength);
if (maxLength > minLength) {
start = data.getFilePointer();
meta.writeLong(start);
meta.writeVInt(ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT);
final DirectMonotonicWriter writer = DirectMonotonicWriter.getInstance(
meta,
data,
numDocsWithField + 1,
ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT
);
long addr = 0;
writer.add(addr);
values = valuesProducer.getBinary(field);
for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
addr += values.binaryValue().length;
writer.add(addr);
}
writer.finish();
meta.writeLong(data.getFilePointer() - start);
}
}
@Override
public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
meta.writeInt(field.number);
meta.writeByte(ES87TSDBDocValuesFormat.SORTED);
doAddSortedField(field, valuesProducer, false);
}
private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer, boolean addTypeByte) throws IOException {
DocValuesProducer producer = new EmptyDocValuesProducer() {
@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
SortedDocValues sorted = valuesProducer.getSorted(field);
NumericDocValues sortedOrds = new NumericDocValues() {
@Override
public long longValue() throws IOException {
return sorted.ordValue();
}
@Override
public boolean advanceExact(int target) throws IOException {
return sorted.advanceExact(target);
}
@Override
public int docID() {
return sorted.docID();
}
@Override
public int nextDoc() throws IOException {
return sorted.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return sorted.advance(target);
}
@Override
public long cost() {
return sorted.cost();
}
};
return DocValues.singleton(sortedOrds);
}
};
if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) {
writeSkipIndex(field, producer);
}
if (addTypeByte) {
meta.writeByte((byte) 0); // multiValued (0 = singleValued)
}
SortedDocValues sorted = valuesProducer.getSorted(field);
int maxOrd = sorted.getValueCount();
writeField(field, producer, maxOrd);
addTermsDict(DocValues.singleton(valuesProducer.getSorted(field)));
}
private void addTermsDict(SortedSetDocValues values) throws IOException {
final long size = values.getValueCount();
meta.writeVLong(size);
int blockMask = ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_MASK;
int shift = ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT;
meta.writeInt(DIRECT_MONOTONIC_BLOCK_SHIFT);
ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput();
ByteBuffersIndexOutput addressOutput = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp");
long numBlocks = (size + blockMask) >>> shift;
DirectMonotonicWriter writer = DirectMonotonicWriter.getInstance(meta, addressOutput, numBlocks, DIRECT_MONOTONIC_BLOCK_SHIFT);
BytesRefBuilder previous = new BytesRefBuilder();
long ord = 0;
long start = data.getFilePointer();
int maxLength = 0, maxBlockLength = 0;
TermsEnum iterator = values.termsEnum();
LZ4.FastCompressionHashTable ht = new LZ4.FastCompressionHashTable();
ByteArrayDataOutput bufferedOutput = new ByteArrayDataOutput(termsDictBuffer);
int dictLength = 0;
for (BytesRef term = iterator.next(); term != null; term = iterator.next()) {
if ((ord & blockMask) == 0) {
if (ord != 0) {
// flush the previous block
final int uncompressedLength = compressAndGetTermsDictBlockLength(bufferedOutput, dictLength, ht);
maxBlockLength = Math.max(maxBlockLength, uncompressedLength);
bufferedOutput.reset(termsDictBuffer);
}
writer.add(data.getFilePointer() - start);
// Write the first term both to the index output, and to the buffer where we'll use it as a
// dictionary for compression
data.writeVInt(term.length);
data.writeBytes(term.bytes, term.offset, term.length);
bufferedOutput = maybeGrowBuffer(bufferedOutput, term.length);
bufferedOutput.writeBytes(term.bytes, term.offset, term.length);
dictLength = term.length;
} else {
final int prefixLength = StringHelper.bytesDifference(previous.get(), term);
final int suffixLength = term.length - prefixLength;
assert suffixLength > 0; // terms are unique
// Will write (suffixLength + 1 byte + 2 vint) bytes. Grow the buffer in need.
bufferedOutput = maybeGrowBuffer(bufferedOutput, suffixLength + 11);
bufferedOutput.writeByte((byte) (Math.min(prefixLength, 15) | (Math.min(15, suffixLength - 1) << 4)));
if (prefixLength >= 15) {
bufferedOutput.writeVInt(prefixLength - 15);
}
if (suffixLength >= 16) {
bufferedOutput.writeVInt(suffixLength - 16);
}
bufferedOutput.writeBytes(term.bytes, term.offset + prefixLength, suffixLength);
}
maxLength = Math.max(maxLength, term.length);
previous.copyBytes(term);
++ord;
}
// Compress and write out the last block
if (bufferedOutput.getPosition() > dictLength) {
final int uncompressedLength = compressAndGetTermsDictBlockLength(bufferedOutput, dictLength, ht);
maxBlockLength = Math.max(maxBlockLength, uncompressedLength);
}
writer.finish();
meta.writeInt(maxLength);
// Write one more int for storing max block length.
meta.writeInt(maxBlockLength);
meta.writeLong(start);
meta.writeLong(data.getFilePointer() - start);
start = data.getFilePointer();
addressBuffer.copyTo(data);
meta.writeLong(start);
meta.writeLong(data.getFilePointer() - start);
// Now write the reverse terms index
writeTermsIndex(values);
}
private int compressAndGetTermsDictBlockLength(ByteArrayDataOutput bufferedOutput, int dictLength, LZ4.FastCompressionHashTable ht)
throws IOException {
int uncompressedLength = bufferedOutput.getPosition() - dictLength;
data.writeVInt(uncompressedLength);
LZ4.compressWithDictionary(termsDictBuffer, 0, dictLength, uncompressedLength, data, ht);
return uncompressedLength;
}
private ByteArrayDataOutput maybeGrowBuffer(ByteArrayDataOutput bufferedOutput, int termLength) {
int pos = bufferedOutput.getPosition(), originalLength = termsDictBuffer.length;
if (pos + termLength >= originalLength - 1) {
termsDictBuffer = ArrayUtil.grow(termsDictBuffer, originalLength + termLength);
bufferedOutput = new ByteArrayDataOutput(termsDictBuffer, pos, termsDictBuffer.length - pos);
}
return bufferedOutput;
}
private void writeTermsIndex(SortedSetDocValues values) throws IOException {
final long size = values.getValueCount();
meta.writeInt(ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT);
long start = data.getFilePointer();
long numBlocks = 1L + ((size + ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK)
>>> ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT);
ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput();
DirectMonotonicWriter writer;
try (ByteBuffersIndexOutput addressOutput = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp")) {
writer = DirectMonotonicWriter.getInstance(meta, addressOutput, numBlocks, DIRECT_MONOTONIC_BLOCK_SHIFT);
TermsEnum iterator = values.termsEnum();
BytesRefBuilder previous = new BytesRefBuilder();
long offset = 0;
long ord = 0;
for (BytesRef term = iterator.next(); term != null; term = iterator.next()) {
if ((ord & ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == 0) {
writer.add(offset);
final int sortKeyLength;
if (ord == 0) {
// no previous term: no bytes to write
sortKeyLength = 0;
} else {
sortKeyLength = StringHelper.sortKeyLength(previous.get(), term);
}
offset += sortKeyLength;
data.writeBytes(term.bytes, term.offset, sortKeyLength);
} else if ((ord
& ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) {
previous.copyBytes(term);
}
++ord;
}
writer.add(offset);
writer.finish();
meta.writeLong(start);
meta.writeLong(data.getFilePointer() - start);
start = data.getFilePointer();
addressBuffer.copyTo(data);
meta.writeLong(start);
meta.writeLong(data.getFilePointer() - start);
}
}
@Override
public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
meta.writeInt(field.number);
meta.writeByte(ES87TSDBDocValuesFormat.SORTED_NUMERIC);
writeSortedNumericField(field, valuesProducer, -1);
}
private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException {
if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) {
writeSkipIndex(field, valuesProducer);
}
if (maxOrd > -1) {
meta.writeByte((byte) 1); // multiValued (1 = multiValued)
}
long[] stats = writeField(field, valuesProducer, maxOrd);
int numDocsWithField = Math.toIntExact(stats[0]);
long numValues = stats[1];
assert numValues >= numDocsWithField;
meta.writeInt(numDocsWithField);
if (numValues > numDocsWithField) {
long start = data.getFilePointer();
meta.writeLong(start);
meta.writeVInt(ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT);
final DirectMonotonicWriter addressesWriter = DirectMonotonicWriter.getInstance(
meta,
data,
numDocsWithField + 1L,
ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT
);
long addr = 0;
addressesWriter.add(addr);
SortedNumericDocValues values = valuesProducer.getSortedNumeric(field);
for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
addr += values.docValueCount();
addressesWriter.add(addr);
}
addressesWriter.finish();
meta.writeLong(data.getFilePointer() - start);
}
}
private static boolean isSingleValued(SortedSetDocValues values) throws IOException {
if (DocValues.unwrapSingleton(values) != null) {
return true;
}
assert values.docID() == -1;
for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
int docValueCount = values.docValueCount();
assert docValueCount > 0;
if (docValueCount > 1) {
return false;
}
}
return true;
}
@Override
public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
meta.writeInt(field.number);
meta.writeByte(SORTED_SET);
if (isSingleValued(valuesProducer.getSortedSet(field))) {
doAddSortedField(field, new EmptyDocValuesProducer() {
@Override
public SortedDocValues getSorted(FieldInfo field) throws IOException {
return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN);
}
}, true);
return;
}
SortedSetDocValues values = valuesProducer.getSortedSet(field);
long maxOrd = values.getValueCount();
writeSortedNumericField(field, new EmptyDocValuesProducer() {
@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
SortedSetDocValues values = valuesProducer.getSortedSet(field);
return new SortedNumericDocValues() {
long[] ords = LongsRef.EMPTY_LONGS;
int i, docValueCount;
@Override
public long nextValue() {
return ords[i++];
}
@Override
public int docValueCount() {
return docValueCount;
}
@Override
public boolean advanceExact(int target) {
throw new UnsupportedOperationException();
}
@Override
public int docID() {
return values.docID();
}
@Override
public int nextDoc() throws IOException {
int doc = values.nextDoc();
if (doc != NO_MORE_DOCS) {
docValueCount = values.docValueCount();
ords = ArrayUtil.grow(ords, docValueCount);
for (int j = 0; j < docValueCount; j++) {
ords[j] = values.nextOrd();
}
i = 0;
}
return doc;
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return values.cost();
}
};
}
}, maxOrd);
addTermsDict(valuesProducer.getSortedSet(field));
}
@Override
public void close() throws IOException {
boolean success = false;
try {
if (meta != null) {
meta.writeInt(-1); // write EOF marker
CodecUtil.writeFooter(meta); // write checksum
}
if (data != null) {
CodecUtil.writeFooter(data); // write checksum
}
success = true;
} finally {
if (success) {
IOUtils.close(data, meta);
} else {
IOUtils.closeWhileHandlingException(data, meta);
}
meta = data = null;
}
}
private static | ES87TSDBDocValuesConsumer |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java | {
"start": 13526,
"end": 14895
} | class ____ extends FileStore {
@Override
public String name() {
return "myHugeFS";
}
@Override
public String type() {
return "bigFS";
}
@Override
public boolean isReadOnly() {
return false;
}
@Override
public long getTotalSpace() throws IOException {
return randomIntBetween(-1000, 1000);
}
@Override
public long getUsableSpace() throws IOException {
return randomIntBetween(-1000, 1000);
}
@Override
public long getUnallocatedSpace() throws IOException {
return randomIntBetween(-1000, 1000);
}
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> type) {
return false;
}
@Override
public boolean supportsFileAttributeView(String name) {
return false;
}
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> type) {
throw new UnsupportedOperationException("don't call me");
}
@Override
public Object getAttribute(String attribute) throws IOException {
throw new UnsupportedOperationException("don't call me");
}
}
}
| HugeFileStore |
java | apache__camel | components/camel-kafka/src/main/java/org/apache/camel/component/kafka/consumer/DefaultKafkaManualCommit.java | {
"start": 1015,
"end": 2890
} | class ____ implements KafkaManualCommit {
protected final KafkaManualCommitFactory.CamelExchangePayload camelExchangePayload;
protected final KafkaManualCommitFactory.KafkaRecordPayload kafkaRecordPayload;
protected DefaultKafkaManualCommit(KafkaManualCommitFactory.CamelExchangePayload camelExchangePayload,
KafkaManualCommitFactory.KafkaRecordPayload kafkaRecordPayload) {
this.camelExchangePayload = camelExchangePayload;
this.kafkaRecordPayload = kafkaRecordPayload;
}
/**
* @deprecated Use {@link #getCamelExchangePayload()}
*/
@Deprecated(since = "3.15.0")
public Consumer<?, ?> getConsumer() {
return camelExchangePayload.consumer;
}
public String getTopicName() {
return getPartition().topic();
}
public String getThreadId() {
return camelExchangePayload.threadId;
}
@Deprecated
public StateRepository<String, String> getOffsetRepository() {
return camelExchangePayload.offsetRepository;
}
public TopicPartition getPartition() {
return kafkaRecordPayload.partition;
}
public long getRecordOffset() {
return kafkaRecordPayload.recordOffset;
}
public long getCommitTimeout() {
return kafkaRecordPayload.commitTimeout;
}
/**
* Gets the Camel Exchange payload
*/
public KafkaManualCommitFactory.CamelExchangePayload getCamelExchangePayload() {
return camelExchangePayload;
}
/**
* Gets the Kafka record payload
*/
public KafkaManualCommitFactory.KafkaRecordPayload getKafkaRecordPayload() {
return kafkaRecordPayload;
}
@Override
public String toString() {
return "KafkaManualCommit[topic=" + getTopicName() + ", offset=" + getRecordOffset() + "]";
}
}
| DefaultKafkaManualCommit |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/QueryOperationDefaultVisitor.java | {
"start": 2157,
"end": 4069
} | class ____<T> implements QueryOperationVisitor<T> {
@Override
public T visit(ProjectQueryOperation projection) {
return defaultMethod(projection);
}
@Override
public T visit(AggregateQueryOperation aggregation) {
return defaultMethod(aggregation);
}
@Override
public T visit(WindowAggregateQueryOperation windowAggregate) {
return defaultMethod(windowAggregate);
}
@Override
public T visit(JoinQueryOperation join) {
return defaultMethod(join);
}
@Override
public T visit(SetQueryOperation setOperation) {
return defaultMethod(setOperation);
}
@Override
public T visit(FilterQueryOperation filter) {
return defaultMethod(filter);
}
@Override
public T visit(DistinctQueryOperation distinct) {
return defaultMethod(distinct);
}
@Override
public T visit(SortQueryOperation sort) {
return defaultMethod(sort);
}
@Override
public T visit(FunctionQueryOperation function) {
return defaultMethod(function);
}
@Override
public T visit(PartitionQueryOperation partition) {
return defaultMethod(partition);
}
@Override
public T visit(CorrelatedFunctionQueryOperation correlatedFunction) {
return defaultMethod(correlatedFunction);
}
@Override
public T visit(SourceQueryOperation catalogTable) {
return defaultMethod(catalogTable);
}
@Override
public T visit(ValuesQueryOperation values) {
return defaultMethod(values);
}
@Override
public <U> T visit(TableSourceQueryOperation<U> tableSourceTable) {
return defaultMethod(tableSourceTable);
}
@Override
public T visit(QueryOperation other) {
return defaultMethod(other);
}
public abstract T defaultMethod(QueryOperation other);
}
| QueryOperationDefaultVisitor |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/aggregate/AggregationStrategyMethodInfo.java | {
"start": 1092,
"end": 2738
} | class ____ {
private final Method method;
private final List<AggregationStrategyParameterInfo> oldParameters;
private final List<AggregationStrategyParameterInfo> newParameters;
public AggregationStrategyMethodInfo(Method method,
List<AggregationStrategyParameterInfo> oldParameters,
List<AggregationStrategyParameterInfo> newParameters) {
this.method = method;
this.oldParameters = oldParameters;
this.newParameters = newParameters;
}
public Object invoke(Object pojo, Exchange oldExchange, Exchange newExchange) throws Exception {
// evaluate the parameters
List<Object> list = new ArrayList<>(oldParameters.size() + newParameters.size());
for (AggregationStrategyParameterInfo info : oldParameters) {
if (oldExchange != null) {
Object value = info.getExpression().evaluate(oldExchange, info.getType());
list.add(value);
} else {
// use a null value as oldExchange is null
list.add(null);
}
}
for (AggregationStrategyParameterInfo info : newParameters) {
if (newExchange != null) {
Object value = info.getExpression().evaluate(newExchange, info.getType());
list.add(value);
} else {
// use a null value as newExchange is null
list.add(null);
}
}
Object[] args = list.toArray();
return method.invoke(pojo, args);
}
}
| AggregationStrategyMethodInfo |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsTests.java | {
"start": 53211,
"end": 57595
} | class ____ mixed with @Transactional and @Order declarations
assertThat(getSuperClassSourceWithTypeIn(TransactionalClass.class,
candidates)).isEqualTo(TransactionalClass.class);
assertThat(getSuperClassSourceWithTypeIn(TransactionalAndOrderedClass.class,
candidates)).isEqualTo(TransactionalAndOrderedClass.class);
assertThat(getSuperClassSourceWithTypeIn(SubTransactionalAndOrderedClass.class,
candidates)).isEqualTo(TransactionalAndOrderedClass.class);
}
private Object getSuperClassSourceWithTypeIn(Class<?> clazz, List<Class<? extends Annotation>> annotationTypes) {
return MergedAnnotations.from(clazz, SearchStrategy.SUPERCLASS).stream().filter(
MergedAnnotationPredicates.typeIn(annotationTypes).and(
MergedAnnotation::isDirectlyPresent)).map(
MergedAnnotation::getSource).findFirst().orElse(null);
}
@Test
void isDirectlyPresentForAllScenarios() {
// no class-level annotation
assertThat(MergedAnnotations.from(NonAnnotatedInterface.class).get(
Transactional.class).isDirectlyPresent()).isFalse();
assertThat(MergedAnnotations.from(NonAnnotatedInterface.class).isDirectlyPresent(
Transactional.class)).isFalse();
assertThat(MergedAnnotations.from(NonAnnotatedClass.class).get(
Transactional.class).isDirectlyPresent()).isFalse();
assertThat(MergedAnnotations.from(NonAnnotatedClass.class).isDirectlyPresent(
Transactional.class)).isFalse();
// inherited class-level annotation; note: @Transactional is inherited
assertThat(MergedAnnotations.from(InheritedAnnotationInterface.class).get(
Transactional.class).isDirectlyPresent()).isTrue();
assertThat(MergedAnnotations.from(
InheritedAnnotationInterface.class).isDirectlyPresent(
Transactional.class)).isTrue();
assertThat(MergedAnnotations.from(SubInheritedAnnotationInterface.class).get(
Transactional.class).isDirectlyPresent()).isFalse();
assertThat(MergedAnnotations.from(
SubInheritedAnnotationInterface.class).isDirectlyPresent(
Transactional.class)).isFalse();
assertThat(MergedAnnotations.from(InheritedAnnotationClass.class).get(
Transactional.class).isDirectlyPresent()).isTrue();
assertThat(
MergedAnnotations.from(InheritedAnnotationClass.class).isDirectlyPresent(
Transactional.class)).isTrue();
assertThat(MergedAnnotations.from(SubInheritedAnnotationClass.class).get(
Transactional.class).isDirectlyPresent()).isFalse();
assertThat(MergedAnnotations.from(
SubInheritedAnnotationClass.class).isDirectlyPresent(
Transactional.class)).isFalse();
// non-inherited class-level annotation; note: @Order is not inherited
assertThat(MergedAnnotations.from(NonInheritedAnnotationInterface.class).get(
Order.class).isDirectlyPresent()).isTrue();
assertThat(MergedAnnotations.from(
NonInheritedAnnotationInterface.class).isDirectlyPresent(
Order.class)).isTrue();
assertThat(MergedAnnotations.from(SubNonInheritedAnnotationInterface.class).get(
Order.class).isDirectlyPresent()).isFalse();
assertThat(MergedAnnotations.from(
SubNonInheritedAnnotationInterface.class).isDirectlyPresent(
Order.class)).isFalse();
assertThat(MergedAnnotations.from(NonInheritedAnnotationClass.class).get(
Order.class).isDirectlyPresent()).isTrue();
assertThat(MergedAnnotations.from(
NonInheritedAnnotationClass.class).isDirectlyPresent(
Order.class)).isTrue();
assertThat(MergedAnnotations.from(SubNonInheritedAnnotationClass.class).get(
Order.class).isDirectlyPresent()).isFalse();
assertThat(MergedAnnotations.from(
SubNonInheritedAnnotationClass.class).isDirectlyPresent(
Order.class)).isFalse();
}
@Test
void getAggregateIndexForAllScenarios() {
// no class-level annotation
assertThat(MergedAnnotations.from(NonAnnotatedInterface.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Transactional.class).getAggregateIndex()).isEqualTo(-1);
assertThat(MergedAnnotations.from(NonAnnotatedClass.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Transactional.class).getAggregateIndex()).isEqualTo(-1);
// inherited class-level annotation; note: @Transactional is inherited
assertThat(MergedAnnotations.from(InheritedAnnotationInterface.class,
SearchStrategy.INHERITED_ANNOTATIONS).get(
Transactional.class).getAggregateIndex()).isEqualTo(0);
// Since we're not traversing | hierarchy |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AppSchedulingInfo.java | {
"start": 3394,
"end": 3570
} | class ____ track of all the consumption of an application. This also
* keeps track of current running/completed containers for the application.
*/
@Private
@Unstable
public | keeps |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.