language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/lifecycle/internal/DefaultLifecyclePluginAnalyzer.java | {
"start": 9074,
"end": 9199
} | class ____ {
String groupId;
String artifactId;
String version;
String goal;
}
}
| GoalSpec |
java | junit-team__junit5 | junit-platform-launcher/src/main/java/org/junit/platform/launcher/listeners/SummaryGeneratingListener.java | {
"start": 1171,
"end": 4335
} | class ____ implements TestExecutionListener {
private @Nullable TestPlan testPlan;
private @Nullable MutableTestExecutionSummary summary;
public SummaryGeneratingListener() {
}
/**
* Get the summary generated by this listener.
*/
public TestExecutionSummary getSummary() {
return getMutableSummary();
}
private MutableTestExecutionSummary getMutableSummary() {
return Preconditions.notNull(this.summary, "No tests have yet been executed");
}
@Override
public void testPlanExecutionStarted(TestPlan testPlan) {
this.testPlan = testPlan;
this.summary = new MutableTestExecutionSummary(testPlan);
}
@Override
public void testPlanExecutionFinished(TestPlan testPlan) {
var summary = getMutableSummary();
summary.timeFinished = System.currentTimeMillis();
summary.timeFinishedNanos = System.nanoTime();
}
@Override
public void dynamicTestRegistered(TestIdentifier testIdentifier) {
var summary = getMutableSummary();
if (testIdentifier.isContainer()) {
summary.containersFound.incrementAndGet();
}
if (testIdentifier.isTest()) {
summary.testsFound.incrementAndGet();
}
}
@Override
public void executionSkipped(TestIdentifier testIdentifier, String reason) {
var testPlan = requireNonNull(this.testPlan);
// @formatter:off
long skippedContainers = concat(Stream.of(testIdentifier), testPlan.getDescendants(testIdentifier).stream())
.filter(TestIdentifier::isContainer)
.count();
long skippedTests = concat(Stream.of(testIdentifier), testPlan.getDescendants(testIdentifier).stream())
.filter(TestIdentifier::isTest)
.count();
// @formatter:on
var summary = getMutableSummary();
summary.containersSkipped.addAndGet(skippedContainers);
summary.testsSkipped.addAndGet(skippedTests);
}
@Override
public void executionStarted(TestIdentifier testIdentifier) {
var summary = getMutableSummary();
if (testIdentifier.isContainer()) {
summary.containersStarted.incrementAndGet();
}
if (testIdentifier.isTest()) {
summary.testsStarted.incrementAndGet();
}
}
@Override
public void executionFinished(TestIdentifier testIdentifier, TestExecutionResult testExecutionResult) {
var summary = getMutableSummary();
switch (testExecutionResult.getStatus()) {
case SUCCESSFUL -> {
if (testIdentifier.isContainer()) {
summary.containersSucceeded.incrementAndGet();
}
if (testIdentifier.isTest()) {
summary.testsSucceeded.incrementAndGet();
}
}
case ABORTED -> {
if (testIdentifier.isContainer()) {
summary.containersAborted.incrementAndGet();
}
if (testIdentifier.isTest()) {
summary.testsAborted.incrementAndGet();
}
}
case FAILED -> {
if (testIdentifier.isContainer()) {
summary.containersFailed.incrementAndGet();
}
if (testIdentifier.isTest()) {
summary.testsFailed.incrementAndGet();
}
testExecutionResult.getThrowable().ifPresent(
throwable -> summary.addFailure(testIdentifier, throwable));
}
default -> throw new PreconditionViolationException(
"Unsupported execution status:" + testExecutionResult.getStatus());
}
}
}
| SummaryGeneratingListener |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/shard/SparseVectorStatsTests.java | {
"start": 675,
"end": 1253
} | class ____ extends AbstractWireSerializingTestCase<SparseVectorStats> {
@Override
protected Writeable.Reader<SparseVectorStats> instanceReader() {
return SparseVectorStats::new;
}
@Override
protected SparseVectorStats createTestInstance() {
return new SparseVectorStats(randomNonNegativeLong());
}
@Override
protected SparseVectorStats mutateInstance(SparseVectorStats instance) {
return new SparseVectorStats(randomValueOtherThan(instance.getValueCount(), ESTestCase::randomNonNegativeLong));
}
}
| SparseVectorStatsTests |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/jaxb/mapper/codegen/feed/Feed.java | {
"start": 24534,
"end": 25435
} | class ____ anonymous complex type.
*
* <p>
* The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <simpleContent>
* <extension base="<http://www.w3.org/2001/XMLSchema>string">
* <attribute name="rel" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="type" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="href" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
* </extension>
* </simpleContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"value"
})
public static | for |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSortedMapWritable.java | {
"start": 1219,
"end": 6732
} | class ____ {
/** the test */
@Test
@SuppressWarnings("unchecked")
public void testSortedMapWritable() {
Text[] keys = {
new Text("key1"),
new Text("key2"),
new Text("key3"),
};
BytesWritable[] values = {
new BytesWritable("value1".getBytes()),
new BytesWritable("value2".getBytes()),
new BytesWritable("value3".getBytes())
};
SortedMapWritable<Text> inMap = new SortedMapWritable<Text>();
for (int i = 0; i < keys.length; i++) {
inMap.put(keys[i], values[i]);
}
assertEquals(0, inMap.firstKey().compareTo(keys[0]));
assertEquals(0, inMap.lastKey().compareTo(keys[2]));
SortedMapWritable<Text> outMap = new SortedMapWritable<Text>(inMap);
assertEquals(inMap.size(), outMap.size());
for (Map.Entry<Text, Writable> e: inMap.entrySet()) {
assertTrue(outMap.containsKey(e.getKey()));
WritableComparable<WritableComparable<?>> aValue = (WritableComparable<WritableComparable<?>>) outMap.get(e.getKey());
WritableComparable<WritableComparable<?>> bValue = (WritableComparable<WritableComparable<?>>) e.getValue();
assertEquals(0, aValue.compareTo(bValue));
}
// Now for something a little harder...
Text[] maps = {
new Text("map1"),
new Text("map2")
};
SortedMapWritable<Text> mapOfMaps = new SortedMapWritable<Text>();
mapOfMaps.put(maps[0], inMap);
mapOfMaps.put(maps[1], outMap);
SortedMapWritable<Text> copyOfMapOfMaps = new SortedMapWritable<Text>(mapOfMaps);
for (int i = 0; i < maps.length; i++) {
assertTrue(copyOfMapOfMaps.containsKey(maps[i]));
SortedMapWritable<Text> a = (SortedMapWritable<Text>) mapOfMaps.get(maps[i]);
SortedMapWritable<Text> b = (SortedMapWritable<Text>) copyOfMapOfMaps.get(maps[i]);
assertEquals(a.size(), b.size());
for (Writable key: a.keySet()) {
assertTrue(b.containsKey(key));
// This will work because we know what we put into each set
WritableComparable<WritableComparable<?>> aValue = (WritableComparable<WritableComparable<?>>) a.get(key);
WritableComparable<WritableComparable<?>> bValue = (WritableComparable<WritableComparable<?>>) b.get(key);
assertEquals(0, aValue.compareTo(bValue));
}
}
}
/**
* Test that number of "unknown" classes is propagated across multiple copies.
*/
@Test
@SuppressWarnings("deprecation")
public void testForeignClass() {
SortedMapWritable<Text> inMap = new SortedMapWritable<Text>();
inMap.put(new Text("key"), new UTF8("value"));
inMap.put(new Text("key2"), new UTF8("value2"));
SortedMapWritable<Text> outMap = new SortedMapWritable<Text>(inMap);
SortedMapWritable<Text> copyOfCopy = new SortedMapWritable<Text>(outMap);
assertEquals(1, copyOfCopy.getNewClasses());
}
/**
* Tests if equal and hashCode method still hold the contract.
*/
@Test
public void testEqualsAndHashCode() {
String failureReason;
SortedMapWritable<Text> mapA = new SortedMapWritable<Text>();
SortedMapWritable<Text> mapB = new SortedMapWritable<Text>();
// Sanity checks
failureReason = "SortedMapWritable couldn't be initialized. Got null reference";
assertNotNull(mapA, failureReason);
assertNotNull(mapB, failureReason);
// Basic null check
assertFalse(mapA.equals(null), "equals method returns true when passed null");
// When entry set is empty, they should be equal
assertTrue(mapA.equals(mapB), "Two empty SortedMapWritables are no longer equal");
// Setup
Text[] keys = {
new Text("key1"),
new Text("key2")
};
BytesWritable[] values = {
new BytesWritable("value1".getBytes()),
new BytesWritable("value2".getBytes())
};
mapA.put(keys[0], values[0]);
mapB.put(keys[1], values[1]);
// entrySets are different
failureReason = "Two SortedMapWritables with different data are now equal";
assertTrue(mapA.hashCode() != mapB.hashCode(), failureReason);
assertTrue(!mapA.equals(mapB), failureReason);
assertTrue(!mapB.equals(mapA), failureReason);
mapA.put(keys[1], values[1]);
mapB.put(keys[0], values[0]);
// entrySets are now same
failureReason = "Two SortedMapWritables with same entry sets formed in different order are now different";
assertEquals(mapA.hashCode(), mapB.hashCode(), failureReason);
assertTrue(mapA.equals(mapB), failureReason);
assertTrue(mapB.equals(mapA), failureReason);
// Let's check if entry sets of same keys but different values
mapA.put(keys[0], values[1]);
mapA.put(keys[1], values[0]);
failureReason = "Two SortedMapWritables with different content are now equal";
assertTrue(mapA.hashCode() != mapB.hashCode(), failureReason);
assertTrue(!mapA.equals(mapB), failureReason);
assertTrue(!mapB.equals(mapA), failureReason);
}
@Test
@Timeout(value = 10)
public void testPutAll() {
SortedMapWritable<Text> map1 = new SortedMapWritable<Text>();
SortedMapWritable<Text> map2 = new SortedMapWritable<Text>();
map1.put(new Text("key"), new Text("value"));
map2.putAll(map1);
assertEquals(map1, map2, "map1 entries don't match map2 entries");
assertTrue(map2.classToIdMap.containsKey(Text.class)
&& map2.idToClassMap.containsValue(Text.class),
"map2 doesn't have | TestSortedMapWritable |
java | qos-ch__slf4j | jcl-over-slf4j/src/main/java/org/apache/commons/logging/impl/NoOpLog.java | {
"start": 1038,
"end": 3082
} | class ____ implements Log, Serializable {
private static final long serialVersionUID = 561423906191706148L;
/** Convenience constructor */
public NoOpLog() {
}
/** Base constructor */
public NoOpLog(String name) {
}
/** Do nothing */
public void trace(Object message) {
}
/** Do nothing */
public void trace(Object message, Throwable t) {
}
/** Do nothing */
public void debug(Object message) {
}
/** Do nothing */
public void debug(Object message, Throwable t) {
}
/** Do nothing */
public void info(Object message) {
}
/** Do nothing */
public void info(Object message, Throwable t) {
}
/** Do nothing */
public void warn(Object message) {
}
/** Do nothing */
public void warn(Object message, Throwable t) {
}
/** Do nothing */
public void error(Object message) {
}
/** Do nothing */
public void error(Object message, Throwable t) {
}
/** Do nothing */
public void fatal(Object message) {
}
/** Do nothing */
public void fatal(Object message, Throwable t) {
}
/**
* Debug is never enabled.
*
* @return false
*/
public final boolean isDebugEnabled() {
return false;
}
/**
* Error is never enabled.
*
* @return false
*/
public final boolean isErrorEnabled() {
return false;
}
/**
* Fatal is never enabled.
*
* @return false
*/
public final boolean isFatalEnabled() {
return false;
}
/**
* Info is never enabled.
*
* @return false
*/
public final boolean isInfoEnabled() {
return false;
}
/**
* Trace is never enabled.
*
* @return false
*/
public final boolean isTraceEnabled() {
return false;
}
/**
* Warn is never enabled.
*
* @return false
*/
public final boolean isWarnEnabled() {
return false;
}
}
| NoOpLog |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/aot/generate/GeneratedFiles.java | {
"start": 8718,
"end": 10196
} | class ____ {
private final boolean exists;
private final Supplier<InputStreamSource> existingContent;
protected FileHandler(boolean exists, Supplier<InputStreamSource> existingContent) {
this.exists = exists;
this.existingContent = existingContent;
}
/**
* Specify whether the file already exists.
* @return {@code true} if the file already exists
*/
public boolean exists() {
return this.exists;
}
/**
* Return an {@link InputStreamSource} for the content of the file or
* {@code null} if the file does not exist.
*/
public @Nullable InputStreamSource getContent() {
return (exists() ? this.existingContent.get() : null);
}
/**
* Create a file with the given {@linkplain InputStreamSource content}.
* @throws IllegalStateException if the file already exists
*/
public void create(InputStreamSource content) {
Assert.notNull(content, "'content' must not be null");
if (exists()) {
throw new IllegalStateException("%s already exists".formatted(this));
}
copy(content, false);
}
/**
* Override the content of the file handled by this instance using the
* given {@linkplain InputStreamSource content}. If the file does not
* exist, it is created.
*/
public void override(InputStreamSource content) {
Assert.notNull(content, "'content' must not be null");
copy(content, true);
}
protected abstract void copy(InputStreamSource content, boolean override);
}
}
| FileHandler |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/CookieValueMethodArgumentResolverTests.java | {
"start": 1810,
"end": 5191
} | class ____ {
private CookieValueMethodArgumentResolver resolver;
private BindingContext bindingContext;
private MethodParameter cookieParameter;
private MethodParameter cookieStringParameter;
private MethodParameter stringParameter;
private MethodParameter cookieMonoParameter;
@BeforeEach
void setup() throws Exception {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.refresh();
ReactiveAdapterRegistry adapterRegistry = ReactiveAdapterRegistry.getSharedInstance();
this.resolver = new CookieValueMethodArgumentResolver(context.getBeanFactory(), adapterRegistry);
this.bindingContext = new BindingContext();
Method method = ReflectionUtils.findMethod(getClass(), "params", (Class<?>[]) null);
this.cookieParameter = new SynthesizingMethodParameter(method, 0);
this.cookieStringParameter = new SynthesizingMethodParameter(method, 1);
this.stringParameter = new SynthesizingMethodParameter(method, 2);
this.cookieMonoParameter = new SynthesizingMethodParameter(method, 3);
}
@Test
void supportsParameter() {
assertThat(this.resolver.supportsParameter(this.cookieParameter)).isTrue();
assertThat(this.resolver.supportsParameter(this.cookieStringParameter)).isTrue();
}
@Test
void doesNotSupportParameter() {
assertThat(this.resolver.supportsParameter(this.stringParameter)).isFalse();
assertThatIllegalStateException().isThrownBy(() ->
this.resolver.supportsParameter(this.cookieMonoParameter))
.withMessageStartingWith("CookieValueMethodArgumentResolver does not support reactive type wrapper");
}
@Test
void resolveCookieArgument() {
HttpCookie expected = new HttpCookie("name", "foo");
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/").cookie(expected));
Mono<Object> mono = this.resolver.resolveArgument(
this.cookieParameter, this.bindingContext, exchange);
assertThat(mono.block()).isEqualTo(expected);
}
@Test
void resolveCookieStringArgument() {
HttpCookie cookie = new HttpCookie("name", "foo");
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/").cookie(cookie));
Mono<Object> mono = this.resolver.resolveArgument(
this.cookieStringParameter, this.bindingContext, exchange);
assertThat(mono.block()).as("Invalid result").isEqualTo(cookie.getValue());
}
@Test
void resolveCookieDefaultValue() {
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/"));
Object result = this.resolver.resolveArgument(this.cookieStringParameter, this.bindingContext, exchange).block();
boolean condition = result instanceof String;
assertThat(condition).isTrue();
assertThat(result).isEqualTo("bar");
}
@Test
void notFound() {
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/"));
Mono<Object> mono = resolver.resolveArgument(this.cookieParameter, this.bindingContext, exchange);
StepVerifier.create(mono)
.expectNextCount(0)
.expectError(ServerWebInputException.class)
.verify();
}
@SuppressWarnings("unused")
public void params(
@CookieValue("name") HttpCookie cookie,
@CookieValue(name = "name", defaultValue = "bar") String cookieString,
String stringParam,
@CookieValue Mono<String> monoCookie) {
}
}
| CookieValueMethodArgumentResolverTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeDiskMetrics.java | {
"start": 2371,
"end": 8516
} | class ____ {
public static final Logger LOG = LoggerFactory.getLogger(
DataNodeDiskMetrics.class);
private DataNode dn;
private final long detectionInterval;
private volatile boolean shouldRun;
private OutlierDetector slowDiskDetector;
private Daemon slowDiskDetectionDaemon;
private volatile Map<String, Map<DiskOp, Double>>
diskOutliersStats = Maps.newHashMap();
// Adding for test purpose. When addSlowDiskForTesting() called from test
// code, status should not be overridden by daemon thread.
private boolean overrideStatus = true;
/**
* Minimum number of disks to run outlier detection.
*/
private volatile long minOutlierDetectionDisks;
/**
* Threshold in milliseconds below which a disk is definitely not slow.
*/
private volatile long lowThresholdMs;
/**
* The number of slow disks that needs to be excluded.
*/
private volatile int maxSlowDisksToExclude;
/**
* List of slow disks that need to be excluded.
*/
private List<String> slowDisksToExclude = new ArrayList<>();
public DataNodeDiskMetrics(DataNode dn, long diskOutlierDetectionIntervalMs,
Configuration conf) {
this.dn = dn;
this.detectionInterval = diskOutlierDetectionIntervalMs;
minOutlierDetectionDisks =
conf.getLong(DFSConfigKeys.DFS_DATANODE_MIN_OUTLIER_DETECTION_DISKS_KEY,
DFSConfigKeys.DFS_DATANODE_MIN_OUTLIER_DETECTION_DISKS_DEFAULT);
lowThresholdMs =
conf.getLong(DFSConfigKeys.DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_KEY,
DFSConfigKeys.DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_DEFAULT);
maxSlowDisksToExclude =
conf.getInt(DFSConfigKeys.DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_KEY,
DFSConfigKeys.DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_DEFAULT);
slowDiskDetector =
new OutlierDetector(minOutlierDetectionDisks, lowThresholdMs);
shouldRun = true;
startDiskOutlierDetectionThread();
}
private void startDiskOutlierDetectionThread() {
slowDiskDetectionDaemon = new Daemon(new Runnable() {
@Override
public void run() {
while (shouldRun) {
if (dn.getFSDataset() != null) {
Map<String, Double> metadataOpStats = Maps.newHashMap();
Map<String, Double> readIoStats = Maps.newHashMap();
Map<String, Double> writeIoStats = Maps.newHashMap();
FsDatasetSpi.FsVolumeReferences fsVolumeReferences = null;
try {
fsVolumeReferences = dn.getFSDataset().getFsVolumeReferences();
Iterator<FsVolumeSpi> volumeIterator = fsVolumeReferences
.iterator();
while (volumeIterator.hasNext()) {
FsVolumeSpi volume = volumeIterator.next();
DataNodeVolumeMetrics metrics = volume.getMetrics();
String volumeName = volume.getBaseURI().getPath();
metadataOpStats.put(volumeName,
metrics.getMetadataOperationMean());
readIoStats.put(volumeName, metrics.getReadIoMean());
writeIoStats.put(volumeName, metrics.getWriteIoMean());
}
} finally {
if (fsVolumeReferences != null) {
try {
fsVolumeReferences.close();
} catch (IOException e) {
LOG.error("Error in releasing FS Volume references", e);
}
}
}
if (metadataOpStats.isEmpty() && readIoStats.isEmpty()
&& writeIoStats.isEmpty()) {
LOG.debug("No disk stats available for detecting outliers.");
continue;
}
detectAndUpdateDiskOutliers(metadataOpStats, readIoStats,
writeIoStats);
// Sort the slow disks by latency and extract the top n by maxSlowDisksToExclude.
if (maxSlowDisksToExclude > 0) {
ArrayList<DiskLatency> diskLatencies = new ArrayList<>();
for (Map.Entry<String, Map<DiskOp, Double>> diskStats :
diskOutliersStats.entrySet()) {
diskLatencies.add(new DiskLatency(diskStats.getKey(), diskStats.getValue()));
}
Collections.sort(diskLatencies, (o1, o2)
-> Double.compare(o2.getMaxLatency(), o1.getMaxLatency()));
slowDisksToExclude = diskLatencies.stream().limit(maxSlowDisksToExclude)
.map(DiskLatency::getSlowDisk).collect(Collectors.toList());
}
}
try {
Thread.sleep(detectionInterval);
} catch (InterruptedException e) {
LOG.error("Disk Outlier Detection thread interrupted", e);
Thread.currentThread().interrupt();
}
}
}
});
slowDiskDetectionDaemon.start();
}
private void detectAndUpdateDiskOutliers(Map<String, Double> metadataOpStats,
Map<String, Double> readIoStats, Map<String, Double> writeIoStats) {
Map<String, Map<DiskOp, Double>> diskStats = Maps.newHashMap();
// Get MetadataOp Outliers
Map<String, Double> metadataOpOutliers = slowDiskDetector
.getOutliers(metadataOpStats);
for (Map.Entry<String, Double> entry : metadataOpOutliers.entrySet()) {
addDiskStat(diskStats, entry.getKey(), DiskOp.METADATA, entry.getValue());
}
// Get ReadIo Outliers
Map<String, Double> readIoOutliers = slowDiskDetector
.getOutliers(readIoStats);
for (Map.Entry<String, Double> entry : readIoOutliers.entrySet()) {
addDiskStat(diskStats, entry.getKey(), DiskOp.READ, entry.getValue());
}
// Get WriteIo Outliers
Map<String, Double> writeIoOutliers = slowDiskDetector
.getOutliers(writeIoStats);
for (Map.Entry<String, Double> entry : writeIoOutliers.entrySet()) {
addDiskStat(diskStats, entry.getKey(), DiskOp.WRITE, entry.getValue());
}
if (overrideStatus) {
diskOutliersStats = diskStats;
LOG.debug("Updated disk outliers.");
}
}
/**
* This structure is a wrapper over disk latencies.
*/
public static | DataNodeDiskMetrics |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/header/writers/CrossOriginOpenerPolicyHeaderWriter.java | {
"start": 1981,
"end": 2535
} | enum ____ {
UNSAFE_NONE("unsafe-none"),
SAME_ORIGIN_ALLOW_POPUPS("same-origin-allow-popups"),
SAME_ORIGIN("same-origin");
private final String policy;
CrossOriginOpenerPolicy(String policy) {
this.policy = policy;
}
public String getPolicy() {
return this.policy;
}
public static @Nullable CrossOriginOpenerPolicy from(String openerPolicy) {
for (CrossOriginOpenerPolicy policy : values()) {
if (policy.getPolicy().equals(openerPolicy)) {
return policy;
}
}
return null;
}
}
}
| CrossOriginOpenerPolicy |
java | spring-projects__spring-framework | spring-aop/src/test/java/org/springframework/aop/scope/DefaultScopedObjectTests.java | {
"start": 997,
"end": 1861
} | class ____ {
private static final String GOOD_BEAN_NAME = "foo";
@Test
void testCtorWithNullBeanFactory() {
assertThatIllegalArgumentException().isThrownBy(() ->
new DefaultScopedObject(null, GOOD_BEAN_NAME));
}
@Test
void testCtorWithNullTargetBeanName() {
assertThatIllegalArgumentException().isThrownBy(() ->
testBadTargetBeanName(null));
}
@Test
void testCtorWithEmptyTargetBeanName() {
assertThatIllegalArgumentException().isThrownBy(() ->
testBadTargetBeanName(""));
}
@Test
void testCtorWithJustWhitespacedTargetBeanName() {
assertThatIllegalArgumentException().isThrownBy(() ->
testBadTargetBeanName(" "));
}
private static void testBadTargetBeanName(final String badTargetBeanName) {
ConfigurableBeanFactory factory = mock();
new DefaultScopedObject(factory, badTargetBeanName);
}
}
| DefaultScopedObjectTests |
java | apache__camel | components/camel-aws/camel-aws2-eks/src/test/java/org/apache/camel/component/aws2/eks/EKS2ProducerSpringTest.java | {
"start": 1684,
"end": 5773
} | class ____ extends CamelSpringTestSupport {
@EndpointInject("mock:result")
private MockEndpoint mock;
@Test
public void eksListClustersTest() throws Exception {
mock.expectedMessageCount(1);
Exchange exchange = template.request("direct:listClusters", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(EKS2Constants.OPERATION, EKS2Operations.listClusters);
}
});
MockEndpoint.assertIsSatisfied(context);
ListClustersResponse resultGet = (ListClustersResponse) exchange.getIn().getBody();
assertEquals(1, resultGet.clusters().size());
assertEquals("Test", resultGet.clusters().get(0));
}
@Test
public void eksListClustersPojoTest() throws Exception {
mock.expectedMessageCount(1);
Exchange exchange = template.request("direct:listPojoClusters", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(EKS2Constants.OPERATION, EKS2Operations.listClusters);
exchange.getIn().setBody(ListClustersRequest.builder().maxResults(12).build());
}
});
MockEndpoint.assertIsSatisfied(context);
ListClustersResponse resultGet = (ListClustersResponse) exchange.getIn().getBody();
assertEquals(1, resultGet.clusters().size());
assertEquals("Test", resultGet.clusters().get(0));
}
@Test
public void eksCreateClusterTest() throws Exception {
mock.expectedMessageCount(1);
Exchange exchange = template.request("direct:createCluster", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(EKS2Constants.OPERATION, EKS2Operations.createCluster);
exchange.getIn().setHeader(EKS2Constants.CLUSTER_NAME, "Test");
VpcConfigRequest req = VpcConfigRequest.builder().build();
exchange.getIn().setHeader(EKS2Constants.VPC_CONFIG, req);
exchange.getIn().setHeader(EKS2Constants.ROLE_ARN, "arn:aws:eks::123456789012:user/Camel");
}
});
MockEndpoint.assertIsSatisfied(context);
CreateClusterResponse resultGet = (CreateClusterResponse) exchange.getIn().getBody();
assertEquals("Test", resultGet.cluster().name());
}
@Test
public void eksDescribeClusterTest() throws Exception {
mock.expectedMessageCount(1);
Exchange exchange = template.request("direct:describeCluster", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(EKS2Constants.OPERATION, EKS2Operations.describeCluster);
exchange.getIn().setHeader(EKS2Constants.CLUSTER_NAME, "Test");
}
});
MockEndpoint.assertIsSatisfied(context);
DescribeClusterResponse resultGet = exchange.getIn().getBody(DescribeClusterResponse.class);
assertEquals("Test", resultGet.cluster().name());
}
@Test
public void eksDeleteClusterTest() throws Exception {
mock.expectedMessageCount(1);
Exchange exchange = template.request("direct:deleteCluster", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(EKS2Constants.OPERATION, EKS2Operations.deleteCluster);
exchange.getIn().setHeader(EKS2Constants.CLUSTER_NAME, "Test");
}
});
MockEndpoint.assertIsSatisfied(context);
DeleteClusterResponse resultGet = exchange.getIn().getBody(DeleteClusterResponse.class);
assertEquals("Test", resultGet.cluster().name());
}
@Override
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/aws2/eks/EKSComponentSpringTest-context.xml");
}
}
| EKS2ProducerSpringTest |
java | redisson__redisson | redisson-tomcat/redisson-tomcat-11/src/main/java/org/redisson/tomcat/RedissonSingleSignOn.java | {
"start": 1261,
"end": 6639
} | class ____ extends SingleSignOn {
private static final StringManager sm = StringManager.getManager(RedissonSingleSignOn.class);
private static final String SSO_SESSION_ENTRIES = "redisson:tomcat_sso";
private RedissonSessionManager manager;
void setSessionManager(RedissonSessionManager manager) {
if (containerLog != null && containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.setSessionManager", manager));
}
this.manager = manager;
}
@Override
public void invoke(Request request, Response response) throws IOException, ServletException {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.invoke"));
}
String ssoSessionId = getSsoSessionId(request);
syncAndGetSsoEntry(ssoSessionId);
super.invoke(request, response);
}
@Override
public void sessionDestroyed(String ssoId, Session session) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.sessionDestroyed"));
}
super.sessionDestroyed(ssoId, session);
manager.getMap(SSO_SESSION_ENTRIES).fastRemove(ssoId);
}
@Override
protected boolean associate(String ssoId, Session session) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.associate", ssoId, session));
}
syncAndGetSsoEntry(ssoId);
boolean associated = super.associate(ssoId, session);
if (associated) {
manager.getMap(SSO_SESSION_ENTRIES).fastPut(ssoId, cache.get(ssoId));
}
return associated;
}
@Override
protected boolean reauthenticate(String ssoId, Realm realm, Request request) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.reauthenticate"));
}
syncAndGetSsoEntry(ssoId);
return super.reauthenticate(ssoId, realm, request);
}
@Override
protected void register(String ssoId, Principal principal, String authType, String username, String password) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.register"));
}
super.register(ssoId, principal, authType, username, password);
manager.getMap(SSO_SESSION_ENTRIES).fastPut(ssoId, cache.get(ssoId));
}
@Override
protected void deregister(String ssoId) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.deregister"));
}
super.deregister(ssoId);
manager.getMap(SSO_SESSION_ENTRIES).fastRemove(ssoId);
}
@Override
protected boolean update(String ssoId, Principal principal, String authType, String username, String password) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.update"));
}
syncAndGetSsoEntry(ssoId);
boolean updated = super.update(ssoId, principal, authType, username, password);
if (updated) {
manager.getMap(SSO_SESSION_ENTRIES).fastPut(ssoId, cache.get(ssoId));
}
return updated;
}
@Override
protected void removeSession(String ssoId, Session session) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.removeSession", session, ssoId));
}
SingleSignOnEntry sso = syncAndGetSsoEntry(ssoId);
super.removeSession(ssoId, session);
if (sso != null && sso.findSessions().isEmpty()) {
deregister(ssoId);
}
}
/**
* Lookup {@code SingleSignOnEntry} for the given SSO ID and make sure local cache has the same value.
* That applies also to non existence.
*
* @param ssoSessionId SSO session id we are looking for
* @return matching {@code SingleSignOnEntry} instance or null when not found
*/
private SingleSignOnEntry syncAndGetSsoEntry(String ssoSessionId) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.getSsoEntry", ssoSessionId));
}
if (ssoSessionId == null) {
return null;
}
SingleSignOnEntry entry = (SingleSignOnEntry) manager.getMap(SSO_SESSION_ENTRIES).get(ssoSessionId);
if (entry == null) {
this.cache.remove(ssoSessionId);
} else {
this.cache.put(ssoSessionId, entry);
}
return entry;
}
/**
* Retrieve SSO session ID from provided cookies in the request.
*
* @param request The request that has been sent to the server.
* @return SSO session ID provided with the request or null when none provided
*/
private String getSsoSessionId(Request request) {
if (containerLog.isTraceEnabled()) {
containerLog.trace(sm.getString("redissonSingleSignOn.trace.getSsoSessionId", request.getRequestURI()));
}
Cookie cookie = null;
Cookie[] cookies = request.getCookies();
if (cookies != null) {
for (Cookie value : cookies) {
if (getCookieName().equals(value.getName())) {
cookie = value;
break;
}
}
}
if (cookie != null) {
return cookie.getValue();
}
return null;
}
}
| RedissonSingleSignOn |
java | micronaut-projects__micronaut-core | http-client-tck/src/main/java/io/micronaut/http/client/tck/tests/filter/ClientResponseFilterTest.java | {
"start": 2041,
"end": 10690
} | class ____ {
public static final String SPEC_NAME = "ClientResponseFilterTest";
@Test
public void responseFilterImmediateRequestParameter() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/immediate-request-parameter"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("foo")
.build());
Assertions.assertEquals(
List.of("responseFilterImmediateRequestParameter /response-filter/immediate-request-parameter"),
server.getApplicationContext().getBean(MyClientFilter.class).events
);
})
.run();
}
@Test
public void responseFilterImmediateMutableRequestParameter() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/immediate-mutable-request-parameter"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("foo")
.build());
Assertions.assertEquals(
List.of("responseFilterImmediateMutableRequestParameter /response-filter/immediate-mutable-request-parameter"),
server.getApplicationContext().getBean(MyClientFilter.class).events
);
})
.run();
}
@Test
public void responseFilterResponseParameter() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/response-parameter"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("foo")
.build());
Assertions.assertEquals(
List.of("responseFilterResponseParameter foo"),
server.getApplicationContext().getBean(MyClientFilter.class).events
);
})
.run();
}
@Test
@Disabled // mutable response parameter is not currently supported by the client
public void responseFilterMutableResponseParameter() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/mutable-response-parameter"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("responseFilterMutableResponseParameter foo")
.build());
})
.run();
}
@Test
public void responseFilterThrowableParameter() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/throwable-parameter"))
.assertion((server, request) -> {
AssertionUtils.assertThrows(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.INTERNAL_SERVER_ERROR)
.build());
Assertions.assertEquals(
// don't care about the order
Set.of(
"responseFilterThrowableParameter Internal Server Error",
"responseFilterThrowableParameter HCRE Internal Server Error",
"responseFilterThrowableParameter NAE null"
),
new HashSet<>(server.getApplicationContext().getBean(MyClientFilter.class).events)
);
})
.run();
}
@Test
public void responseFilterReplaceResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/replace-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("responseFilterReplaceResponse foo")
.build());
})
.run();
}
@Test
public void responseFilterReplaceMutableResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/replace-mutable-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("responseFilterReplaceMutableResponse foo")
.build());
})
.run();
}
@Test
public void responseFilterReplaceResponseNull() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/replace-response-null"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("foo")
.build());
})
.run();
}
@Test
public void responseFilterReplaceResponseEmpty() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/replace-response-empty"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("foo")
.build());
})
.run();
}
@Test
public void responseFilterReplacePublisherResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/replace-publisher-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("responseFilterReplacePublisherResponse foo")
.build());
})
.run();
}
@Test
public void responseFilterReplaceMonoResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/replace-mono-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("responseFilterReplaceMonoResponse foo")
.build());
})
.run();
}
@Test
public void responseFilterReplaceCompletableResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/replace-completable-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("responseFilterReplaceCompletableResponse foo")
.build());
})
.run();
}
@Test
public void responseFilterReplaceCompletionResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/response-filter/replace-completion-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("responseFilterReplaceCompletionResponse foo")
.build());
})
.run();
}
@ClientFilter
@Singleton
@Requires(property = "spec.name", value = SPEC_NAME)
public static | ClientResponseFilterTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractParentQueue.java | {
"start": 4871,
"end": 12413
} | class ____ extends AbstractCSQueue {
private static final Logger LOG =
LoggerFactory.getLogger(AbstractParentQueue.class);
protected final List<CSQueue> childQueues;
private final boolean rootQueue;
private AtomicInteger numApplications = new AtomicInteger(0);
private final RecordFactory recordFactory =
RecordFactoryProvider.getRecordFactory(null);
private QueueOrderingPolicy queueOrderingPolicy;
private long lastSkipQueueDebugLoggingTimestamp = -1;
private int runnableApps;
private final boolean allowZeroCapacitySum;
private AutoCreatedQueueTemplate autoCreatedQueueTemplate;
// A ratio of the queue's effective minimum resource and the summary of the configured
// minimum resource of its children grouped by labels and calculated for each resource names
// distinctively.
private final Map<String, Map<String, Float>> effectiveMinResourceRatio =
new ConcurrentHashMap<>();
public AbstractParentQueue(CapacitySchedulerQueueContext queueContext,
String queueName, CSQueue parent, CSQueue old)
throws IOException {
this(queueContext, queueName, parent, old, false);
}
public AbstractParentQueue(CapacitySchedulerQueueContext queueContext,
String queueName, CSQueue parent, CSQueue old, boolean isDynamic) throws
IOException {
super(queueContext, queueName, parent, old);
setDynamicQueue(isDynamic);
this.rootQueue = (parent == null);
float rawCapacity = queueContext.getConfiguration()
.getNonLabeledQueueCapacity(this.queuePath);
if (rootQueue &&
(rawCapacity != CapacitySchedulerConfiguration.MAXIMUM_CAPACITY_VALUE)) {
throw new IllegalArgumentException("Illegal " +
"capacity of " + rawCapacity + " for queue " + queueName +
". Must be " + CapacitySchedulerConfiguration.MAXIMUM_CAPACITY_VALUE);
}
this.childQueues = new ArrayList<>();
this.allowZeroCapacitySum =
queueContext.getConfiguration()
.getAllowZeroCapacitySum(getQueuePathObject());
}
// returns what is configured queue ordering policy
private String getQueueOrderingPolicyConfigName() {
return queueOrderingPolicy == null ?
null :
queueOrderingPolicy.getConfigName();
}
protected void setupQueueConfigs(Resource clusterResource)
throws IOException {
writeLock.lock();
try {
CapacitySchedulerConfiguration configuration = queueContext.getConfiguration();
autoCreatedQueueTemplate = new AutoCreatedQueueTemplate(
configuration, this.queuePath);
super.setupQueueConfigs(clusterResource);
StringBuilder aclsString = new StringBuilder();
for (Map.Entry<AccessType, AccessControlList> e : getACLs().entrySet()) {
aclsString.append(e.getKey()).append(":")
.append(e.getValue().getAclString());
}
StringBuilder labelStrBuilder = new StringBuilder();
if (getAccessibleNodeLabels() != null) {
for (String nodeLabel : getAccessibleNodeLabels()) {
labelStrBuilder.append(nodeLabel).append(",");
}
}
// Initialize queue ordering policy
queueOrderingPolicy = configuration.getQueueOrderingPolicy(
getQueuePathObject(), parent == null ?
null :
((AbstractParentQueue) parent).getQueueOrderingPolicyConfigName());
queueOrderingPolicy.setQueues(childQueues);
LOG.info(getQueueName() + ", " + getCapacityOrWeightString()
+ ", absoluteCapacity=" + getAbsoluteCapacity()
+ ", maxCapacity=" + getMaximumCapacity()
+ ", absoluteMaxCapacity=" + getAbsoluteMaximumCapacity()
+ ", state=" + getState() + ", acls="
+ aclsString + ", labels=" + labelStrBuilder + "\n"
+ ", reservationsContinueLooking=" + isReservationsContinueLooking()
+ ", orderingPolicy=" + getQueueOrderingPolicyConfigName()
+ ", priority=" + getPriority()
+ ", allowZeroCapacitySum=" + allowZeroCapacitySum);
} finally {
writeLock.unlock();
}
}
@Override
protected void setDynamicQueueACLProperties() {
super.setDynamicQueueACLProperties();
if (parent instanceof AbstractParentQueue) {
acls.putAll(getACLsForFlexibleAutoCreatedParentQueue(
((AbstractParentQueue) parent).getAutoCreatedQueueTemplate()));
}
}
private static float PRECISION = 0.0005f; // 0.05% precision
// Check weight configuration, throw exception when configuration is invalid
// return true when all children use weight mode.
public QueueCapacityType getCapacityConfigurationTypeForQueues(
Collection<CSQueue> queues) throws IOException {
// Do we have ANY queue set capacity in any labels?
boolean percentageIsSet = false;
// Do we have ANY queue set weight in any labels?
boolean weightIsSet = false;
// Do we have ANY queue set absolute in any labels?
boolean absoluteMinResSet = false;
StringBuilder diagMsg = new StringBuilder();
for (CSQueue queue : queues) {
for (String nodeLabel : queueCapacities.getExistingNodeLabels()) {
float capacityByLabel = queue.getQueueCapacities().getCapacity(nodeLabel);
if (capacityByLabel > 0) {
percentageIsSet = true;
}
float weightByLabel = queue.getQueueCapacities().getWeight(nodeLabel);
// By default weight is set to -1, so >= 0 is enough.
if (weightByLabel >= 0) {
weightIsSet = true;
diagMsg.append(
"{Queue=" + queue.getQueuePath() + ", label=" + nodeLabel
+ " uses weight mode}. ");
}
if (checkConfigTypeIsAbsoluteResource(queue.getQueuePathObject(), nodeLabel)) {
absoluteMinResSet = true;
// There's a special handling: when absolute resource is configured,
// capacity will be calculated (and set) for UI/metrics purposes, so
// when asboluteMinResource is set, unset percentage
percentageIsSet = false;
diagMsg.append(
"{Queue=" + queue.getQueuePath() + ", label=" + nodeLabel
+ " uses absolute mode}. ");
}
if (percentageIsSet) {
diagMsg.append(
"{Queue=" + queue.getQueuePath() + ", label=" + nodeLabel
+ " uses percentage mode}. ");
}
}
}
// If we have mixed capacity, weight or absolute resource (any of the two)
// We will throw exception
// Root queue is an exception here, because by default root queue returns
// 100 as capacity no matter what. We should look into this case in the
// future. To avoid impact too many code paths, we don;t check root queue's
// config.
if (queues.iterator().hasNext() &&
!queues.iterator().next().getQueuePath().equals(
CapacitySchedulerConfiguration.ROOT) &&
(percentageIsSet ? 1 : 0) + (weightIsSet ? 1 : 0) + (absoluteMinResSet ?
1 :
0) > 1) {
throw new IOException("Parent queue '" + getQueuePath()
+ "' have children queue used mixed of "
+ " weight mode, percentage and absolute mode, it is not allowed, please "
+ "double check, details:" + diagMsg.toString());
}
if (weightIsSet || queues.isEmpty()) {
return QueueCapacityType.WEIGHT;
} else if (absoluteMinResSet) {
return QueueCapacityType.ABSOLUTE_RESOURCE;
} else {
return QueueCapacityType.PERCENT;
}
}
public | AbstractParentQueue |
java | apache__hadoop | hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java | {
"start": 2546,
"end": 3042
} | class ____ implements Writable {
private static final byte NO_ACL_ENTRIES = -1;
private static final int NO_XATTRS = -1;
// FileStatus fields
private Path path;
private long length;
private boolean isdir;
private short blockReplication;
private long blocksize;
private long modificationTime;
private long accessTime;
private FsPermission permission;
private String owner;
private String group;
private String ecPolicy;
// Retain static arrays of | CopyListingFileStatus |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractGetFileStatusTest.java | {
"start": 25216,
"end": 25623
} | class ____ implements PathFilter {
private final String match;
MatchesNameFilter(String match) {
this.match = match;
}
@Override
public boolean accept(Path path) {
return match.equals(path.getName());
}
}
/**
* A filesystem filter which exposes the protected method
* {@link #listLocatedStatus(Path, PathFilter)}.
*/
protected static final | MatchesNameFilter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/TreatListJoinTest.java | {
"start": 4094,
"end": 4232
} | class ____ extends AbstractEntity {
public String valueB;
public EntityB() {
super.entityType = getClass().getName();
}
}
}
| EntityB |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/SpringBootContextLoaderTests.java | {
"start": 14004,
"end": 14148
} | class ____ {
}
@SpringBootTest(properties = { "key=my:Value", "anotherKey:another=Value" }, classes = Config.class)
static | SameSeparatorInValue |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/WeightedRoundRobinLoadBalancer.java | {
"start": 15580,
"end": 16836
} | class ____ implements Runnable {
@Override
public void run() {
if (currentPicker != null && currentPicker instanceof WeightedRoundRobinPicker) {
updateWeight((WeightedRoundRobinPicker) currentPicker);
}
weightUpdateTimer = syncContext.schedule(this, config.weightUpdatePeriodNanos,
TimeUnit.NANOSECONDS, timeService);
}
}
private void createAndApplyOrcaListeners() {
for (ChildLbState child : getChildLbStates()) {
WeightedChildLbState wChild = (WeightedChildLbState) child;
for (WrrSubchannel weightedSubchannel : wChild.subchannels) {
if (config.enableOobLoadReport) {
OrcaOobUtil.setListener(weightedSubchannel,
wChild.getOrCreateOrcaListener(config.errorUtilizationPenalty),
OrcaOobUtil.OrcaReportingConfig.newBuilder()
.setReportInterval(config.oobReportingPeriodNanos, TimeUnit.NANOSECONDS)
.build());
} else {
OrcaOobUtil.setListener(weightedSubchannel, null, null);
}
}
}
}
@Override
public void shutdown() {
if (weightUpdateTimer != null) {
weightUpdateTimer.cancel();
}
super.shutdown();
}
@VisibleForTesting
final | UpdateWeightTask |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/replaces/JdbcBookService.java | {
"start": 985,
"end": 1690
} | class ____ implements BookService {
DataSource dataSource;
public JdbcBookService(DataSource dataSource) {
this.dataSource = dataSource;
}
// end::replaces[]
@Override
public Book findBook(String title) {
try(Connection connection = dataSource.getConnection()) {
PreparedStatement ps = connection.prepareStatement("select * from books where title = ?");
ps.setString(1, title);
ResultSet rs = ps.executeQuery();
if(rs.next()) {
return new Book(rs.getString("title"));
}
}
catch (SQLException ex) {
return null;
}
return null;
}
}
| JdbcBookService |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassWriter.java | {
"start": 36879,
"end": 37067
} | class ____ build. Does nothing if the constant pool
* already contains a similar item. <i>This method is intended for {@link Attribute} sub classes,
* and is normally not needed by | being |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java | {
"start": 3130,
"end": 3194
} | interface ____ extends NamedWriteable, ChunkedToXContent {}
}
| Result |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/schemaStat/SchemaStatTest8.java | {
"start": 416,
"end": 1473
} | class ____ extends TestCase {
public void test_schemaStat() throws Exception {
String sql = "SELECT * FROM AQLQCAT " +
"JOIN AQLQCATB " +
" ON AQLQCAT.ZZ = AQLQCATB.RR " +
" AND trim(AQLQCAT.BB) = AQLQCATB.DD " +
"WHERE AQLQCAT.MANDT = 'A0' " +
" AND AQLQCATB.NUM = 'A1'";
DbType dbType = JdbcConstants.ORACLE;
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
SQLStatement stmt = parser.parseStatementList().get(0);
SchemaStatVisitor statVisitor = SQLUtils.createSchemaStatVisitor(dbType);
stmt.accept(statVisitor);
// System.out.println(statVisitor.getColumns());
// System.out.println(statVisitor.getGroupByColumns()); // group by
System.out.println("relationships : " + statVisitor.getRelationships()); // group by
assertEquals(8, statVisitor.getColumns().size());
assertEquals(6, statVisitor.getConditions().size());
}
}
| SchemaStatTest8 |
java | google__guice | core/test/com/google/inject/ScopesTest.java | {
"start": 16949,
"end": 16998
} | interface ____<T> {}
@Singleton
public static | In |
java | spring-projects__spring-boot | module/spring-boot-data-cassandra/src/test/java/org/springframework/boot/data/cassandra/autoconfigure/DataCassandraReactiveAutoConfigurationTests.java | {
"start": 1737,
"end": 3662
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withPropertyValues("spring.cassandra.keyspaceName=boot_test")
.withUserConfiguration(CassandraMockConfiguration.class)
.withConfiguration(AutoConfigurations.of(CassandraAutoConfiguration.class, DataCassandraAutoConfiguration.class,
DataCassandraReactiveAutoConfiguration.class));
@Test
void reactiveCqlTemplateExists() {
this.contextRunner.run((context) -> assertThat(context).hasSingleBean(ReactiveCqlTemplate.class));
}
@Test
void templateExists() {
this.contextRunner.run((context) -> assertThat(context).hasSingleBean(ReactiveCassandraTemplate.class));
}
@Test
void templateUsesReactiveCqlTemplate() {
this.contextRunner.run((context) -> {
assertThat(context).hasSingleBean(ReactiveCassandraTemplate.class);
assertThat(context.getBean(ReactiveCassandraTemplate.class).getReactiveCqlOperations())
.isSameAs(context.getBean(ReactiveCqlTemplate.class));
});
}
@Test
void entityScanShouldSetManagedTypes() {
this.contextRunner.withUserConfiguration(EntityScanConfig.class).run((context) -> {
assertThat(context).hasSingleBean(CassandraMappingContext.class);
CassandraMappingContext mappingContext = context.getBean(CassandraMappingContext.class);
assertThat(mappingContext.getManagedTypes()).singleElement()
.satisfies((typeInformation) -> assertThat(typeInformation.getType()).isEqualTo(City.class));
});
}
@Test
void userTypeResolverShouldBeSet() {
this.contextRunner.run((context) -> {
assertThat(context).hasSingleBean(CassandraConverter.class);
assertThat(context.getBean(CassandraConverter.class)).extracting("userTypeResolver")
.isInstanceOf(SimpleUserTypeResolver.class);
});
}
@Configuration(proxyBeanMethods = false)
@EntityScan("org.springframework.boot.data.cassandra.domain.city")
static | DataCassandraReactiveAutoConfigurationTests |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/ParameterMapper.java | {
"start": 1487,
"end": 1894
} | class ____ such proprietary details. However,
* it is best to avoid using such proprietary RDBMS features if possible.
* @return a Map of input parameters, keyed by name (never {@code null})
* @throws SQLException if an SQLException is encountered setting
* parameter values (that is, there's no need to catch SQLException)
*/
Map<String, ?> createMap(Connection con) throws SQLException;
}
| conceals |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/aot/nativex/RuntimeHintsWriterTests.java | {
"start": 12494,
"end": 13849
} | class ____ {
@Test
void shouldWriteEmptyHint() throws JSONException {
RuntimeHints hints = new RuntimeHints();
assertEquals("{}", hints);
}
@Test
void shouldWriteSingleHint() throws JSONException {
RuntimeHints hints = new RuntimeHints();
hints.serialization().registerType(TypeReference.of(String.class));
assertEquals("""
{
"serialization": [
{ "type": "java.lang.String" }
]
}
""", hints);
}
@Test
void shouldWriteMultipleHints() throws JSONException {
RuntimeHints hints = new RuntimeHints();
hints.serialization()
.registerType(TypeReference.of(Environment.class))
.registerType(TypeReference.of(String.class));
assertEquals("""
{
"serialization": [
{ "type": "java.lang.String" },
{ "type": "org.springframework.core.env.Environment" }
]
}
""", hints);
}
@Test
void shouldWriteSingleHintWithCondition() throws JSONException {
RuntimeHints hints = new RuntimeHints();
hints.serialization().registerType(TypeReference.of(String.class),
builder -> builder.onReachableType(TypeReference.of("org.example.Test")));
assertEquals("""
{
"serialization": [
{ "condition": { "typeReached": "org.example.Test" }, "type": "java.lang.String" }
]
}
""", hints);
}
}
@Nested
| SerializationHintsTests |
java | spring-projects__spring-boot | module/spring-boot-micrometer-observation/src/main/java/org/springframework/boot/micrometer/observation/autoconfigure/ObservationRegistryConfigurer.java | {
"start": 1580,
"end": 4253
} | class ____ {
private final ObjectProvider<ObservationRegistryCustomizer<?>> customizers;
private final ObjectProvider<ObservationPredicate> observationPredicates;
private final ObjectProvider<GlobalObservationConvention<?>> observationConventions;
private final ObjectProvider<ObservationHandler<?>> observationHandlers;
private final ObjectProvider<ObservationHandlerGroup> observationHandlerGroups;
private final ObjectProvider<ObservationFilter> observationFilters;
ObservationRegistryConfigurer(ObjectProvider<ObservationRegistryCustomizer<?>> customizers,
ObjectProvider<ObservationPredicate> observationPredicates,
ObjectProvider<GlobalObservationConvention<?>> observationConventions,
ObjectProvider<ObservationHandler<?>> observationHandlers,
ObjectProvider<ObservationHandlerGroup> observationHandlerGroups,
ObjectProvider<ObservationFilter> observationFilters) {
this.customizers = customizers;
this.observationPredicates = observationPredicates;
this.observationConventions = observationConventions;
this.observationHandlers = observationHandlers;
this.observationHandlerGroups = observationHandlerGroups;
this.observationFilters = observationFilters;
}
void configure(ObservationRegistry registry) {
registerObservationPredicates(registry);
registerGlobalObservationConventions(registry);
registerHandlers(registry);
registerFilters(registry);
customize(registry);
}
private void registerHandlers(ObservationRegistry registry) {
ObservationHandlerGroups groups = new ObservationHandlerGroups(this.observationHandlerGroups.stream().toList());
List<ObservationHandler<?>> orderedHandlers = this.observationHandlers.orderedStream().toList();
groups.register(registry.observationConfig(), orderedHandlers);
}
private void registerObservationPredicates(ObservationRegistry registry) {
this.observationPredicates.orderedStream().forEach(registry.observationConfig()::observationPredicate);
}
private void registerGlobalObservationConventions(ObservationRegistry registry) {
this.observationConventions.orderedStream().forEach(registry.observationConfig()::observationConvention);
}
private void registerFilters(ObservationRegistry registry) {
this.observationFilters.orderedStream().forEach(registry.observationConfig()::observationFilter);
}
@SuppressWarnings("unchecked")
private void customize(ObservationRegistry registry) {
LambdaSafe.callbacks(ObservationRegistryCustomizer.class, this.customizers.orderedStream().toList(), registry)
.withLogger(ObservationRegistryConfigurer.class)
.invoke((customizer) -> customizer.customize(registry));
}
}
| ObservationRegistryConfigurer |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/cache/RequestCache.java | {
"start": 1590,
"end": 3591
} | interface ____ {
/**
* Executes and optionally caches a request using the provided supplier function. If caching is enabled
* for this session, the result will be cached and subsequent identical requests will return the cached
* value without re-executing the supplier.
* <p>
* The caching behavior is determined by the cache retention specified in the request's metadata.
* If an error occurs during execution, it will be cached and re-thrown for subsequent identical requests.
*
* @param <REQ> The request type
* @param <REP> The response type
* @param req The request object used as the cache key
* @param supplier The function to execute and cache the result
* @return The result from the supplier (either fresh or cached)
* @throws RuntimeException Any exception thrown by the supplier will be cached and re-thrown on subsequent calls
*/
<REQ extends Request<?>, REP extends Result<REQ>> REP request(REQ req, Function<REQ, REP> supplier);
/**
* Executes and optionally caches a batch of requests using the provided supplier function.
* This method allows for efficient batch processing of multiple requests.
* <p>
* The implementation may optimize the execution by:
* <ul>
* <li>Returning cached results for previously executed requests</li>
* <li>Grouping similar requests for batch processing</li>
* <li>Processing requests in parallel where appropriate</li>
* </ul>
*
* @param <REQ> The request type
* @param <REP> The response type
* @param req List of requests to process
* @param supplier Function to execute the batch of requests
* @return List of results corresponding to the input requests
* @throws BatchRequestException if any request in the batch fails
*/
<REQ extends Request<?>, REP extends Result<REQ>> List<REP> requests(
List<REQ> req, Function<List<REQ>, List<REP>> supplier);
}
| RequestCache |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/KotlinRequestBodyAdvice.java | {
"start": 1642,
"end": 2784
} | class ____ extends RequestBodyAdviceAdapter {
@Override
public boolean supports(MethodParameter methodParameter, Type targetType,
Class<? extends HttpMessageConverter<?>> converterType) {
return AbstractKotlinSerializationHttpMessageConverter.class.isAssignableFrom(converterType);
}
@Override
public @Nullable Map<String, Object> determineReadHints(MethodParameter parameter, Type targetType,
Class<? extends SmartHttpMessageConverter<?>> converterType) {
KFunction<?> function = ReflectJvmMapping.getKotlinFunction(Objects.requireNonNull(parameter.getMethod()));
int i = 0;
int index = parameter.getParameterIndex();
for (KParameter p : Objects.requireNonNull(function).getParameters()) {
if (KParameter.Kind.VALUE.equals(p.getKind())) {
if (index == i++) {
if (HttpEntity.class.isAssignableFrom(parameter.getParameterType())) {
return Collections.singletonMap(KType.class.getName(),
Objects.requireNonNull(p.getType().getArguments().get(0).getType()));
}
return Collections.singletonMap(KType.class.getName(), p.getType());
}
}
}
return null;
}
}
| KotlinRequestBodyAdvice |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/plugin/ReportingConverter.java | {
"start": 1338,
"end": 1922
} | interface ____ {
/**
* Converts values from model's reporting section into the configuration for Maven Site Plugin 3.x.
*
* @param model The model whose reporting section should be converted, must not be <code>null</code>.
* @param request The model building request that holds further settings, must not be {@code null}.
* @param problems The container used to collect problems that were encountered, must not be {@code null}.
*/
void convertReporting(Model model, ModelBuildingRequest request, ModelProblemCollector problems);
}
| ReportingConverter |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataWriter.java | {
"start": 1319,
"end": 4652
} | class ____ extends DataOutputStream {
private final ByteBuffer buffer;
private final NativeDataTarget target;
private final static byte TRUE = (byte) 1;
private final static byte FALSE = (byte) 0;
private final java.io.DataOutputStream javaWriter;
private void checkSizeAndFlushIfNecessary(int length) throws IOException {
if (buffer.position() > 0 && buffer.remaining() < length) {
flush();
}
}
public ByteBufferDataWriter(NativeDataTarget handler) {
Preconditions.checkNotNull(handler);
this.buffer = handler.getOutputBuffer().getByteBuffer();
this.target = handler;
this.javaWriter = new java.io.DataOutputStream(this);
}
@Override
public synchronized void write(int v) throws IOException {
checkSizeAndFlushIfNecessary(1);
buffer.put((byte) v);
}
@Override
public boolean shortOfSpace(int dataLength) throws IOException {
if (buffer.remaining() < dataLength) {
return true;
}
return false;
}
@Override
public synchronized void write(byte b[], int off, int len) throws IOException {
int remain = len;
int offset = off;
while (remain > 0) {
int currentFlush = 0;
if (buffer.remaining() > 0) {
currentFlush = Math.min(buffer.remaining(), remain);
buffer.put(b, offset, currentFlush);
remain -= currentFlush;
offset += currentFlush;
} else {
flush();
}
}
}
@Override
public void flush() throws IOException {
target.sendData();
buffer.position(0);
}
@Override
public void close() throws IOException {
if (hasUnFlushedData()) {
flush();
}
target.finishSendData();
}
@Override
public final void writeBoolean(boolean v) throws IOException {
checkSizeAndFlushIfNecessary(1);
buffer.put(v ? TRUE : FALSE);
}
@Override
public final void writeByte(int v) throws IOException {
checkSizeAndFlushIfNecessary(1);
buffer.put((byte) v);
}
@Override
public final void writeShort(int v) throws IOException {
checkSizeAndFlushIfNecessary(2);
buffer.putShort((short) v);
}
@Override
public final void writeChar(int v) throws IOException {
checkSizeAndFlushIfNecessary(2);
buffer.put((byte) ((v >>> 8) & 0xFF));
buffer.put((byte) ((v >>> 0) & 0xFF));
}
@Override
public final void writeInt(int v) throws IOException {
checkSizeAndFlushIfNecessary(4);
buffer.putInt(v);
}
@Override
public final void writeLong(long v) throws IOException {
checkSizeAndFlushIfNecessary(8);
buffer.putLong(v);
}
@Override
public final void writeFloat(float v) throws IOException {
checkSizeAndFlushIfNecessary(4);
writeInt(Float.floatToIntBits(v));
}
@Override
public final void writeDouble(double v) throws IOException {
checkSizeAndFlushIfNecessary(8);
writeLong(Double.doubleToLongBits(v));
}
@Override
public final void writeBytes(String s) throws IOException {
javaWriter.writeBytes(s);
}
/**
 * Writes the string per the {@code DataOutput.writeChars} contract by
 * delegating to the wrapped {@code javaWriter}.
 *
 * @param s the string to write
 * @throws IOException if the delegate fails to write
 */
@Override
public final void writeChars(String s) throws IOException {
    javaWriter.writeChars(s);
}
/**
 * Writes the string in modified UTF-8 per the {@code DataOutput.writeUTF}
 * contract by delegating to the wrapped {@code javaWriter}.
 *
 * @param str the string to write
 * @throws IOException if the delegate fails to write
 */
@Override
public final void writeUTF(String str) throws IOException {
    javaWriter.writeUTF(str);
}
/**
 * Reports whether bytes have been written to the buffer since the last
 * flush (i.e. the write position has advanced past zero).
 *
 * @return {@code true} if buffered bytes are awaiting a flush
 */
@Override
public boolean hasUnFlushedData() {
    // position() is never negative, so "!= 0" is equivalent to "> 0".
    return buffer.position() != 0;
}
}
| ByteBufferDataWriter |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/multipart/MultipartOutputFileResponse.java | {
"start": 219,
"end": 369
} | class ____ {
@RestForm
String name;
@RestForm
@PartType(MediaType.APPLICATION_OCTET_STREAM)
File file;
}
| MultipartOutputFileResponse |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java | {
"start": 1275,
"end": 3243
} | class ____ extends ESTestCase {
public void testToXContentSerialiationWithSortedFields() throws Exception {
ShardRouting shardRouting = TestShardRouting.newShardRouting("foo", 0, "node_id", true, ShardRoutingState.STARTED);
Segment segment = new Segment("my");
SortField sortField = new SortField("foo", SortField.Type.STRING);
sortField.setMissingValue(SortField.STRING_LAST);
segment.segmentSort = new Sort(sortField);
ShardSegments shardSegments = new ShardSegments(shardRouting, Collections.singletonList(segment));
IndicesSegmentResponse response = new IndicesSegmentResponse(
new ShardSegments[] { shardSegments },
1,
1,
0,
Collections.emptyList()
);
try (XContentBuilder builder = jsonBuilder()) {
ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, EMPTY_PARAMS);
}
}
public void testChunking() {
final int indices = randomIntBetween(1, 10);
final List<ShardRouting> routings = new ArrayList<>(indices);
for (int i = 0; i < indices; i++) {
routings.add(TestShardRouting.newShardRouting("index-" + i, 0, "node_id", true, ShardRoutingState.STARTED));
}
Segment segment = new Segment("my");
SortField sortField = new SortField("foo", SortField.Type.STRING);
sortField.setMissingValue(SortField.STRING_LAST);
segment.segmentSort = new Sort(sortField);
AbstractChunkedSerializingTestCase.assertChunkCount(
new IndicesSegmentResponse(
routings.stream().map(routing -> new ShardSegments(routing, List.of(segment))).toArray(ShardSegments[]::new),
indices,
indices,
0,
Collections.emptyList()
),
response -> 11 * response.getIndices().size() + 4
);
}
}
| IndicesSegmentResponseTests |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/PatternMatchUtils.java | {
"start": 980,
"end": 5044
} | class ____ {
/**
* Match a String against the given pattern, supporting direct equality as
* well as the following simple pattern styles: {@code xxx*}, {@code *xxx},
* {@code *xxx*}, and {@code xxx*yyy} (with an arbitrary number of pattern parts).
* <p>Returns {@code false} if the supplied String or pattern is {@code null}.
* @param pattern the pattern to match against
* @param str the String to match
* @return whether the String matches the given pattern
*/
@Contract("null, _ -> false; _, null -> false")
public static boolean simpleMatch(@Nullable String pattern, @Nullable String str) {
return simpleMatch(pattern, str, false);
}
/**
* Variant of {@link #simpleMatch(String, String)} that ignores upper/lower case.
* @since 6.1.20
*/
@Contract("null, _ -> false; _, null -> false")
public static boolean simpleMatchIgnoreCase(@Nullable String pattern, @Nullable String str) {
return simpleMatch(pattern, str, true);
}
private static boolean simpleMatch(@Nullable String pattern, @Nullable String str, boolean ignoreCase) {
if (pattern == null || str == null) {
return false;
}
int firstIndex = pattern.indexOf('*');
if (firstIndex == -1) {
return (ignoreCase ? pattern.equalsIgnoreCase(str) : pattern.equals(str));
}
if (firstIndex == 0) {
if (pattern.length() == 1) {
return true;
}
int nextIndex = pattern.indexOf('*', 1);
if (nextIndex == -1) {
String part = pattern.substring(1);
return (ignoreCase ? StringUtils.endsWithIgnoreCase(str, part) : str.endsWith(part));
}
String part = pattern.substring(1, nextIndex);
if (part.isEmpty()) {
return simpleMatch(pattern.substring(nextIndex), str, ignoreCase);
}
int partIndex = indexOf(str, part, 0, ignoreCase);
while (partIndex != -1) {
if (simpleMatch(pattern.substring(nextIndex), str.substring(partIndex + part.length()), ignoreCase)) {
return true;
}
partIndex = indexOf(str, part, partIndex + 1, ignoreCase);
}
return false;
}
return (str.length() >= firstIndex &&
checkStartsWith(pattern, str, firstIndex, ignoreCase) &&
simpleMatch(pattern.substring(firstIndex), str.substring(firstIndex), ignoreCase));
}
private static boolean checkStartsWith(String pattern, String str, int index, boolean ignoreCase) {
String part = str.substring(0, index);
return (ignoreCase ? StringUtils.startsWithIgnoreCase(pattern, part) : pattern.startsWith(part));
}
private static int indexOf(String str, String otherStr, int startIndex, boolean ignoreCase) {
if (!ignoreCase) {
return str.indexOf(otherStr, startIndex);
}
for (int i = startIndex; i <= (str.length() - otherStr.length()); i++) {
if (str.regionMatches(true, i, otherStr, 0, otherStr.length())) {
return i;
}
}
return -1;
}
/**
* Match a String against the given patterns, supporting direct equality as
* well as the following simple pattern styles: {@code xxx*}, {@code *xxx},
* {@code *xxx*}, and {@code xxx*yyy} (with an arbitrary number of pattern parts).
* <p>Returns {@code false} if the supplied String is {@code null} or if the
* supplied patterns array is {@code null} or empty.
* @param patterns the patterns to match against
* @param str the String to match
* @return whether the String matches any of the given patterns
*/
@Contract("null, _ -> false; _, null -> false")
public static boolean simpleMatch(String @Nullable [] patterns, @Nullable String str) {
if (patterns != null) {
for (String pattern : patterns) {
if (simpleMatch(pattern, str)) {
return true;
}
}
}
return false;
}
/**
* Variant of {@link #simpleMatch(String[], String)} that ignores upper/lower case.
* @since 6.1.20
*/
@Contract("null, _ -> false; _, null -> false")
public static boolean simpleMatchIgnoreCase(String @Nullable [] patterns, @Nullable String str) {
if (patterns != null) {
for (String pattern : patterns) {
if (simpleMatch(pattern, str, true)) {
return true;
}
}
}
return false;
}
}
| PatternMatchUtils |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/session/SqlSessionFactory.java | {
"start": 827,
"end": 1359
} | interface ____ {
SqlSession openSession();
SqlSession openSession(boolean autoCommit);
SqlSession openSession(Connection connection);
SqlSession openSession(TransactionIsolationLevel level);
SqlSession openSession(ExecutorType execType);
SqlSession openSession(ExecutorType execType, boolean autoCommit);
SqlSession openSession(ExecutorType execType, TransactionIsolationLevel level);
SqlSession openSession(ExecutorType execType, Connection connection);
Configuration getConfiguration();
}
| SqlSessionFactory |
java | google__dagger | javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveSubcomponentTest.java | {
"start": 2990,
"end": 3793
} | interface ____ {",
" MySubcomponent1 subcomponent1();",
"}");
GradleModule.create(projectDir, "library1")
.addBuildFile(
"plugins {",
" id 'org.jetbrains.kotlin.jvm' version \"$kotlin_version\"",
" id 'org.jetbrains.kotlin.kapt' version \"$kotlin_version\"",
"}",
"dependencies {",
" implementation \"com.google.dagger:dagger:$dagger_version\"",
" annotationProcessor \"com.google.dagger:dagger-compiler:$dagger_version\"",
"}")
.addSrcFile(
"MyModule.kt",
"package library1",
"",
"import dagger.Module",
"import dagger.Provides",
"",
"@Module",
"public | MyComponent |
java | quarkusio__quarkus | integration-tests/vertx/src/main/java/io/quarkus/it/vertx/verticles/BareVerticle.java | {
"start": 117,
"end": 434
} | class ____ extends AbstractVerticle {
@Override
public void start(Promise<Void> done) {
String address = config().getString("id");
vertx.eventBus().consumer(address)
.handler(message -> message.reply("OK-" + address))
.completionHandler(done);
}
}
| BareVerticle |
java | apache__camel | dsl/camel-xml-io-dsl/src/test/java/org/apache/camel/dsl/xml/io/XmlLoadRestTest.java | {
"start": 1306,
"end": 3210
} | class ____ {
@Test
public void testLoadRoutesBuilderFromXml() throws Exception {
try (DefaultCamelContext context = new DefaultCamelContext()) {
context.getCamelContextExtension().getRegistry().bind("dummy-rest", new DummyRestConsumerFactory());
context.addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
restConfiguration()
.host("localhost")
.component("dummy-rest");
from("direct:foo")
.routeId("foo")
.to("mock:foo");
}
});
context.start();
assertNotNull(context.getRoute("foo"), "Existing foo route should be there");
assertEquals(1, context.getRoutes().size());
// test that existing route works
MockEndpoint foo = context.getEndpoint("mock:foo", MockEndpoint.class);
foo.expectedBodiesReceived("Hello World");
context.createProducerTemplate().sendBody("direct:foo", "Hello World");
foo.assertIsSatisfied();
// load rest from XML and add them to the existing camel context
Resource resource = PluginHelper.getResourceLoader(context).resolveResource(
"/org/apache/camel/dsl/xml/io/barRest.xml");
PluginHelper.getRoutesLoader(context).loadRoutes(resource);
assertEquals(2, context.getRoutes().size());
// test that loaded route works
MockEndpoint bar = context.getEndpoint("mock:bar", MockEndpoint.class);
bar.expectedBodiesReceived("Bye World");
context.createProducerTemplate().sendBody("seda:get-say-hello-bar", "Bye World");
bar.assertIsSatisfied();
}
}
}
| XmlLoadRestTest |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java | {
"start": 3770,
"end": 33697
} | class ____ extends ESTestCase {
private HttpClient httpClient;
private Map<String, EmailAttachmentParser<? extends EmailAttachment>> attachmentParsers = new HashMap<>();
private EmailAttachmentsParser emailAttachmentsParser;
private ReportingAttachmentParser reportingAttachmentParser;
private MockTextTemplateEngine templateEngine = new MockTextTemplateEngine();
private String dashboardUrl = "http://www.example.org/ovb/api/reporting/generate/dashboard/My-Dashboard";
private ClusterSettings clusterSettings;
@Before
public void init() throws Exception {
httpClient = mock(HttpClient.class);
clusterSettings = mockClusterService().getClusterSettings();
WebhookService webhookService = new WebhookService(Settings.EMPTY, httpClient, clusterSettings);
reportingAttachmentParser = new ReportingAttachmentParser(Settings.EMPTY, webhookService, templateEngine, clusterSettings);
attachmentParsers.put(ReportingAttachmentParser.TYPE, reportingAttachmentParser);
emailAttachmentsParser = new EmailAttachmentsParser(attachmentParsers);
}
public void testSerializationWorks() throws Exception {
String id = "some-id";
XContentBuilder builder = jsonBuilder().startObject()
.startObject(id)
.startObject(ReportingAttachmentParser.TYPE)
.field("url", dashboardUrl);
Integer retries = null;
boolean withRetries = randomBoolean();
if (withRetries) {
retries = randomIntBetween(1, 10);
builder.field("retries", retries);
}
TimeValue interval = null;
boolean withInterval = randomBoolean();
if (withInterval) {
interval = randomTimeValue(1, 100, TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS);
builder.field("interval", interval.getStringRep());
}
boolean isInline = randomBoolean();
if (isInline) {
builder.field("inline", true);
}
BasicAuth auth = null;
boolean withAuth = randomBoolean();
boolean isPasswordEncrypted = randomBoolean();
if (withAuth) {
builder.startObject("auth")
.startObject("basic")
.field("username", "foo")
.field("password", isPasswordEncrypted ? "::es_redacted::" : "secret")
.endObject()
.endObject();
auth = new BasicAuth("foo", "secret".toCharArray());
}
HttpProxy proxy = null;
boolean withProxy = randomBoolean();
if (withProxy) {
proxy = new HttpProxy("example.org", 8080);
builder.startObject("proxy").field("host", proxy.getHost()).field("port", proxy.getPort()).endObject();
}
builder.endObject().endObject().endObject();
XContentParser parser = createParser(builder);
EmailAttachments emailAttachments = emailAttachmentsParser.parse(parser);
assertThat(emailAttachments.getAttachments(), hasSize(1));
XContentBuilder toXcontentBuilder = jsonBuilder().startObject();
List<EmailAttachment> attachments = new ArrayList<>(emailAttachments.getAttachments());
WatcherParams watcherParams = WatcherParams.builder().hideSecrets(isPasswordEncrypted).build();
attachments.get(0).toXContent(toXcontentBuilder, watcherParams);
toXcontentBuilder.endObject();
assertThat(Strings.toString(toXcontentBuilder), is(Strings.toString(builder)));
XContentBuilder attachmentXContentBuilder = jsonBuilder().startObject();
ReportingAttachment attachment = new ReportingAttachment(id, dashboardUrl, isInline, interval, retries, auth, proxy);
attachment.toXContent(attachmentXContentBuilder, watcherParams);
attachmentXContentBuilder.endObject();
assertThat(Strings.toString(attachmentXContentBuilder), is(Strings.toString(builder)));
assertThat(attachments.get(0).inline(), is(isInline));
}
public void testGoodCase() throws Exception {
// returns interval HTTP code for five times, then return expected data
String content = randomAlphaOfLength(200);
String path = "/ovb/api/reporting/jobs/download/iu5zfzvk15oa8990bfas9wy2";
String randomContentType = randomAlphaOfLength(20);
Map<String, String[]> headers = new HashMap<>();
headers.put("Content-Type", new String[] { randomContentType });
when(httpClient.execute(any(HttpRequest.class))).thenReturn(
new HttpResponse(200, "{\"path\":\"" + path + "\", \"other\":\"content\"}")
)
.thenReturn(new HttpResponse(503))
.thenReturn(new HttpResponse(503))
.thenReturn(new HttpResponse(503))
.thenReturn(new HttpResponse(503))
.thenReturn(new HttpResponse(503))
.thenReturn(new HttpResponse(200, content, headers));
ReportingAttachment reportingAttachment = new ReportingAttachment(
"foo",
dashboardUrl,
randomBoolean(),
TimeValue.timeValueMillis(1),
10,
null,
null
);
Attachment attachment = reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, reportingAttachment);
assertThat(attachment, instanceOf(Attachment.Bytes.class));
assertThat(attachment.getWarnings(), hasSize(0));
Attachment.Bytes bytesAttachment = (Attachment.Bytes) attachment;
assertThat(new String(bytesAttachment.bytes(), StandardCharsets.UTF_8), is(content));
assertThat(bytesAttachment.contentType(), is(randomContentType));
ArgumentCaptor<HttpRequest> requestArgumentCaptor = ArgumentCaptor.forClass(HttpRequest.class);
verify(httpClient, times(7)).execute(requestArgumentCaptor.capture());
assertThat(requestArgumentCaptor.getAllValues(), hasSize(7));
// first invocation to the original URL
assertThat(requestArgumentCaptor.getAllValues().get(0).path(), is("/ovb/api/reporting/generate/dashboard/My-Dashboard"));
assertThat(requestArgumentCaptor.getAllValues().get(0).method(), is(HttpMethod.POST));
// all other invocations to the redirected urls from the JSON payload
for (int i = 1; i < 7; i++) {
assertThat(requestArgumentCaptor.getAllValues().get(i).path(), is(path));
assertThat(requestArgumentCaptor.getAllValues().get(i).params().keySet(), hasSize(0));
}
// test that the header "kbn-xsrf" has been set to "reporting" in all requests
requestArgumentCaptor.getAllValues().forEach((req) -> assertThat(req.headers(), hasEntry("kbn-xsrf", "reporting")));
}
public void testInitialRequestFailsWithError() throws Exception {
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(403));
ReportingAttachment attachment = new ReportingAttachment("foo", dashboardUrl, randomBoolean(), null, null, null, null);
ElasticsearchException e = expectThrows(
ElasticsearchException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(e.getMessage(), containsString("Error response when trying to trigger reporting generation"));
}
public void testInitialRequestThrowsIOException() throws Exception {
when(httpClient.execute(any(HttpRequest.class))).thenThrow(new IOException("Connection timed out"));
ReportingAttachment attachment = new ReportingAttachment("foo", "http://www.example.org/", randomBoolean(), null, null, null, null);
IOException e = expectThrows(
IOException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(e.getMessage(), containsString("Connection timed out"));
}
public void testInitialRequestContainsInvalidPayload() throws Exception {
when(httpClient.execute(any(HttpRequest.class)))
// closing json bracket is missing
.thenReturn(new HttpResponse(200, "{\"path\":\"anything\""));
ReportingAttachment attachment = new ReportingAttachment("foo", dashboardUrl, randomBoolean(), null, null, null, null);
XContentParseException e = expectThrows(
XContentParseException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(e.getMessage(), containsString("Unexpected end of file"));
}
public void testInitialRequestContainsPathAsObject() throws Exception {
when(httpClient.execute(any(HttpRequest.class)))
// path must be a field, but is an object here
.thenReturn(new HttpResponse(200, "{\"path\": { \"foo\" : \"anything\"}}"));
ReportingAttachment attachment = new ReportingAttachment("foo", "http://www.example.org/", randomBoolean(), null, null, null, null);
XContentParseException e = expectThrows(
XContentParseException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(
e.getMessage(),
containsString("[reporting_attachment_kibana_payload] path doesn't support values of type: START_OBJECT")
);
}
public void testInitialRequestDoesNotContainPathInJson() throws Exception {
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"foo\":\"bar\"}"));
ReportingAttachment attachment = new ReportingAttachment("foo", dashboardUrl, randomBoolean(), null, null, null, null);
ElasticsearchException e = expectThrows(
ElasticsearchException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(e.getMessage(), containsString("Watch[watch1] reporting[foo] field path found in JSON payload"));
}
public void testPollingRequestIsError() throws Exception {
boolean hasBody = randomBoolean();
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"path\":\"whatever\"}"))
.thenReturn(new HttpResponse(403, hasBody ? "no permissions" : null));
ReportingAttachment attachment = new ReportingAttachment(
"foo",
"http://www.example.org/",
randomBoolean(),
TimeValue.timeValueMillis(1),
10,
null,
null
);
ElasticsearchException e = expectThrows(
ElasticsearchException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(e.getMessage(), containsString("Error when polling pdf"));
if (hasBody) {
assertThat(e.getMessage(), containsString("body[no permissions]"));
}
}
public void testPollingRequestRetryIsExceeded() throws Exception {
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"path\":\"whatever\"}"))
.thenReturn(new HttpResponse(503))
.thenReturn(new HttpResponse(503));
ReportingAttachment attachment = new ReportingAttachment(
"foo",
"http://www.example.org/",
randomBoolean(),
TimeValue.timeValueMillis(1),
1,
null,
null
);
ElasticsearchException e = expectThrows(
ElasticsearchException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(e.getMessage(), containsString("Aborting due to maximum number of retries hit [1]"));
}
public void testPollingRequestUnknownHTTPError() throws Exception {
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"path\":\"whatever\"}"))
.thenReturn(new HttpResponse(1));
ReportingAttachment attachment = new ReportingAttachment(
"foo",
"http://www.example.org/",
randomBoolean(),
TimeValue.timeValueMillis(1),
null,
null,
null
);
IllegalStateException e = expectThrows(
IllegalStateException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(e.getMessage(), containsString("Unexpected status code"));
}
public void testPollingRequestIOException() throws Exception {
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"path\":\"whatever\"}"))
.thenThrow(new IOException("whatever"));
ReportingAttachment attachment = new ReportingAttachment(
"foo",
"http://www.example.org/",
randomBoolean(),
TimeValue.timeValueMillis(1),
null,
null,
null
);
IOException e = expectThrows(
IOException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
assertThat(e.getMessage(), containsString("whatever"));
}
public void testWithBasicAuth() throws Exception {
String content = randomAlphaOfLength(200);
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"path\":\"whatever\"}"))
.thenReturn(new HttpResponse(503))
.thenReturn(new HttpResponse(200, content));
ReportingAttachment attachment = new ReportingAttachment(
"foo",
dashboardUrl,
randomBoolean(),
TimeValue.timeValueMillis(1),
10,
new BasicAuth("foo", "bar".toCharArray()),
null
);
reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment);
ArgumentCaptor<HttpRequest> requestArgumentCaptor = ArgumentCaptor.forClass(HttpRequest.class);
verify(httpClient, times(3)).execute(requestArgumentCaptor.capture());
List<HttpRequest> allRequests = requestArgumentCaptor.getAllValues();
assertThat(allRequests, hasSize(3));
for (HttpRequest request : allRequests) {
assertThat(request.auth(), is(notNullValue()));
assertThat(request.auth(), instanceOf(BasicAuth.class));
BasicAuth basicAuth = request.auth();
assertThat(basicAuth.getUsername(), is("foo"));
}
}
public void testPollingDefaultsRetries() throws Exception {
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"path\":\"whatever\"}"))
.thenReturn(new HttpResponse(503));
ReportingAttachment attachment = new ReportingAttachment(
"foo",
dashboardUrl,
randomBoolean(),
TimeValue.timeValueMillis(1),
RETRIES_SETTING.getDefault(Settings.EMPTY),
new BasicAuth("foo", "bar".toCharArray()),
null
);
expectThrows(
ElasticsearchException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
verify(httpClient, times(RETRIES_SETTING.getDefault(Settings.EMPTY) + 1)).execute(any());
}
public void testPollingDefaultCanBeOverriddenBySettings() throws Exception {
int retries = 10;
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"path\":\"whatever\"}"))
.thenReturn(new HttpResponse(503));
ReportingAttachment attachment = new ReportingAttachment("foo", dashboardUrl, randomBoolean(), null, null, null, null);
Settings settings = Settings.builder().put(INTERVAL_SETTING.getKey(), "1ms").put(RETRIES_SETTING.getKey(), retries).build();
reportingAttachmentParser = new ReportingAttachmentParser(
settings,
new WebhookService(settings, httpClient, clusterSettings),
templateEngine,
clusterSettings
);
expectThrows(
ElasticsearchException.class,
() -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment)
);
verify(httpClient, times(retries + 1)).execute(any());
}
public void testThatUrlIsTemplatable() throws Exception {
when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(200, "{\"path\":\"whatever\"}"))
.thenReturn(new HttpResponse(503))
.thenReturn(new HttpResponse(200, randomAlphaOfLength(10)));
TextTemplateEngine replaceHttpWithHttpsTemplateEngine = new TextTemplateEngine(null) {
@Override
public String render(TextTemplate textTemplate, Map<String, Object> model) {
return textTemplate.getTemplate().replaceAll("REPLACEME", "REPLACED");
}
};
ReportingAttachment attachment = new ReportingAttachment(
"foo",
"http://www.example.org/REPLACEME",
randomBoolean(),
TimeValue.timeValueMillis(1),
10,
new BasicAuth("foo", "bar".toCharArray()),
null
);
reportingAttachmentParser = new ReportingAttachmentParser(
Settings.EMPTY,
new WebhookService(Settings.EMPTY, httpClient, clusterSettings),
replaceHttpWithHttpsTemplateEngine,
clusterSettings
);
reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment);
ArgumentCaptor<HttpRequest> requestArgumentCaptor = ArgumentCaptor.forClass(HttpRequest.class);
verify(httpClient, times(3)).execute(requestArgumentCaptor.capture());
List<String> paths = requestArgumentCaptor.getAllValues().stream().map(HttpRequest::path).collect(Collectors.toList());
assertThat(paths, not(hasItem(containsString("REPLACEME"))));
}
public void testRetrySettingCannotBeNegative() throws Exception {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> new ReportingAttachment("foo", "http://www.example.org/REPLACEME", randomBoolean(), null, -10, null, null)
);
assertThat(e.getMessage(), is("Retries for attachment must be >= 0"));
Settings invalidSettings = Settings.builder().put("xpack.notification.reporting.retries", -10).build();
e = expectThrows(
IllegalArgumentException.class,
() -> new ReportingAttachmentParser(
invalidSettings,
new WebhookService(invalidSettings, httpClient, clusterSettings),
templateEngine,
clusterSettings
)
);
assertThat(e.getMessage(), is("Failed to parse value [-10] for setting [xpack.notification.reporting.retries] must be >= 0"));
}
public void testHttpProxy() throws Exception {
String content = randomAlphaOfLength(200);
String path = "/ovb/api/reporting/jobs/download/iu5zfzvk15oa8990bfas9wy2";
String randomContentType = randomAlphaOfLength(20);
Map<String, String[]> headers = new HashMap<>();
headers.put("Content-Type", new String[] { randomContentType });
ArgumentCaptor<HttpRequest> requestCaptor = ArgumentCaptor.forClass(HttpRequest.class);
when(httpClient.execute(requestCaptor.capture())).thenReturn(
new HttpResponse(200, "{\"path\":\"" + path + "\", \"other\":\"content\"}")
).thenReturn(new HttpResponse(503)).thenReturn(new HttpResponse(200, content, headers));
HttpProxy proxy = new HttpProxy("localhost", 8080);
ReportingAttachment reportingAttachment = new ReportingAttachment(
"foo",
"http://www.example.org/",
randomBoolean(),
TimeValue.timeValueMillis(1),
null,
null,
proxy
);
reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, reportingAttachment);
assertThat(requestCaptor.getAllValues(), hasSize(3));
requestCaptor.getAllValues().forEach(req -> assertThat(req.proxy(), is(proxy)));
}
public void testDefaultWarnings() throws Exception {
String content = randomAlphaOfLength(200);
String path = "/ovb/api/reporting/jobs/download/iu5zfzvk15oa8990bfas9wy2";
String randomContentType = randomAlphaOfLength(20);
String reportId = randomAlphaOfLength(5);
Map<String, String[]> headers = new HashMap<>();
headers.put("Content-Type", new String[] { randomContentType });
WARNINGS.keySet().forEach((k) -> headers.put(k, new String[] { "true" }));
when(httpClient.execute(any(HttpRequest.class))).thenReturn(
new HttpResponse(200, "{\"path\":\"" + path + "\", \"other\":\"content\"}")
).thenReturn(new HttpResponse(200, content, headers));
ReportingAttachment reportingAttachment = new ReportingAttachment(
reportId,
dashboardUrl,
randomBoolean(),
TimeValue.timeValueMillis(1),
10,
null,
null
);
Attachment attachment = reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, reportingAttachment);
assertThat(attachment, instanceOf(Attachment.Bytes.class));
assertThat(attachment.getWarnings(), hasSize(WARNINGS.keySet().size()));
// parameterize the messages
assertEquals(
attachment.getWarnings(),
WARNINGS.values().stream().map(s -> Strings.format(s, reportId)).collect(Collectors.toSet())
);
Attachment.Bytes bytesAttachment = (Attachment.Bytes) attachment;
assertThat(new String(bytesAttachment.bytes(), StandardCharsets.UTF_8), is(content));
assertThat(bytesAttachment.contentType(), is(randomContentType));
}
public void testCustomWarningsNoParams() throws Exception {
String content = randomAlphaOfLength(200);
String path = "/ovb/api/reporting/jobs/download/iu5zfzvk15oa8990bfas9wy2";
String randomContentType = randomAlphaOfLength(20);
String reportId = randomAlphaOfLength(5);
Map<String, String[]> headers = new HashMap<>();
headers.put("Content-Type", new String[] { randomContentType });
Map<String, String> customWarnings = Maps.newMapWithExpectedSize(WARNINGS.size());
WARNINGS.keySet().forEach((k) -> {
final String warning = randomAlphaOfLength(20);
customWarnings.put(k, warning);
reportingAttachmentParser.addWarningText(k, warning);
headers.put(k, new String[] { "true" });
});
when(httpClient.execute(any(HttpRequest.class))).thenReturn(
new HttpResponse(200, "{\"path\":\"" + path + "\", \"other\":\"content\"}")
).thenReturn(new HttpResponse(200, content, headers));
ReportingAttachment reportingAttachment = new ReportingAttachment(
reportId,
dashboardUrl,
randomBoolean(),
TimeValue.timeValueMillis(1),
10,
null,
null
);
Attachment attachment = reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, reportingAttachment);
assertThat(attachment, instanceOf(Attachment.Bytes.class));
assertThat(attachment.getWarnings(), hasSize(WARNINGS.keySet().size()));
assertEquals(attachment.getWarnings(), new HashSet<>(customWarnings.values()));
Attachment.Bytes bytesAttachment = (Attachment.Bytes) attachment;
assertThat(new String(bytesAttachment.bytes(), StandardCharsets.UTF_8), is(content));
assertThat(bytesAttachment.contentType(), is(randomContentType));
}
public void testCustomWarningsWithParams() throws Exception {
String content = randomAlphaOfLength(200);
String path = "/ovb/api/reporting/jobs/download/iu5zfzvk15oa8990bfas9wy2";
String randomContentType = randomAlphaOfLength(20);
String reportId = randomAlphaOfLength(5);
Map<String, String[]> headers = new HashMap<>();
headers.put("Content-Type", new String[] { randomContentType });
Map<String, String> customWarnings = Maps.newMapWithExpectedSize(WARNINGS.size());
WARNINGS.keySet().forEach((k) -> {
// add a parameter
final String warning = randomAlphaOfLength(20) + " %s";
customWarnings.put(k, warning);
reportingAttachmentParser.addWarningText(k, warning);
headers.put(k, new String[] { "true" });
});
when(httpClient.execute(any(HttpRequest.class))).thenReturn(
new HttpResponse(200, "{\"path\":\"" + path + "\", \"other\":\"content\"}")
).thenReturn(new HttpResponse(200, content, headers));
ReportingAttachment reportingAttachment = new ReportingAttachment(
reportId,
dashboardUrl,
randomBoolean(),
TimeValue.timeValueMillis(1),
10,
null,
null
);
Attachment attachment = reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, reportingAttachment);
assertThat(attachment, instanceOf(Attachment.Bytes.class));
assertThat(attachment.getWarnings(), hasSize(WARNINGS.keySet().size()));
// parameterize the messages
assertEquals(
attachment.getWarnings(),
customWarnings.values().stream().map(s -> Strings.format(s, reportId)).collect(Collectors.toSet())
);
// ensure the reportId is parameterized in
attachment.getWarnings().forEach(s -> { assertThat(s, containsString(reportId)); });
Attachment.Bytes bytesAttachment = (Attachment.Bytes) attachment;
assertThat(new String(bytesAttachment.bytes(), StandardCharsets.UTF_8), is(content));
assertThat(bytesAttachment.contentType(), is(randomContentType));
}
public void testWarningsSuppress() throws Exception {
String content = randomAlphaOfLength(200);
String path = "/ovb/api/reporting/jobs/download/iu5zfzvk15oa8990bfas9wy2";
String randomContentType = randomAlphaOfLength(20);
String reportId = randomAlphaOfLength(5);
Map<String, String[]> headers = new HashMap<>();
headers.put("Content-Type", new String[] { randomContentType });
Map<String, String> customWarnings = Maps.newMapWithExpectedSize(WARNINGS.size());
WARNINGS.keySet().forEach((k) -> {
final String warning = randomAlphaOfLength(20);
customWarnings.put(k, warning);
reportingAttachmentParser.addWarningText(k, warning);
reportingAttachmentParser.setWarningEnabled(false);
headers.put(k, new String[] { "true" });
});
when(httpClient.execute(any(HttpRequest.class))).thenReturn(
new HttpResponse(200, "{\"path\":\"" + path + "\", \"other\":\"content\"}")
).thenReturn(new HttpResponse(200, content, headers));
ReportingAttachment reportingAttachment = new ReportingAttachment(
reportId,
dashboardUrl,
randomBoolean(),
TimeValue.timeValueMillis(1),
10,
null,
null
);
Attachment attachment = reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, reportingAttachment);
assertThat(attachment, instanceOf(Attachment.Bytes.class));
assertThat(attachment.getWarnings(), hasSize(0));
Attachment.Bytes bytesAttachment = (Attachment.Bytes) attachment;
assertThat(new String(bytesAttachment.bytes(), StandardCharsets.UTF_8), is(content));
assertThat(bytesAttachment.contentType(), is(randomContentType));
}
public void testWarningValidation() {
WARNINGS.forEach((k, v) -> {
String keyName = randomAlphaOfLength(5) + "notavalidsettingname";
IllegalArgumentException expectedException = expectThrows(
IllegalArgumentException.class,
() -> ReportingAttachmentParser.warningValidator(keyName, randomAlphaOfLength(10))
);
assertThat(expectedException.getMessage(), containsString(keyName));
assertThat(expectedException.getMessage(), containsString("is not supported"));
});
}
private WatchExecutionContext createWatchExecutionContext() {
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
return mockExecutionContextBuilder("watch1").wid(new Wid(randomAlphaOfLength(5), now))
.payload(new Payload.Simple())
.time("watch1", now)
.metadata(Collections.emptyMap())
.buildMock();
}
private ClusterService mockClusterService() {
ClusterService clusterService = mock(ClusterService.class);
ClusterSettings clusterSettings = new ClusterSettings(
Settings.EMPTY,
Set.of(INTERVAL_SETTING, RETRIES_SETTING, REPORT_WARNING_ENABLED_SETTING, REPORT_WARNING_TEXT)
);
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
return clusterService;
}
}
| ReportingAttachmentParserTests |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/rpchook/DynamicalExtFieldRPCHook.java | {
"start": 1051,
"end": 1814
} | class ____ implements RPCHook {
@Override
public void doBeforeRequest(String remoteAddr, RemotingCommand request) {
String zoneName = System.getProperty(MixAll.ROCKETMQ_ZONE_PROPERTY, System.getenv(MixAll.ROCKETMQ_ZONE_ENV));
if (StringUtils.isNotBlank(zoneName)) {
request.addExtField(MixAll.ZONE_NAME, zoneName);
}
String zoneMode = System.getProperty(MixAll.ROCKETMQ_ZONE_MODE_PROPERTY, System.getenv(MixAll.ROCKETMQ_ZONE_MODE_ENV));
if (StringUtils.isNotBlank(zoneMode)) {
request.addExtField(MixAll.ZONE_MODE, zoneMode);
}
}
@Override
public void doAfterResponse(String remoteAddr, RemotingCommand request, RemotingCommand response) {
}
}
| DynamicalExtFieldRPCHook |
java | apache__rocketmq | test/src/test/java/org/apache/rocketmq/test/offset/OffsetResetForPopIT.java | {
"start": 2245,
"end": 14604
} | class ____ extends BaseConf {
private static final Logger LOGGER = LoggerFactory.getLogger(OffsetResetForPopIT.class);
private String topic;
private String group;
private RMQNormalProducer producer = null;
private RMQPopConsumer consumer = null;
private DefaultMQAdminExt adminExt;
@Before
public void setUp() throws Exception {
// reset pop offset rely on server side offset
brokerController1.getBrokerConfig().setUseServerSideResetOffset(true);
adminExt = BaseConf.getAdmin(NAMESRV_ADDR);
adminExt.start();
topic = MQRandomUtils.getRandomTopic();
this.createAndWaitTopicRegister(BROKER1_NAME, topic);
group = initConsumerGroup();
LOGGER.info(String.format("use topic: %s, group: %s", topic, group));
producer = getProducer(NAMESRV_ADDR, topic);
}
@After
public void tearDown() {
shutdown();
}
private void createAndWaitTopicRegister(String brokerName, String topic) throws Exception {
String brokerAddress = CommandUtil.fetchMasterAddrByBrokerName(adminExt, brokerName);
TopicConfig topicConfig = new TopicConfig(topic);
topicConfig.setReadQueueNums(1);
topicConfig.setWriteQueueNums(1);
adminExt.createAndUpdateTopicConfig(brokerAddress, topicConfig);
await().atMost(30, TimeUnit.SECONDS).until(
() -> MQAdminTestUtils.checkTopicExist(adminExt, topic));
}
private void resetOffsetInner(long resetOffset) {
try {
// reset offset by queue
adminExt.resetOffsetByQueueId(brokerController1.getBrokerAddr(),
consumer.getConsumerGroup(), consumer.getTopic(), 0, resetOffset);
} catch (Exception ignore) {
}
}
private void ackMessageSync(MessageExt messageExt) {
try {
consumer.ackAsync(brokerController1.getBrokerAddr(),
messageExt.getProperty(MessageConst.PROPERTY_POP_CK)).get();
} catch (Exception e) {
e.printStackTrace();
}
}
private void ackMessageSync(List<MessageExt> messageExtList) {
if (messageExtList != null) {
messageExtList.forEach(this::ackMessageSync);
}
}
@Test
public void testResetOffsetAfterPop() throws Exception {
int messageCount = 10;
int resetOffset = 4;
producer.send(messageCount);
consumer = new RMQPopConsumer(NAMESRV_ADDR, topic, "*", group, new RMQNormalListener());
consumer.start();
MessageQueue mq = new MessageQueue(topic, BROKER1_NAME, 0);
PopResult popResult = consumer.pop(brokerController1.getBrokerAddr(), mq);
Assert.assertEquals(10, popResult.getMsgFoundList().size());
resetOffsetInner(resetOffset);
popResult = consumer.pop(brokerController1.getBrokerAddr(), mq);
Assert.assertTrue(popResult != null && popResult.getMsgFoundList() != null);
Assert.assertEquals(messageCount - resetOffset, popResult.getMsgFoundList().size());
}
@Test
public void testResetOffsetThenAckOldForPopOrderly() throws Exception {
int messageCount = 10;
int resetOffset = 2;
producer.send(messageCount);
consumer = new RMQPopConsumer(NAMESRV_ADDR, topic, "*", group, new RMQNormalListener());
consumer.start();
MessageQueue mq = new MessageQueue(topic, BROKER1_NAME, 0);
PopResult popResult1 = consumer.popOrderly(brokerController1.getBrokerAddr(), mq);
Assert.assertEquals(10, popResult1.getMsgFoundList().size());
resetOffsetInner(resetOffset);
ConsumeStats consumeStats = adminExt.examineConsumeStats(group, topic);
Assert.assertEquals(resetOffset, consumeStats.getOffsetTable().get(mq).getConsumerOffset());
PopResult popResult2 = consumer.popOrderly(brokerController1.getBrokerAddr(), mq);
Assert.assertTrue(popResult2 != null && popResult2.getMsgFoundList() != null);
Assert.assertEquals(messageCount - resetOffset, popResult2.getMsgFoundList().size());
// ack old msg, expect has no effect
ackMessageSync(popResult1.getMsgFoundList());
Assert.assertTrue(brokerController1.getConsumerOrderInfoManager()
.checkBlock(null, topic, group, 0, RMQPopConsumer.DEFAULT_INVISIBLE_TIME));
// ack new msg
ackMessageSync(popResult2.getMsgFoundList());
Assert.assertFalse(brokerController1.getConsumerOrderInfoManager()
.checkBlock(null, topic, group, 0, RMQPopConsumer.DEFAULT_INVISIBLE_TIME));
}
@Test
public void testRestOffsetToSkipMsgForPopOrderly() throws Exception {
int messageCount = 10;
int resetOffset = 4;
producer.send(messageCount);
consumer = new RMQPopConsumer(NAMESRV_ADDR, topic, "*", group, new RMQNormalListener());
resetOffsetInner(resetOffset);
consumer.start();
MessageQueue mq = new MessageQueue(topic, BROKER1_NAME, 0);
PopResult popResult = consumer.popOrderly(brokerController1.getBrokerAddr(), mq);
Assert.assertEquals(messageCount - resetOffset, popResult.getMsgFoundList().size());
Assert.assertTrue(brokerController1.getConsumerOrderInfoManager()
.checkBlock(null, topic, group, 0, RMQPopConsumer.DEFAULT_INVISIBLE_TIME));
ackMessageSync(popResult.getMsgFoundList());
TimeUnit.SECONDS.sleep(1);
Assert.assertFalse(brokerController1.getConsumerOrderInfoManager()
.checkBlock(null, topic, group, 0, RMQPopConsumer.DEFAULT_INVISIBLE_TIME));
}
@Test
public void testResetOffsetAfterPopWhenOpenBufferAndWait() throws Exception {
int messageCount = 10;
int resetOffset = 4;
brokerController1.getBrokerConfig().setEnablePopBufferMerge(true);
producer.send(messageCount);
consumer = new RMQPopConsumer(NAMESRV_ADDR, topic, "*", group, new RMQNormalListener());
consumer.start();
MessageQueue mq = new MessageQueue(topic, BROKER1_NAME, 0);
PopResult popResult = consumer.pop(brokerController1.getBrokerAddr(), mq);
Assert.assertEquals(10, popResult.getMsgFoundList().size());
resetOffsetInner(resetOffset);
TimeUnit.MILLISECONDS.sleep(brokerController1.getBrokerConfig().getPopCkStayBufferTimeOut());
popResult = consumer.pop(brokerController1.getBrokerAddr(), mq);
Assert.assertTrue(popResult != null && popResult.getMsgFoundList() != null);
Assert.assertEquals(messageCount - resetOffset, popResult.getMsgFoundList().size());
}
@Test
public void testResetOffsetWhilePopWhenOpenBuffer() {
testResetOffsetWhilePop(8, false, false, 5);
}
@Test
public void testResetOffsetWhilePopWhenOpenBufferAndAck() {
testResetOffsetWhilePop(8, false, true, 5);
}
@Test
public void testMultipleResetOffsetWhilePopWhenOpenBufferAndAck() {
testResetOffsetWhilePop(8, false, true, 3, 5);
}
@Test
public void testResetFutureOffsetWhilePopWhenOpenBufferAndAck() {
testResetOffsetWhilePop(2, true, true, 8);
}
@Test
public void testMultipleResetFutureOffsetWhilePopWhenOpenBufferAndAck() {
testResetOffsetWhilePop(2, true, true, 5, 8);
}
private void testResetOffsetWhilePop(int targetCount, boolean resetFuture, boolean needAck,
int... resetOffset) {
brokerController1.getBrokerConfig().setEnablePopBufferMerge(true);
producer.send(10);
// max pop one message per request
consumer =
new RMQPopConsumer(NAMESRV_ADDR, topic, "*", group, new RMQNormalListener(), 1);
MessageQueue mq = new MessageQueue(topic, BROKER1_NAME, 0);
AtomicInteger counter = new AtomicInteger(0);
consumer.start();
Executors.newSingleThreadScheduledExecutor().execute(() -> {
long start = System.currentTimeMillis();
while (System.currentTimeMillis() - start <= 30 * 1000L) {
try {
PopResult popResult = consumer.pop(brokerController1.getBrokerAddr(), mq);
if (popResult == null || popResult.getMsgFoundList() == null) {
continue;
}
int count = counter.addAndGet(popResult.getMsgFoundList().size());
if (needAck) {
ackMessageSync(popResult.getMsgFoundList());
}
if (count == targetCount) {
for (int offset : resetOffset) {
resetOffsetInner(offset);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
await().atMost(10, TimeUnit.SECONDS).until(() -> {
boolean result = true;
if (resetFuture) {
result = counter.get() < 10;
}
result &= counter.get() >= targetCount + 10 - resetOffset[resetOffset.length - 1];
return result;
});
}
@Test
public void testResetFutureOffsetWhilePopOrderlyAndAck() {
testResetOffsetWhilePopOrderly(1,
Lists.newArrayList(0, 5, 6, 7, 8, 9), Lists.newArrayList(5), 6);
}
@Test
public void testMultipleResetFutureOffsetWhilePopOrderlyAndAck() {
testResetOffsetWhilePopOrderly(1,
Lists.newArrayList(0, 5, 6, 7, 8, 9), Lists.newArrayList(3, 5), 6);
}
@Test
public void testResetOffsetWhilePopOrderlyAndAck() {
testResetOffsetWhilePopOrderly(5,
Lists.newArrayList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9),
Lists.newArrayList(3), 12);
}
@Test
public void testMultipleResetOffsetWhilePopOrderlyAndAck() {
testResetOffsetWhilePopOrderly(5,
Lists.newArrayList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9),
Lists.newArrayList(3, 1), 14);
}
private void testResetOffsetWhilePopOrderly(int targetCount, List<Integer> expectMsgReceive,
List<Integer> resetOffset, int expectCount) {
brokerController1.getBrokerConfig().setEnablePopBufferMerge(true);
for (int i = 0; i < 10; i++) {
Message msg = new Message(topic, (String.valueOf(i)).getBytes());
producer.send(msg);
}
consumer = new RMQPopConsumer(NAMESRV_ADDR, topic, "*", group, new RMQNormalListener(), 1);
MessageQueue mq = new MessageQueue(topic, BROKER1_NAME, 0);
Set<Integer> msgReceive = Collections.newSetFromMap(new ConcurrentHashMap<>());
AtomicInteger counter = new AtomicInteger(0);
consumer.start();
Executors.newSingleThreadScheduledExecutor().execute(() -> {
long start = System.currentTimeMillis();
while (System.currentTimeMillis() - start <= 30 * 1000L) {
try {
PopResult popResult = consumer.popOrderly(brokerController1.getBrokerAddr(), mq);
if (popResult == null || popResult.getMsgFoundList() == null) {
continue;
}
int count = counter.addAndGet(popResult.getMsgFoundList().size());
for (MessageExt messageExt : popResult.getMsgFoundList()) {
msgReceive.add(Integer.valueOf(new String(messageExt.getBody())));
ackMessageSync(messageExt);
}
if (count == targetCount) {
for (int offset : resetOffset) {
resetOffsetInner(offset);
}
}
} catch (Exception e) {
// do nothing;
}
}
});
await().atMost(10, TimeUnit.SECONDS).until(() -> {
boolean result = true;
if (expectMsgReceive.size() != msgReceive.size()) {
return false;
}
if (counter.get() != expectCount) {
return false;
}
for (Integer expectMsg : expectMsgReceive) {
result &= msgReceive.contains(expectMsg);
}
return result;
});
}
}
| OffsetResetForPopIT |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/config/ConfigChangeItemTest.java | {
"start": 813,
"end": 2119
} | class ____ {
@Test
void testSetNewValue() {
ConfigChangeItem item = new ConfigChangeItem("testKey", null, "testValue");
item.setType(PropertyChangeType.ADDED);
assertEquals("testKey", item.getKey());
assertNull(item.getOldValue());
assertEquals("testValue", item.getNewValue());
assertEquals(PropertyChangeType.ADDED, item.getType());
item.setOldValue("testValue");
item.setNewValue("testValue2");
item.setType(PropertyChangeType.MODIFIED);
assertEquals("testKey", item.getKey());
assertEquals("testValue", item.getOldValue());
assertEquals("testValue2", item.getNewValue());
assertEquals(PropertyChangeType.MODIFIED, item.getType());
item.setKey("deletedKey");
item.setType(PropertyChangeType.DELETED);
assertEquals("deletedKey", item.getKey());
assertEquals(PropertyChangeType.DELETED, item.getType());
}
@Test
void testToString() {
ConfigChangeItem item = new ConfigChangeItem("testKey", null, "testValue");
item.setType(PropertyChangeType.ADDED);
assertEquals("ConfigChangeItem{key='testKey', oldValue='null', newValue='testValue', type=ADDED}",
item.toString());
}
} | ConfigChangeItemTest |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/wiring/BeanWiringInfoResolver.java | {
"start": 1159,
"end": 1384
} | class ____ as bean name.
*
* @author Rod Johnson
* @since 2.0
* @see BeanWiringInfo
* @see ClassNameBeanWiringInfoResolver
* @see org.springframework.beans.factory.annotation.AnnotationBeanWiringInfoResolver
*/
public | name |
java | apache__camel | components/camel-aws/camel-aws-secrets-manager/src/test/java/org/apache/camel/component/aws/secretsmanager/SecretsManagerComponentConfigurationTest.java | {
"start": 1018,
"end": 1715
} | class ____ extends CamelTestSupport {
@Test
public void createEndpointWithMinimalConfiguration() throws Exception {
SecretsManagerComponent component = context.getComponent("aws-secrets-manager", SecretsManagerComponent.class);
SecretsManagerEndpoint endpoint = (SecretsManagerEndpoint) component
.createEndpoint("aws-secrets-manager://test?accessKey=xxx&secretKey=yyy®ion=eu-west-1");
assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
assertEquals("eu-west-1", endpoint.getConfiguration().getRegion());
}
}
| SecretsManagerComponentConfigurationTest |
java | micronaut-projects__micronaut-core | router/src/main/java/io/micronaut/web/router/Router.java | {
"start": 1098,
"end": 1260
} | interface ____ allows discovery of a route given an HTTP method and URI.</p>
*
* @author Graeme Rocher
* @since 1.0
*/
@SuppressWarnings("MethodName")
public | that |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/lob/JpaLargeBlobTest.java | {
"start": 2319,
"end": 2737
} | class ____ extends InputStream {
private boolean read = false;
private Long count = (long) 200 * 1024 * 1024;
@Override
public int read() throws IOException {
read = true;
if ( count > 0 ) {
count--;
return new Random().nextInt();
}
return -1;
}
@Override
public int available() throws IOException {
return 1;
}
public boolean wasRead() {
return read;
}
}
}
| LobInputStream |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/CommandSegmentCommandFactory.java | {
"start": 815,
"end": 3653
} | class ____ implements CommandFactory {
private final CommandMethod commandMethod;
private final CommandSegments segments;
private final CommandOutputFactoryResolver outputResolver;
private final RedisCodec<Object, Object> redisCodec;
private final ParameterBinder parameterBinder = new ParameterBinder();
private final CommandOutputFactory outputFactory;
private final TypeContext typeContext;
public CommandSegmentCommandFactory(CommandSegments commandSegments, CommandMethod commandMethod,
RedisCodec<?, ?> redisCodec, CommandOutputFactoryResolver outputResolver) {
this.segments = commandSegments;
this.commandMethod = commandMethod;
this.redisCodec = (RedisCodec) redisCodec;
this.outputResolver = outputResolver;
this.typeContext = new TypeContext(redisCodec);
OutputSelector outputSelector = new OutputSelector(commandMethod.getActualReturnType(), redisCodec);
CommandOutputFactory factory = resolveCommandOutputFactory(outputSelector);
if (factory == null) {
throw new IllegalArgumentException(String.format("Cannot resolve CommandOutput for result type %s on method %s",
commandMethod.getActualReturnType(), commandMethod.getMethod()));
}
if (commandMethod.getParameters() instanceof ExecutionSpecificParameters) {
ExecutionSpecificParameters executionAwareParameters = (ExecutionSpecificParameters) commandMethod.getParameters();
if (commandMethod.isFutureExecution() && executionAwareParameters.hasTimeoutIndex()) {
throw new CommandCreationException(commandMethod,
"Asynchronous command methods do not support Timeout parameters");
}
}
this.outputFactory = factory;
}
protected CommandOutputFactoryResolver getOutputResolver() {
return outputResolver;
}
protected CommandOutputFactory resolveCommandOutputFactory(OutputSelector outputSelector) {
return outputResolver.resolveCommandOutput(outputSelector);
}
@Override
public RedisCommand<Object, Object, Object> createCommand(Object[] parameters) {
MethodParametersAccessor parametersAccessor = new CodecAwareMethodParametersAccessor(
new DefaultMethodParametersAccessor(commandMethod.getParameters(), parameters), typeContext);
CommandArgs<Object, Object> args = new CommandArgs<>(redisCodec);
CommandOutput<Object, Object, ?> output = outputFactory.create(redisCodec);
Command<Object, Object, ?> command = new Command<>(this.segments.getCommandType(), output, args);
parameterBinder.bind(args, redisCodec, segments, parametersAccessor);
return (Command) command;
}
}
| CommandSegmentCommandFactory |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/SummationWritable.java | {
"start": 1192,
"end": 3711
} | class ____ implements WritableComparable<SummationWritable>, Container<Summation> {
private Summation sigma;
public SummationWritable() {}
SummationWritable(Summation sigma) {this.sigma = sigma;}
/** {@inheritDoc} */
@Override
public String toString() {return getClass().getSimpleName() + sigma;}
/** {@inheritDoc} */
@Override
public Summation getElement() {return sigma;}
/** Read sigma from conf */
public static Summation read(Class<?> clazz, Configuration conf) {
return Summation.valueOf(conf.get(clazz.getSimpleName() + ".sigma"));
}
/** Write sigma to conf */
public static void write(Summation sigma, Class<?> clazz, Configuration conf) {
conf.set(clazz.getSimpleName() + ".sigma", sigma.toString());
}
/** Read Summation from DataInput */
static Summation read(DataInput in) throws IOException {
final SummationWritable s = new SummationWritable();
s.readFields(in);
return s.getElement();
}
/** {@inheritDoc} */
@Override
public void readFields(DataInput in) throws IOException {
final ArithmeticProgression N = ArithmeticProgressionWritable.read(in);
final ArithmeticProgression E = ArithmeticProgressionWritable.read(in);
sigma = new Summation(N, E);
if (in.readBoolean()) {
sigma.setValue(in.readDouble());
}
}
/** Write sigma to DataOutput */
public static void write(Summation sigma, DataOutput out) throws IOException {
ArithmeticProgressionWritable.write(sigma.N, out);
ArithmeticProgressionWritable.write(sigma.E, out);
final Double v = sigma.getValue();
if (v == null)
out.writeBoolean(false);
else {
out.writeBoolean(true);
out.writeDouble(v);
}
}
/** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
write(sigma, out);
}
/** {@inheritDoc} */
@Override
public int compareTo(SummationWritable that) {
return this.sigma.compareTo(that.sigma);
}
/** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
else if (obj != null && obj instanceof SummationWritable) {
final SummationWritable that = (SummationWritable)obj;
return this.compareTo(that) == 0;
}
throw new IllegalArgumentException(obj == null? "obj == null":
"obj.getClass()=" + obj.getClass());
}
/** Not supported */
@Override
public int hashCode() {
throw new UnsupportedOperationException();
}
/** A writable | SummationWritable |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java | {
"start": 1232,
"end": 7009
} | class ____ extends AbstractXContentSerializingTestCase<GetScriptLanguageResponse> {
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<ScriptLanguagesInfo, Void> PARSER = new ConstructingObjectParser<>(
"script_languages_info",
true,
(a) -> new ScriptLanguagesInfo(
new HashSet<>((List<String>) a[0]),
((List<Tuple<String, Set<String>>>) a[1]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2))
)
);
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<Tuple<String, Set<String>>, Void> LANGUAGE_CONTEXT_PARSER =
new ConstructingObjectParser<>("language_contexts", true, (m, name) -> new Tuple<>((String) m[0], Set.copyOf((List<String>) m[1])));
static {
PARSER.declareStringArray(constructorArg(), ScriptLanguagesInfo.TYPES_ALLOWED);
PARSER.declareObjectArray(constructorArg(), LANGUAGE_CONTEXT_PARSER, ScriptLanguagesInfo.LANGUAGE_CONTEXTS);
LANGUAGE_CONTEXT_PARSER.declareString(constructorArg(), ScriptLanguagesInfo.LANGUAGE);
LANGUAGE_CONTEXT_PARSER.declareStringArray(constructorArg(), ScriptLanguagesInfo.CONTEXTS);
}
private static int MAX_VALUES = 4;
private static final int MIN_LENGTH = 1;
private static final int MAX_LENGTH = 16;
@Override
protected GetScriptLanguageResponse createTestInstance() {
if (randomBoolean()) {
return new GetScriptLanguageResponse(new ScriptLanguagesInfo(Collections.emptySet(), Collections.emptyMap()));
}
return new GetScriptLanguageResponse(randomInstance());
}
@Override
protected GetScriptLanguageResponse doParseInstance(XContentParser parser) throws IOException {
return new GetScriptLanguageResponse(PARSER.parse(parser, null));
}
@Override
protected Writeable.Reader<GetScriptLanguageResponse> instanceReader() {
return GetScriptLanguageResponse::new;
}
@Override
protected GetScriptLanguageResponse mutateInstance(GetScriptLanguageResponse instance) {
switch (randomInt(2)) {
case 0:
// mutate typesAllowed
return new GetScriptLanguageResponse(
new ScriptLanguagesInfo(mutateStringSet(instance.info.typesAllowed), instance.info.languageContexts)
);
case 1:
// Add language
String language = randomValueOtherThanMany(
instance.info.languageContexts::containsKey,
() -> randomAlphaOfLengthBetween(MIN_LENGTH, MAX_LENGTH)
);
Map<String, Set<String>> languageContexts = new HashMap<>();
instance.info.languageContexts.forEach(languageContexts::put);
languageContexts.put(language, randomStringSet(randomIntBetween(1, MAX_VALUES)));
return new GetScriptLanguageResponse(new ScriptLanguagesInfo(instance.info.typesAllowed, languageContexts));
default:
// Mutate languageContexts
Map<String, Set<String>> lc = new HashMap<>();
if (instance.info.languageContexts.size() == 0) {
lc.put(randomAlphaOfLengthBetween(MIN_LENGTH, MAX_LENGTH), randomStringSet(randomIntBetween(1, MAX_VALUES)));
} else {
int toModify = randomInt(instance.info.languageContexts.size() - 1);
List<String> keys = new ArrayList<>(instance.info.languageContexts.keySet());
for (int i = 0; i < keys.size(); i++) {
String key = keys.get(i);
Set<String> value = instance.info.languageContexts.get(keys.get(i));
if (i == toModify) {
value = mutateStringSet(instance.info.languageContexts.get(keys.get(i)));
}
lc.put(key, value);
}
}
return new GetScriptLanguageResponse(new ScriptLanguagesInfo(instance.info.typesAllowed, lc));
}
}
private static ScriptLanguagesInfo randomInstance() {
Map<String, Set<String>> contexts = new HashMap<>();
for (String context : randomStringSet(randomIntBetween(1, MAX_VALUES))) {
contexts.put(context, randomStringSet(randomIntBetween(1, MAX_VALUES)));
}
return new ScriptLanguagesInfo(randomStringSet(randomInt(MAX_VALUES)), contexts);
}
private static Set<String> randomStringSet(int numInstances) {
Set<String> rand = Sets.newHashSetWithExpectedSize(numInstances);
for (int i = 0; i < numInstances; i++) {
rand.add(randomValueOtherThanMany(rand::contains, () -> randomAlphaOfLengthBetween(MIN_LENGTH, MAX_LENGTH)));
}
return rand;
}
private static Set<String> mutateStringSet(Set<String> strings) {
if (strings.isEmpty()) {
return Set.of(randomAlphaOfLengthBetween(MIN_LENGTH, MAX_LENGTH));
}
if (randomBoolean()) {
Set<String> updated = new HashSet<>(strings);
updated.add(randomValueOtherThanMany(updated::contains, () -> randomAlphaOfLengthBetween(MIN_LENGTH, MAX_LENGTH)));
return updated;
} else {
List<String> sorted = strings.stream().sorted().toList();
int toRemove = randomInt(sorted.size() - 1);
Set<String> updated = new HashSet<>();
for (int i = 0; i < sorted.size(); i++) {
if (i != toRemove) {
updated.add(sorted.get(i));
}
}
return updated;
}
}
}
| GetScriptLanguageResponseTests |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/BuildProfileTest.java | {
"start": 5963,
"end": 6287
} | class ____ implements ContainerResponseFilter {
@Override
public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
throws IOException {
responseContext.getHeaders().add("X-RF-6", "Value");
}
}
public static | ResponseFilter6 |
java | apache__camel | components/camel-ibm/camel-ibm-cos/src/main/java/org/apache/camel/component/ibm/cos/IBMCOSConfiguration.java | {
"start": 7618,
"end": 9285
} | class ____ use when storing objects (e.g., STANDARD, VAULT, COLD, FLEX)
*/
public void setStorageClass(String storageClass) {
this.storageClass = storageClass;
}
public boolean isIncludeBody() {
return includeBody;
}
/**
* Include the object body in the exchange
*/
public void setIncludeBody(boolean includeBody) {
this.includeBody = includeBody;
}
public IBMCOSOperations getOperation() {
return operation;
}
/**
* The operation to perform
*/
public void setOperation(IBMCOSOperations operation) {
this.operation = operation;
}
public boolean isAutocloseBody() {
return autocloseBody;
}
/**
* Whether to automatically close the object input stream after processing
*/
public void setAutocloseBody(boolean autocloseBody) {
this.autocloseBody = autocloseBody;
}
public boolean isAutoCreateBucket() {
return autoCreateBucket;
}
/**
* Automatically create the bucket if it doesn't exist
*/
public void setAutoCreateBucket(boolean autoCreateBucket) {
this.autoCreateBucket = autoCreateBucket;
}
public String getKeyName() {
return keyName;
}
/**
* The key name for the object
*/
public void setKeyName(String keyName) {
this.keyName = keyName;
}
public boolean isIncludeFolders() {
return includeFolders;
}
/**
* Include folders/directories when listing objects
*/
public void setIncludeFolders(boolean includeFolders) {
this.includeFolders = includeFolders;
}
}
| to |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_3200/Issue3227.java | {
"start": 1011,
"end": 1247
} | class ____ extends Parent<Integer>{
@Override
public Integer getCode() {
return code;
}
@Override
public void setCode(Integer code) {
this.code = code;
}
}
}
| Child |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FhirEndpointBuilderFactory.java | {
"start": 1639,
"end": 27163
} | interface ____
extends
EndpointConsumerBuilder {
default AdvancedFhirEndpointConsumerBuilder advanced() {
return (AdvancedFhirEndpointConsumerBuilder) this;
}
/**
* Encoding to use for all request.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param encoding the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder encoding(String encoding) {
doSetProperty("encoding", encoding);
return this;
}
/**
* The FHIR Version to use.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: R4
* Group: common
*
* @param fhirVersion the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder fhirVersion(String fhirVersion) {
doSetProperty("fhirVersion", fhirVersion);
return this;
}
/**
* Sets the name of a parameter to be passed in the exchange In Body.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param inBody the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder inBody(String inBody) {
doSetProperty("inBody", inBody);
return this;
}
/**
* Will log every requests and responses.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param log the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder log(boolean log) {
doSetProperty("log", log);
return this;
}
/**
* Will log every requests and responses.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param log the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder log(String log) {
doSetProperty("log", log);
return this;
}
/**
* Pretty print all request.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param prettyPrint the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder prettyPrint(boolean prettyPrint) {
doSetProperty("prettyPrint", prettyPrint);
return this;
}
/**
* Pretty print all request.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param prettyPrint the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder prettyPrint(String prettyPrint) {
doSetProperty("prettyPrint", prettyPrint);
return this;
}
/**
* The FHIR server base URL.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param serverUrl the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder serverUrl(String serverUrl) {
doSetProperty("serverUrl", serverUrl);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param sendEmptyMessageWhenIdle the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder sendEmptyMessageWhenIdle(boolean sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param sendEmptyMessageWhenIdle the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder sendEmptyMessageWhenIdle(String sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* The proxy host.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyHost the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder proxyHost(String proxyHost) {
doSetProperty("proxyHost", proxyHost);
return this;
}
/**
* The proxy password.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyPassword the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder proxyPassword(String proxyPassword) {
doSetProperty("proxyPassword", proxyPassword);
return this;
}
/**
* The proxy port.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: proxy
*
* @param proxyPort the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder proxyPort(Integer proxyPort) {
doSetProperty("proxyPort", proxyPort);
return this;
}
/**
* The proxy port.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: proxy
*
* @param proxyPort the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder proxyPort(String proxyPort) {
doSetProperty("proxyPort", proxyPort);
return this;
}
/**
* The proxy username.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyUser the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder proxyUser(String proxyUser) {
doSetProperty("proxyUser", proxyUser);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffErrorThreshold the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder backoffErrorThreshold(int backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffErrorThreshold the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder backoffErrorThreshold(String backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffIdleThreshold the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder backoffIdleThreshold(int backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffIdleThreshold the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder backoffIdleThreshold(String backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffMultiplier the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder backoffMultiplier(int backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffMultiplier the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder backoffMultiplier(String backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option is a: <code>long</code> type.
*
* Default: 500
* Group: scheduler
*
* @param delay the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder delay(long delay) {
doSetProperty("delay", delay);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 500
* Group: scheduler
*
* @param delay the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder delay(String delay) {
doSetProperty("delay", delay);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*
* @param greedy the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder greedy(boolean greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*
* @param greedy the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder greedy(String greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*
* @param initialDelay the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder initialDelay(long initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*
* @param initialDelay the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder initialDelay(String initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option is a: <code>long</code> type.
*
* Default: 0
* Group: scheduler
*
* @param repeatCount the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder repeatCount(long repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 0
* Group: scheduler
*
* @param repeatCount the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder repeatCount(String repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option is a: <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*
* @param runLoggingLevel the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder runLoggingLevel(org.apache.camel.LoggingLevel runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option will be converted to a
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*
* @param runLoggingLevel the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder runLoggingLevel(String runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option is a:
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*
* @param scheduledExecutorService the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder scheduledExecutorService(ScheduledExecutorService scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option will be converted to a
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*
* @param scheduledExecutorService the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder scheduledExecutorService(String scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option is a: <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*
* @param scheduler the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder scheduler(Object scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option will be converted to a <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*
* @param scheduler the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder scheduler(String scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler. This is a multi-value
* option with prefix: scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder schedulerProperties(String key, Object value) {
doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler. This is a multi-value
* option with prefix: scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*
* @param values the values
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder schedulerProperties(Map values) {
doSetMultiValueProperties("schedulerProperties", "scheduler.", values);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param startScheduler the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder startScheduler(boolean startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param startScheduler the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder startScheduler(String startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option is a: <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*
* @param timeUnit the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder timeUnit(TimeUnit timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option will be converted to a
* <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*
* @param timeUnit the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder timeUnit(String timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param useFixedDelay the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder useFixedDelay(boolean useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param useFixedDelay the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder useFixedDelay(String useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* OAuth access token.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param accessToken the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder accessToken(String accessToken) {
doSetProperty("accessToken", accessToken);
return this;
}
/**
* Password to use for basic authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* Username to use for basic authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default FhirEndpointConsumerBuilder username(String username) {
doSetProperty("username", username);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the FHIR component.
*/
public | FhirEndpointConsumerBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/DateTimeUtils.java | {
"start": 1005,
"end": 18630
} | class ____ {
private DateTimeUtils() {
}
public static final String FORMAT_STRING_DATE = "yyyy-MM-dd";
public static final String FORMAT_STRING_TIME_WITH_OFFSET = "HH:mm:ssXXX";
public static final String FORMAT_STRING_TIME = "HH:mm:ss";
public static final String FORMAT_STRING_TIMESTAMP = "yyyy-MM-dd HH:mm:ss";
public static final String FORMAT_STRING_TIMESTAMP_WITH_MILLIS = FORMAT_STRING_TIMESTAMP + ".SSS";
public static final String FORMAT_STRING_TIMESTAMP_WITH_MICROS = FORMAT_STRING_TIMESTAMP + ".SSSSSS";
public static final String FORMAT_STRING_TIMESTAMP_WITH_NANOS = FORMAT_STRING_TIMESTAMP + ".SSSSSSSSS";
public static final String FORMAT_STRING_TIMESTAMP_WITH_MILLIS_AND_OFFSET = FORMAT_STRING_TIMESTAMP_WITH_MILLIS + "XXX";
public static final String FORMAT_STRING_TIMESTAMP_WITH_MICROS_AND_OFFSET = FORMAT_STRING_TIMESTAMP_WITH_MICROS + "XXX";
public static final String FORMAT_STRING_TIMESTAMP_WITH_NANOS_AND_OFFSET = FORMAT_STRING_TIMESTAMP_WITH_NANOS + "XXX";
public static final String FORMAT_STRING_TIMESTAMP_WITH_MICROS_AND_OFFSET_NOZ = FORMAT_STRING_TIMESTAMP_WITH_MICROS + "xxx";
public static final String FORMAT_STRING_TIMESTAMP_WITH_NANOS_AND_OFFSET_NOZ = FORMAT_STRING_TIMESTAMP_WITH_NANOS + "xxx";
public static final DateTimeFormatter DATE_TIME_FORMATTER_DATE = DateTimeFormatter.ofPattern( FORMAT_STRING_DATE, Locale.ENGLISH );
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIME_WITH_OFFSET = DateTimeFormatter.ofPattern( FORMAT_STRING_TIME_WITH_OFFSET, Locale.ENGLISH );
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIME = DateTimeFormatter.ofPattern( FORMAT_STRING_TIME, Locale.ENGLISH );
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIMESTAMP_WITH_MILLIS = DateTimeFormatter.ofPattern(
FORMAT_STRING_TIMESTAMP_WITH_MILLIS,
Locale.ENGLISH
);
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS = DateTimeFormatter.ofPattern(
FORMAT_STRING_TIMESTAMP_WITH_MICROS,
Locale.ENGLISH
);
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS = DateTimeFormatter.ofPattern(
FORMAT_STRING_TIMESTAMP_WITH_NANOS,
Locale.ENGLISH
);
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIMESTAMP_WITH_MILLIS_AND_OFFSET = DateTimeFormatter.ofPattern(
FORMAT_STRING_TIMESTAMP_WITH_MILLIS_AND_OFFSET,
Locale.ENGLISH
);
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS_AND_OFFSET = DateTimeFormatter.ofPattern(
FORMAT_STRING_TIMESTAMP_WITH_MICROS_AND_OFFSET,
Locale.ENGLISH
);
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS_AND_OFFSET_NOZ = DateTimeFormatter.ofPattern(
FORMAT_STRING_TIMESTAMP_WITH_MICROS_AND_OFFSET_NOZ,
Locale.ENGLISH
);
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS_AND_OFFSET = DateTimeFormatter.ofPattern(
FORMAT_STRING_TIMESTAMP_WITH_NANOS_AND_OFFSET,
Locale.ENGLISH
);
public static final DateTimeFormatter DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS_AND_OFFSET_NOZ = DateTimeFormatter.ofPattern(
FORMAT_STRING_TIMESTAMP_WITH_NANOS_AND_OFFSET_NOZ,
Locale.ENGLISH
);
public static final String JDBC_ESCAPE_START_DATE = "{d '";
public static final String JDBC_ESCAPE_START_TIME = "{t '";
public static final String JDBC_ESCAPE_START_TIMESTAMP = "{ts '";
public static final String JDBC_ESCAPE_END = "'}";
/**
* Pattern used for parsing literal datetimes in HQL.
*
* Recognizes timestamps consisting of a date and time separated
* by either T or a space, and with an optional offset or time
* zone ID. Ideally we should accept both ISO and SQL standard
* zoned timestamp formats here.
*/
public static final DateTimeFormatter DATE_TIME = new DateTimeFormatterBuilder()
.parseCaseInsensitive()
.append( ISO_LOCAL_DATE )
.optionalStart().appendLiteral( ' ' ).optionalEnd()
.optionalStart().appendLiteral( 'T' ).optionalEnd()
.append( ISO_LOCAL_TIME )
.optionalStart().appendLiteral( ' ' ).optionalEnd()
.optionalStart().appendZoneOrOffsetId().optionalEnd()
.toFormatter();
private static final ThreadLocal<SimpleDateFormat> LOCAL_DATE_FORMAT =
ThreadLocal.withInitial( () -> new SimpleDateFormat( FORMAT_STRING_DATE, Locale.ENGLISH ) );
private static final ThreadLocal<SimpleDateFormat> LOCAL_TIME_FORMAT =
ThreadLocal.withInitial( () -> new SimpleDateFormat( FORMAT_STRING_TIME, Locale.ENGLISH ) );
private static final ThreadLocal<SimpleDateFormat> TIME_WITH_OFFSET_FORMAT =
ThreadLocal.withInitial( () -> new SimpleDateFormat( FORMAT_STRING_TIME_WITH_OFFSET, Locale.ENGLISH ) );
private static final ThreadLocal<SimpleDateFormat> TIMESTAMP_WITH_MILLIS_FORMAT =
ThreadLocal.withInitial( () -> new SimpleDateFormat( FORMAT_STRING_TIMESTAMP_WITH_MILLIS, Locale.ENGLISH ) );
/**
* Pattern used for parsing literal offset datetimes in HQL.
*
* Recognizes timestamps consisting of a date and time separated
* by either T or a space, and with a required offset. Ideally we
* should accept both ISO and SQL standard timestamp formats here.
*/
public static final DateTimeFormatter OFFSET_DATE_TIME = new DateTimeFormatterBuilder()
.parseCaseInsensitive()
.append( ISO_LOCAL_DATE )
.optionalStart().appendLiteral( ' ' ).optionalEnd()
.optionalStart().appendLiteral( 'T' ).optionalEnd()
.append( ISO_LOCAL_TIME )
.optionalStart().appendLiteral( ' ' ).optionalEnd()
.appendOffset("+HH:mm", "+00")
.toFormatter();
public static void appendAsTimestampWithNanos(
SqlAppender appender,
TemporalAccessor temporalAccessor,
boolean supportsOffset,
TimeZone jdbcTimeZone) {
appendAsTimestamp(
appender,
temporalAccessor,
supportsOffset,
jdbcTimeZone,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS_AND_OFFSET
);
}
public static void appendAsTimestampWithNanos(
SqlAppender appender,
TemporalAccessor temporalAccessor,
boolean supportsOffset,
TimeZone jdbcTimeZone,
boolean allowZforZeroOffset) {
appendAsTimestamp(
appender,
temporalAccessor,
supportsOffset,
jdbcTimeZone,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS,
allowZforZeroOffset
? DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS_AND_OFFSET
: DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS_AND_OFFSET_NOZ
);
}
public static void appendAsTimestampWithMicros(
SqlAppender appender,
TemporalAccessor temporalAccessor,
boolean supportsOffset,
TimeZone jdbcTimeZone) {
appendAsTimestamp(
appender,
temporalAccessor,
supportsOffset,
jdbcTimeZone,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS_AND_OFFSET
);
}
public static void appendAsTimestampWithMicros(
SqlAppender appender,
TemporalAccessor temporalAccessor,
boolean supportsOffset,
TimeZone jdbcTimeZone,
boolean allowZforZeroOffset) {
appendAsTimestamp(
appender,
temporalAccessor,
supportsOffset,
jdbcTimeZone,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS,
allowZforZeroOffset
? DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS_AND_OFFSET
: DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS_AND_OFFSET_NOZ
);
}
public static void appendAsTimestampWithMillis(
SqlAppender appender,
TemporalAccessor temporalAccessor,
boolean supportsOffset,
TimeZone jdbcTimeZone) {
appendAsTimestamp(
appender,
temporalAccessor,
supportsOffset,
jdbcTimeZone,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_MILLIS,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_MILLIS_AND_OFFSET
);
}
private static void appendAsTimestamp(
SqlAppender appender,
TemporalAccessor temporalAccessor,
boolean supportsOffset,
TimeZone jdbcTimeZone,
DateTimeFormatter format,
DateTimeFormatter formatWithOffset) {
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
if ( supportsOffset ) {
formatWithOffset.formatTo( temporalAccessor, appender );
}
else {
format.formatTo(
LocalDateTime.ofInstant(
Instant.from( temporalAccessor ),
jdbcTimeZone.toZoneId()
),
appender
);
}
}
else if ( temporalAccessor instanceof Instant instant ) {
if ( supportsOffset ) {
formatWithOffset.formatTo(
instant.atZone( jdbcTimeZone.toZoneId() ),
appender
);
}
else {
format.formatTo(
LocalDateTime.ofInstant( instant, jdbcTimeZone.toZoneId() ),
appender
);
}
}
else {
format.formatTo( temporalAccessor, appender );
}
}
public static void appendAsDate(SqlAppender appender, TemporalAccessor temporalAccessor) {
DATE_TIME_FORMATTER_DATE.formatTo( temporalAccessor, appender );
}
public static void appendAsTime(
SqlAppender appender,
TemporalAccessor temporalAccessor,
boolean supportsOffset,
TimeZone jdbcTimeZone) {
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
if ( supportsOffset ) {
DATE_TIME_FORMATTER_TIME_WITH_OFFSET.formatTo( temporalAccessor, appender );
}
else {
DATE_TIME_FORMATTER_TIME.formatTo( LocalTime.from( temporalAccessor ), appender );
}
}
else {
DATE_TIME_FORMATTER_TIME.formatTo( temporalAccessor, appender );
}
}
public static void appendAsLocalTime(SqlAppender appender, TemporalAccessor temporalAccessor) {
DATE_TIME_FORMATTER_TIME.formatTo( temporalAccessor, appender );
}
public static void appendAsTimestampWithMillis(SqlAppender appender, java.util.Date date, TimeZone jdbcTimeZone) {
final SimpleDateFormat simpleDateFormat = TIMESTAMP_WITH_MILLIS_FORMAT.get();
final TimeZone originalTimeZone = simpleDateFormat.getTimeZone();
try {
simpleDateFormat.setTimeZone( jdbcTimeZone );
appender.appendSql( simpleDateFormat.format( date ) );
}
finally {
simpleDateFormat.setTimeZone( originalTimeZone );
}
}
public static void appendAsTimestampWithMicros(SqlAppender appender, Date date, TimeZone jdbcTimeZone) {
if ( date instanceof Timestamp ) {
// java.sql.Timestamp supports nano sec
appendAsTimestamp(
appender,
date.toInstant().atZone( jdbcTimeZone.toZoneId() ),
false,
jdbcTimeZone,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_MICROS_AND_OFFSET
);
}
else {
// java.util.Date supports only milli sec
final SimpleDateFormat simpleDateFormat = TIMESTAMP_WITH_MILLIS_FORMAT.get();
final TimeZone originalTimeZone = simpleDateFormat.getTimeZone();
try {
simpleDateFormat.setTimeZone( jdbcTimeZone );
appender.appendSql( simpleDateFormat.format( date ) );
}
finally {
simpleDateFormat.setTimeZone( originalTimeZone );
}
}
}
public static void appendAsTimestampWithNanos(SqlAppender appender, Date date, TimeZone jdbcTimeZone) {
if ( date instanceof Timestamp ) {
// java.sql.Timestamp supports nano sec
appendAsTimestamp(
appender,
date.toInstant().atZone( jdbcTimeZone.toZoneId() ),
false,
jdbcTimeZone,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS,
DATE_TIME_FORMATTER_TIMESTAMP_WITH_NANOS_AND_OFFSET
);
}
else {
// java.util.Date supports only milli sec
final SimpleDateFormat simpleDateFormat = TIMESTAMP_WITH_MILLIS_FORMAT.get();
final TimeZone originalTimeZone = simpleDateFormat.getTimeZone();
try {
simpleDateFormat.setTimeZone( jdbcTimeZone );
appender.appendSql( simpleDateFormat.format( date ) );
}
finally {
simpleDateFormat.setTimeZone( originalTimeZone );
}
}
}
public static void appendAsDate(SqlAppender appender, Date date) {
appender.appendSql( LOCAL_DATE_FORMAT.get().format( date ) );
}
/**
* @deprecated Use {@link #appendAsLocalTime(SqlAppender, Date)} instead
*/
@Deprecated
public static void appendAsTime(SqlAppender appender, java.util.Date date) {
appendAsLocalTime( appender, date );
}
public static void appendAsTime(SqlAppender appender, java.util.Date date, TimeZone jdbcTimeZone) {
final SimpleDateFormat simpleDateFormat = TIME_WITH_OFFSET_FORMAT.get();
final TimeZone originalTimeZone = simpleDateFormat.getTimeZone();
try {
simpleDateFormat.setTimeZone( jdbcTimeZone );
appender.appendSql( simpleDateFormat.format( date ) );
}
finally {
simpleDateFormat.setTimeZone( originalTimeZone );
}
}
public static void appendAsLocalTime(SqlAppender appender, Date date) {
appender.appendSql( LOCAL_TIME_FORMAT.get().format( date ) );
}
public static void appendAsTimestampWithMillis(
SqlAppender appender,
java.util.Calendar calendar,
TimeZone jdbcTimeZone) {
final SimpleDateFormat simpleDateFormat = TIMESTAMP_WITH_MILLIS_FORMAT.get();
final TimeZone originalTimeZone = simpleDateFormat.getTimeZone();
try {
simpleDateFormat.setTimeZone( jdbcTimeZone );
appender.appendSql( simpleDateFormat.format( calendar.getTime() ) );
}
finally {
simpleDateFormat.setTimeZone( originalTimeZone );
}
}
public static void appendAsDate(SqlAppender appender, java.util.Calendar calendar) {
final SimpleDateFormat simpleDateFormat = LOCAL_DATE_FORMAT.get();
final TimeZone originalTimeZone = simpleDateFormat.getTimeZone();
try {
simpleDateFormat.setTimeZone( calendar.getTimeZone() );
appender.appendSql( simpleDateFormat.format( calendar.getTime() ) );
}
finally {
simpleDateFormat.setTimeZone( originalTimeZone );
}
}
/**
* @deprecated Use {@link #appendAsLocalTime(SqlAppender, Calendar)} instead
*/
@Deprecated
public static void appendAsTime(SqlAppender appender, java.util.Calendar calendar) {
appendAsLocalTime( appender, calendar );
}
public static void appendAsTime(SqlAppender appender, java.util.Calendar calendar, TimeZone jdbcTimeZone) {
final SimpleDateFormat simpleDateFormat = TIME_WITH_OFFSET_FORMAT.get();
final TimeZone originalTimeZone = simpleDateFormat.getTimeZone();
try {
simpleDateFormat.setTimeZone( jdbcTimeZone );
appender.appendSql( simpleDateFormat.format( calendar.getTime() ) );
}
finally {
simpleDateFormat.setTimeZone( originalTimeZone );
}
}
public static void appendAsLocalTime(SqlAppender appender, Calendar calendar) {
appender.appendSql( LOCAL_TIME_FORMAT.get().format( calendar.getTime() ) );
}
/**
* Do the same conversion that databases do when they encounter a timestamp with a higher precision
* than what is supported by a column, which is to round the excess fractions.
*/
public static <T extends Temporal> T adjustToDefaultPrecision(T temporal, Dialect d) {
return adjustToPrecision( temporal, d.getDefaultTimestampPrecision(), d );
}
public static <T extends Temporal> T adjustToPrecision(T temporal, int precision, Dialect dialect) {
return dialect.doesRoundTemporalOnOverflow()
? roundToSecondPrecision( temporal, precision )
: truncateToPrecision( temporal, precision );
}
public static <T extends Temporal> T truncateToPrecision(T temporal, int precision) {
if ( precision >= 9 || !temporal.isSupported( ChronoField.NANO_OF_SECOND ) ) {
return temporal;
}
final long factor = pow10( 9 - precision );
//noinspection unchecked
return (T) temporal.with(
ChronoField.NANO_OF_SECOND,
temporal.get( ChronoField.NANO_OF_SECOND ) / factor * factor
);
}
/**
* Do the same conversion that databases do when they encounter a timestamp with a higher precision
* than what is supported by a column, which is to round the excess fractions.
*
* @deprecated Use {@link #adjustToDefaultPrecision(Temporal, Dialect)} instead
*/
@Deprecated(forRemoval = true, since = "6.6.1")
public static <T extends Temporal> T roundToDefaultPrecision(T temporal, Dialect d) {
final int defaultTimestampPrecision = d.getDefaultTimestampPrecision();
if ( defaultTimestampPrecision >= 9 || !temporal.isSupported( ChronoField.NANO_OF_SECOND ) ) {
return temporal;
}
//noinspection unchecked
return (T) temporal.with(
ChronoField.NANO_OF_SECOND,
roundToPrecision( temporal.get( ChronoField.NANO_OF_SECOND ), defaultTimestampPrecision )
);
}
public static <T extends Temporal> T roundToSecondPrecision(T temporal, int precision) {
if ( precision >= 9 || !temporal.isSupported( ChronoField.NANO_OF_SECOND ) ) {
return temporal;
}
if ( precision == 0 ) {
//noinspection unchecked
return temporal.get( ChronoField.NANO_OF_SECOND ) >= 500_000_000L
? (T) temporal.plus( 1, ChronoUnit.SECONDS ).with( ChronoField.NANO_OF_SECOND, 0L )
: (T) temporal.with( ChronoField.NANO_OF_SECOND, 0L );
}
final long nanos = roundToPrecision( temporal.get( ChronoField.NANO_OF_SECOND ), precision );
if ( nanos == 1000000000L ) {
//noinspection unchecked
return (T) temporal.plus( 1L, ChronoUnit.SECONDS ).with( ChronoField.NANO_OF_SECOND, 0L );
}
//noinspection unchecked
return (T) temporal.with( ChronoField.NANO_OF_SECOND, nanos );
}
public static long roundToPrecision(int nano, int precision) {
assert precision > 0 : "Can't round fractional seconds to less-than 0";
if ( precision >= 9 ) {
return nano;
}
final int precisionMask = pow10( 9 - precision );
final int nanosToRound = nano % precisionMask;
return nano - nanosToRound + ( nanosToRound >= ( precisionMask >> 1 ) ? precisionMask : 0 );
}
private static int pow10(int exponent) {
return switch ( exponent ) {
case 0 -> 1;
case 1 -> 10;
case 2 -> 100;
case 3 -> 1_000;
case 4 -> 10_000;
case 5 -> 100_000;
case 6 -> 1_000_000;
case 7 -> 10_000_000;
case 8 -> 100_000_000;
default -> (int) Math.pow( 10, exponent );
};
}
}
| DateTimeUtils |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/pool/SynchronizationTest.java | {
"start": 1040,
"end": 4468
} | class ____ extends AsyncTestBase {
private static Long iterationsForOneMilli;
private static long iterationsForOneMilli() {
Long val = iterationsForOneMilli;
if (val == null) {
val = Utils.calibrateBlackhole();
iterationsForOneMilli = val;
}
return val;
}
private static void burnCPU(long cpu) {
final long target_delay = Utils.ONE_MICRO_IN_NANO * cpu;
long num_iters = Math.round(target_delay * 1.0 * iterationsForOneMilli() / Utils.ONE_MILLI_IN_NANO);
Utils.blackholeCpu(num_iters);
}
@Test
public void testActionReentrancy() throws Exception {
AtomicBoolean isReentrant1 = new AtomicBoolean();
AtomicBoolean isReentrant2 = new AtomicBoolean();
Executor<Object> sync = new CombinerExecutor<>(new Object());
CountDownLatch latch = new CountDownLatch(2);
sync.submit(state1 -> {
AtomicBoolean inCallback = new AtomicBoolean();
inCallback.set(true);
try {
sync.submit(state2 -> {
isReentrant1.set(inCallback.get());
latch.countDown();
return new Task() {
@Override
public void run() {
isReentrant2.set(inCallback.get());
latch.countDown();
}
};
});
} finally {
inCallback.set(false);
}
return null;
});
awaitLatch(latch);
assertFalse(isReentrant1.get());
assertFalse(isReentrant2.get());
}
@Test
public void testActionReentrancy2() throws Exception {
List<Integer> log = new LinkedList<>();
Executor<Object> combiner1 = new CombinerExecutor<>(new Object());
Executor<Object> combiner2 = new CombinerExecutor<>(new Object());
int[] reentrancy = new int[2];
combiner1.submit(state1 -> taskOf(() -> {
assertEquals(0, reentrancy[0]++);
combiner1.submit(state2 -> taskOf(() -> {
assertEquals(0, reentrancy[0]++);
log.add(0);
reentrancy[0]--;
}));
combiner2.submit(state2 -> taskOf(() -> {
assertEquals(0, reentrancy[1]++);
log.add(1);
combiner1.submit(state3 -> taskOf(() -> {
assertEquals(0, reentrancy[0]++);
log.add(2);
reentrancy[0]--;
}));
combiner2.submit(state3 -> taskOf(() -> {
assertEquals(0, reentrancy[1]++);
log.add(3);
reentrancy[1]--;
}));
reentrancy[1]--;
}));
reentrancy[0]--;
}));
assertEquals(0, reentrancy[0]);
assertEquals(0, reentrancy[1]);
assertEquals(Arrays.asList(1, 3, 0, 2), log);
}
static Task taskOf(Runnable runnable) {
return new Task() {
@Override
public void run() {
runnable.run();
}
};
}
@Test
public void testFoo() throws Exception {
Assume.assumeFalse(io.vertx.core.impl.Utils.isWindows());
int numThreads = 8;
int numIter = 1_000 * 100;
Executor<Object> sync = new CombinerExecutor<>(new Object());
Executor.Action<Object> action = s -> {
burnCPU(10);
return null;
};
Thread[] threads = new Thread[numThreads];
for (int i = 0;i < numThreads;i++) {
threads[i] = new Thread(() -> {
for (int j = 0;j < numIter;j++) {
sync.submit(action);
}
});
}
for (Thread t : threads) {
t.start();
}
for (Thread t : threads) {
t.join();
}
}
public static | SynchronizationTest |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/model/capacity/Capacity.java | {
"start": 932,
"end": 3132
} | class ____ implements Serializable {
private static final long serialVersionUID = 77343194329627468L;
@JsonSerialize(using = ToStringSerializer.class)
private Long id;
private Integer quota;
private Integer usage;
private Integer maxSize;
private Integer maxAggrCount;
private Integer maxAggrSize;
private Timestamp gmtCreate;
private Timestamp gmtModified;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Integer getQuota() {
return quota;
}
public void setQuota(Integer quota) {
this.quota = quota;
}
public Integer getUsage() {
return usage;
}
public void setUsage(Integer usage) {
this.usage = usage;
}
public Integer getMaxSize() {
return maxSize;
}
public void setMaxSize(Integer maxSize) {
this.maxSize = maxSize;
}
public Integer getMaxAggrCount() {
return maxAggrCount;
}
public void setMaxAggrCount(Integer maxAggrCount) {
this.maxAggrCount = maxAggrCount;
}
public Integer getMaxAggrSize() {
return maxAggrSize;
}
public void setMaxAggrSize(Integer maxAggrSize) {
this.maxAggrSize = maxAggrSize;
}
public Timestamp getGmtCreate() {
if (gmtCreate == null) {
return null;
}
return new Timestamp(gmtCreate.getTime());
}
public void setGmtCreate(Timestamp gmtCreate) {
if (gmtCreate == null) {
this.gmtCreate = null;
} else {
this.gmtCreate = new Timestamp(gmtCreate.getTime());
}
}
public Timestamp getGmtModified() {
if (gmtModified == null) {
return null;
}
return new Timestamp(gmtModified.getTime());
}
public void setGmtModified(Timestamp gmtModified) {
if (gmtModified == null) {
this.gmtModified = null;
} else {
this.gmtModified = new Timestamp(gmtModified.getTime());
}
}
}
| Capacity |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/records/impl/pb/GetApplicationsHomeSubClusterRequestPBImpl.java | {
"start": 1696,
"end": 3920
} | class ____
extends GetApplicationsHomeSubClusterRequest {
private GetApplicationsHomeSubClusterRequestProto proto =
GetApplicationsHomeSubClusterRequestProto.getDefaultInstance();
private GetApplicationsHomeSubClusterRequestProto.Builder builder = null;
private boolean viaProto = false;
public GetApplicationsHomeSubClusterRequestPBImpl() {
builder = GetApplicationsHomeSubClusterRequestProto.newBuilder();
}
public GetApplicationsHomeSubClusterRequestPBImpl(
GetApplicationsHomeSubClusterRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public GetApplicationsHomeSubClusterRequestProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null) {
return false;
}
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetApplicationsHomeSubClusterRequestProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public SubClusterId getSubClusterId() {
GetApplicationsHomeSubClusterRequestProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasSubClusterId()) {
return null;
}
return convertFromProtoFormat(p.getSubClusterId());
}
@Override
public void setSubClusterId(SubClusterId subClusterId) {
maybeInitBuilder();
if (subClusterId == null) {
builder.clearSubClusterId();
return;
}
builder.setSubClusterId(convertToProtoFormat(subClusterId));
}
private SubClusterId convertFromProtoFormat(YarnServerFederationProtos.SubClusterIdProto sc) {
return new SubClusterIdPBImpl(sc);
}
private YarnServerFederationProtos.SubClusterIdProto convertToProtoFormat(SubClusterId sc) {
return ((SubClusterIdPBImpl) sc).getProto();
}
}
| GetApplicationsHomeSubClusterRequestPBImpl |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/function/DefaultEntityResponseBuilder.java | {
"start": 8073,
"end": 12511
} | class ____<T> extends AbstractServerResponse implements EntityResponse<T> {
private final T entity;
private final Type entityType;
public DefaultEntityResponse(HttpStatusCode statusCode, HttpHeaders headers,
MultiValueMap<String, Cookie> cookies, T entity, Type entityType) {
super(statusCode, headers, cookies);
this.entity = entity;
this.entityType = entityType;
}
@Override
public T entity() {
return this.entity;
}
@Override
protected @Nullable ModelAndView writeToInternal(HttpServletRequest servletRequest,
HttpServletResponse servletResponse, Context context)
throws ServletException, IOException {
writeEntityWithMessageConverters(this.entity, servletRequest,servletResponse, context);
return null;
}
@SuppressWarnings({ "unchecked", "resource", "rawtypes" })
protected void writeEntityWithMessageConverters(Object entity, HttpServletRequest request,
HttpServletResponse response, ServerResponse.Context context)
throws ServletException, IOException {
ServletServerHttpResponse serverResponse = new ServletServerHttpResponse(response);
MediaType contentType = getContentType(response);
Class<?> entityClass = entity.getClass();
Type entityType = this.entityType;
if (entityClass != InputStreamResource.class && Resource.class.isAssignableFrom(entityClass)) {
serverResponse.getHeaders().set(HttpHeaders.ACCEPT_RANGES, "bytes");
String rangeHeader = request.getHeader(HttpHeaders.RANGE);
if (rangeHeader != null) {
Resource resource = (Resource) entity;
try {
List<HttpRange> httpRanges = HttpRange.parseRanges(rangeHeader);
serverResponse.getServletResponse().setStatus(HttpStatus.PARTIAL_CONTENT.value());
entity = HttpRange.toResourceRegions(httpRanges, resource);
entityClass = entity.getClass();
entityType = RESOURCE_REGION_LIST_TYPE;
}
catch (IllegalArgumentException ex) {
serverResponse.getHeaders().set(HttpHeaders.CONTENT_RANGE, "bytes */" + resource.contentLength());
serverResponse.getServletResponse().setStatus(HttpStatus.REQUESTED_RANGE_NOT_SATISFIABLE.value());
}
}
}
for (HttpMessageConverter<?> messageConverter : context.messageConverters()) {
if (messageConverter instanceof GenericHttpMessageConverter genericMessageConverter) {
if (genericMessageConverter.canWrite(entityType, entityClass, contentType)) {
genericMessageConverter.write(entity, entityType, contentType, serverResponse);
return;
}
}
else if (messageConverter instanceof SmartHttpMessageConverter smartMessageConverter) {
ResolvableType resolvableType = ResolvableType.forType(entityType);
if (smartMessageConverter.canWrite(resolvableType, entityClass, contentType)) {
smartMessageConverter.write(entity, resolvableType, contentType, serverResponse, null);
return;
}
}
else if (messageConverter.canWrite(entityClass, contentType)) {
((HttpMessageConverter<Object>) messageConverter).write(entity, contentType, serverResponse);
return;
}
}
List<MediaType> producibleMediaTypes = producibleMediaTypes(context.messageConverters(), entityClass);
throw new HttpMediaTypeNotAcceptableException(producibleMediaTypes);
}
private static @Nullable MediaType getContentType(HttpServletResponse response) {
try {
return MediaType.parseMediaType(response.getContentType()).removeQualityValue();
}
catch (InvalidMediaTypeException ex) {
return null;
}
}
protected void tryWriteEntityWithMessageConverters(Object entity, HttpServletRequest request,
HttpServletResponse response, ServerResponse.Context context) throws ServletException, IOException {
try {
writeEntityWithMessageConverters(entity, request, response, context);
}
catch (IOException | ServletException ex) {
handleError(ex, request, response, context);
}
}
private static List<MediaType> producibleMediaTypes(
List<HttpMessageConverter<?>> messageConverters,
Class<?> entityClass) {
return messageConverters.stream()
.filter(messageConverter -> messageConverter.canWrite(entityClass, null))
.flatMap(messageConverter -> messageConverter.getSupportedMediaTypes(entityClass).stream())
.toList();
}
}
/**
* {@link EntityResponse} implementation for asynchronous {@link CompletionStage} bodies.
*/
private static | DefaultEntityResponse |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java | {
"start": 25189,
"end": 25862
} | interface ____", licenseService.getClass().getCanonicalName());
setLicenseService(licenseService);
setLicenseState(
new XPackLicenseState(
() -> getEpochMillisSupplier().getAsLong(),
LicenseUtils.getXPackLicenseStatus(licenseService.getLicense(), getClock())
)
);
} else {
setLicenseState(
new XPackLicenseState(
() -> getEpochMillisSupplier().getAsLong(),
new XPackLicenseStatus(License.OperationMode.TRIAL, true, null)
)
);
}
}
}
| MutableLicenseService |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java | {
"start": 702,
"end": 1034
} | class ____ extends SearchException {
public FetchPhaseExecutionException(SearchShardTarget shardTarget, String msg, Throwable t) {
super(shardTarget, "Fetch Failed [" + msg + "]", t);
}
public FetchPhaseExecutionException(StreamInput in) throws IOException {
super(in);
}
}
| FetchPhaseExecutionException |
java | spring-projects__spring-security | test/src/test/java/org/springframework/security/test/web/servlet/showcase/csrf/CsrfShowcaseTests.java | {
"start": 3121,
"end": 3485
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
return http.build();
}
@Autowired
void configureGlobal(AuthenticationManagerBuilder auth) throws Exception {
// @formatter:off
auth
.inMemoryAuthentication()
.withUser("user").password("password").roles("USER");
// @formatter:on
}
}
}
| Config |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/aggregator/AbstractDistributedTest.java | {
"start": 1215,
"end": 2205
} | class ____ extends ContextTestSupport {
protected CamelContext context2;
protected ProducerTemplate template2;
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
context.setUseMDCLogging(true);
context2 = new DefaultCamelContext();
context2.setUseMDCLogging(true);
template2 = context2.createProducerTemplate();
context2.start();
template2.start();
// add routes after CamelContext has been started
context2.addRoutes(createRouteBuilder2());
}
@Override
@AfterEach
public void tearDown() throws Exception {
context2.stop();
template2.stop();
super.tearDown();
}
protected MockEndpoint getMockEndpoint2(String uri) {
return context2.getEndpoint(uri, MockEndpoint.class);
}
protected RoutesBuilder createRouteBuilder2() throws Exception {
return createRouteBuilder();
}
}
| AbstractDistributedTest |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/mutiny/MultiStringBean.java | {
"start": 281,
"end": 559
} | class ____ {
@Channel(StringProducer.STRING_STREAM)
Multi<String> strings;
public List<String> getStrings(Duration duration) {
return strings.collect()
.asList()
.await()
.atMost(duration);
}
}
| MultiStringBean |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/test/MockUtils.java | {
"start": 1003,
"end": 1108
} | class ____ can be used to mock a {@link Scannable} {@link ConnectableFlux}.
*/
public static abstract | that |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/util/Supplier.java | {
"start": 1503,
"end": 1663
} | interface ____<T> extends java.util.function.Supplier<T> {
/**
* Gets a value.
*
* @return a value
*/
@Override
T get();
}
| Supplier |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/LobUnfetchedPropertyTest.java | {
"start": 5525,
"end": 6032
} | class ____ {
private int id;
private Clob clob;
@Id
@GeneratedValue
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
@Column(name = "filedata", length = 1024 * 1024)
@Lob
@Basic(fetch = FetchType.LAZY)
public Clob getClob() {
return clob;
}
public void setClob(Clob clob) {
this.clob = clob;
}
}
@Entity(name = "FileNClob")
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, includeLazy = false)
public static | FileClob |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java | {
"start": 1651,
"end": 7607
} | class ____ extends ValuesSourceAggregationBuilder<DiversifiedAggregationBuilder> {
public static final String NAME = "diversified_sampler";
public static final ValuesSourceRegistry.RegistryKey<DiversifiedAggregatorSupplier> REGISTRY_KEY =
new ValuesSourceRegistry.RegistryKey<>(NAME, DiversifiedAggregatorSupplier.class);
public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1;
public static final ObjectParser<DiversifiedAggregationBuilder, String> PARSER = ObjectParser.fromBuilder(
NAME,
DiversifiedAggregationBuilder::new
);
static {
ValuesSourceAggregationBuilder.declareFields(PARSER, true, false, false);
PARSER.declareInt(DiversifiedAggregationBuilder::shardSize, SamplerAggregator.SHARD_SIZE_FIELD);
PARSER.declareInt(DiversifiedAggregationBuilder::maxDocsPerValue, SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD);
PARSER.declareString(DiversifiedAggregationBuilder::executionHint, SamplerAggregator.EXECUTION_HINT_FIELD);
}
public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
DiversifiedAggregatorFactory.registerAggregators(builder);
}
private int shardSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE;
private int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT;
private String executionHint = null;
public DiversifiedAggregationBuilder(String name) {
super(name);
}
protected DiversifiedAggregationBuilder(DiversifiedAggregationBuilder clone, Builder factoriesBuilder, Map<String, Object> metadata) {
super(clone, factoriesBuilder, metadata);
this.shardSize = clone.shardSize;
this.maxDocsPerValue = clone.maxDocsPerValue;
this.executionHint = clone.executionHint;
}
@Override
protected ValuesSourceType defaultValueSourceType() {
return CoreValuesSourceType.KEYWORD;
}
@Override
protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metadata) {
return new DiversifiedAggregationBuilder(this, factoriesBuilder, metadata);
}
/**
* Read from a stream.
*/
public DiversifiedAggregationBuilder(StreamInput in) throws IOException {
super(in);
shardSize = in.readVInt();
maxDocsPerValue = in.readVInt();
executionHint = in.readOptionalString();
}
@Override
protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeVInt(shardSize);
out.writeVInt(maxDocsPerValue);
out.writeOptionalString(executionHint);
}
/**
* Set the max num docs to be returned from each shard.
*/
public DiversifiedAggregationBuilder shardSize(int shardSize) {
if (shardSize < 0) {
throw new IllegalArgumentException(
"[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]"
);
}
this.shardSize = shardSize;
return this;
}
/**
* Set the max num docs to be returned per value.
*/
public DiversifiedAggregationBuilder maxDocsPerValue(int maxDocsPerValue) {
if (maxDocsPerValue < 0) {
throw new IllegalArgumentException(
"[maxDocsPerValue] must be greater than or equal to 0. Found [" + maxDocsPerValue + "] in [" + name + "]"
);
}
this.maxDocsPerValue = maxDocsPerValue;
return this;
}
/**
* Set the execution hint.
*/
public DiversifiedAggregationBuilder executionHint(String executionHint) {
this.executionHint = executionHint;
return this;
}
@Override
public BucketCardinality bucketCardinality() {
return BucketCardinality.ONE;
}
@Override
protected ValuesSourceAggregatorFactory innerBuild(
AggregationContext context,
ValuesSourceConfig config,
AggregatorFactory parent,
Builder subFactoriesBuilder
) throws IOException {
DiversifiedAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config);
return new DiversifiedAggregatorFactory(
name,
config,
shardSize,
maxDocsPerValue,
executionHint,
context,
parent,
subFactoriesBuilder,
metadata,
aggregatorSupplier
);
}
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.field(SamplerAggregator.SHARD_SIZE_FIELD.getPreferredName(), shardSize);
builder.field(SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD.getPreferredName(), maxDocsPerValue);
if (executionHint != null) {
builder.field(SamplerAggregator.EXECUTION_HINT_FIELD.getPreferredName(), executionHint);
}
return builder;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), shardSize, maxDocsPerValue, executionHint);
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null || getClass() != obj.getClass()) return false;
if (super.equals(obj) == false) return false;
DiversifiedAggregationBuilder other = (DiversifiedAggregationBuilder) obj;
return Objects.equals(shardSize, other.shardSize)
&& Objects.equals(maxDocsPerValue, other.maxDocsPerValue)
&& Objects.equals(executionHint, other.executionHint);
}
@Override
public String getType() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
@Override
public boolean supportsParallelCollection(ToLongFunction<String> fieldCardinalityResolver) {
return false;
}
}
| DiversifiedAggregationBuilder |
java | quarkusio__quarkus | extensions/oidc-client-filter/deployment/src/test/java/io/quarkus/oidc/client/filter/ExtendedOidcClientRequestFilter.java | {
"start": 154,
"end": 228
} | class ____ extends OidcClientRequestFilter {
}
| ExtendedOidcClientRequestFilter |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java | {
"start": 72190,
"end": 72990
} | class ____ extends OperationSimplifier {
AddSubSimplifier(BinaryComparison comparison) {
super(comparison);
}
@Override
boolean isOpUnsafe() {
// no ADD/SUB with floating fields
if (operation.dataType().isRational()) {
return true;
}
if (operation.symbol().equals(SUB.symbol()) && opRight instanceof Literal == false) { // such as: 1 - x > -MAX
// if next simplification step would fail on overflow anyways, skip the optimisation already
return tryFolding(new Sub(EMPTY, opLeft, bcLiteral)) == null;
}
return false;
}
}
private static | AddSubSimplifier |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java | {
"start": 1919,
"end": 2177
} | class ____<T> {
private static final Logger log = LoggerFactory.getLogger(JsonSerDeser.class);
private final Class<T> classType;
private final ObjectMapper mapper;
/**
* Create an instance bound to a specific type
* @param classType | JsonSerDeser |
java | apache__camel | components/camel-openstack/src/main/java/org/apache/camel/component/openstack/swift/producer/ContainerProducer.java | {
"start": 1685,
"end": 9524
} | class ____ extends AbstractOpenstackProducer {
public ContainerProducer(SwiftEndpoint endpoint, OSClient client) {
super(endpoint, client);
}
@Override
public void process(Exchange exchange) throws Exception {
String operation = getOperation(exchange);
switch (operation) {
case OpenstackConstants.CREATE:
doCreate(exchange);
break;
case OpenstackConstants.GET:
doGet(exchange);
break;
case OpenstackConstants.GET_ALL:
doGetAll(exchange);
break;
case OpenstackConstants.UPDATE:
doUpdate(exchange);
break;
case OpenstackConstants.DELETE:
doDelete(exchange);
break;
case SwiftConstants.GET_METADATA:
doGetMetadata(exchange);
break;
case SwiftConstants.CREATE_UPDATE_METADATA:
doUpdateMetadata(exchange);
break;
case SwiftConstants.DELETE_METADATA:
doDeleteMetadata(exchange);
break;
default:
throw new IllegalArgumentException("Unsupported operation " + operation);
}
}
private void doCreate(Exchange exchange) {
final Message msg = exchange.getIn();
final String name = msg.getHeader(OpenstackConstants.NAME, msg.getHeader(SwiftConstants.CONTAINER_NAME, String.class),
String.class);
StringHelper.notEmpty(name, "Container name");
final CreateUpdateContainerOptions options = messageToCreateUpdateOptions(msg);
final ActionResponse out = os.objectStorage().containers().create(name, options);
checkFailure(out, exchange, "Create container " + name);
}
private void doGet(Exchange exchange) {
final Message msg = exchange.getIn();
final ContainerListOptions options = messageToListOptions(msg);
final List<? extends SwiftContainer> out = os.objectStorage().containers().list(options);
msg.setBody(out);
}
private void doGetAll(Exchange exchange) {
final Message msg = exchange.getIn();
final List<? extends SwiftContainer> out = os.objectStorage().containers().list();
msg.setBody(out);
}
private void doUpdate(Exchange exchange) {
final Message msg = exchange.getIn();
final String name = msg.getHeader(OpenstackConstants.NAME, msg.getHeader(SwiftConstants.CONTAINER_NAME, String.class),
String.class);
StringHelper.notEmpty(name, "Container name");
final CreateUpdateContainerOptions options = messageToCreateUpdateOptions(msg);
final ActionResponse out = os.objectStorage().containers().update(name, options);
checkFailure(out, exchange, "Update container " + name);
}
private void doDelete(Exchange exchange) {
final Message msg = exchange.getIn();
final String name = msg.getHeader(OpenstackConstants.NAME, msg.getHeader(SwiftConstants.CONTAINER_NAME, String.class),
String.class);
StringHelper.notEmpty(name, "Container name");
final ActionResponse out = os.objectStorage().containers().delete(name);
checkFailure(out, exchange, "Delete container " + name);
}
private void doGetMetadata(Exchange exchange) {
final Message msg = exchange.getIn();
final String name = msg.getHeader(OpenstackConstants.NAME, msg.getHeader(SwiftConstants.CONTAINER_NAME, String.class),
String.class);
StringHelper.notEmpty(name, "Container name");
msg.setBody(os.objectStorage().containers().getMetadata(name));
}
private void doDeleteMetadata(Exchange exchange) {
final Message msg = exchange.getIn();
final String name = msg.getHeader(OpenstackConstants.NAME, msg.getHeader(SwiftConstants.CONTAINER_NAME, String.class),
String.class);
StringHelper.notEmpty(name, "Container name");
boolean success = os.objectStorage().containers().deleteMetadata(name, msg.getBody(Map.class));
if (!success) {
exchange.setException(new OpenstackException("Removing metadata was not successful"));
}
}
private void doUpdateMetadata(Exchange exchange) {
final Message msg = exchange.getIn();
final String name = msg.getHeader(OpenstackConstants.NAME, msg.getHeader(SwiftConstants.CONTAINER_NAME, String.class),
String.class);
StringHelper.notEmpty(name, "Container name");
boolean success = os.objectStorage().containers().updateMetadata(name, msg.getBody(Map.class));
if (!success) {
exchange.setException(new OpenstackException("Updating metadata was not successful"));
}
}
private CreateUpdateContainerOptions messageToCreateUpdateOptions(Message message) {
CreateUpdateContainerOptions options = message.getBody(CreateUpdateContainerOptions.class);
if (options == null) {
Map<String, Object> headers = message.getHeaders();
if (headers.containsKey(SwiftConstants.CONTAINER_METADATA_PREFIX)) {
options = getCreateUpdateOptions(options)
.metadata(message.getHeader(SwiftConstants.CONTAINER_METADATA_PREFIX, Map.class));
}
if (headers.containsKey(SwiftConstants.VERSIONS_LOCATION)) {
options = getCreateUpdateOptions(options)
.versionsLocation(message.getHeader(SwiftConstants.VERSIONS_LOCATION, String.class));
}
if (headers.containsKey(SwiftConstants.CONTAINER_READ)) {
options = getCreateUpdateOptions(options)
.accessRead(message.getHeader(SwiftConstants.CONTAINER_READ, String.class));
}
if (headers.containsKey(SwiftConstants.CONTAINER_WRITE)) {
options = getCreateUpdateOptions(options)
.accessWrite(message.getHeader(SwiftConstants.CONTAINER_WRITE, String.class));
}
}
return options;
}
private CreateUpdateContainerOptions getCreateUpdateOptions(CreateUpdateContainerOptions options) {
return options == null ? CreateUpdateContainerOptions.create() : options;
}
private ContainerListOptions messageToListOptions(Message message) {
ContainerListOptions options = message.getBody(ContainerListOptions.class);
if (options == null) {
Map headers = message.getHeaders();
if (headers.containsKey(SwiftConstants.LIMIT)) {
options = getListOptions(options).limit(message.getHeader(SwiftConstants.LIMIT, Integer.class));
}
if (headers.containsKey(SwiftConstants.MARKER)) {
options = getListOptions(options).marker(message.getHeader(SwiftConstants.MARKER, String.class));
}
if (headers.containsKey(SwiftConstants.END_MARKER)) {
options = getListOptions(options).endMarker(message.getHeader(SwiftConstants.END_MARKER, String.class));
}
if (headers.containsKey(SwiftConstants.DELIMITER)) {
options = getListOptions(options).delimiter(message.getHeader(SwiftConstants.DELIMITER, Character.class));
}
if (headers.containsKey(SwiftConstants.PATH)) {
options = getListOptions(options).path(message.getHeader(SwiftConstants.PATH, String.class));
}
}
return options;
}
private ContainerListOptions getListOptions(ContainerListOptions options) {
return options == null ? ContainerListOptions.create() : options;
}
}
| ContainerProducer |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/booleanarray/BooleanArrayAssert_contains_with_Boolean_array_Test.java | {
"start": 1205,
"end": 1916
} | class ____ extends BooleanArrayAssertBaseTest {
@Test
void should_fail_if_values_is_null() {
// GIVEN
Boolean[] values = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertions.contains(values));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("values").create());
}
@Override
protected BooleanArrayAssert invoke_api_method() {
return assertions.contains(new Boolean[] { true, false });
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertContains(getInfo(assertions), getActual(assertions), arrayOf(true, false));
}
}
| BooleanArrayAssert_contains_with_Boolean_array_Test |
java | micronaut-projects__micronaut-core | http-client-tck/src/main/java/io/micronaut/http/client/tck/tests/ExceptionOnErrorStatusTest.java | {
"start": 1333,
"end": 2277
} | class ____ {
private static final String SPEC_NAME = "ExceptionOnErrorStatusTest";
@ParameterizedTest(name = "blocking={0}")
@ValueSource(booleans = {true, false})
void exceptionOnErrorStatus(boolean blocking) throws IOException {
asserts(SPEC_NAME,
Map.of(
"micronaut.http.client.exception-on-error-status", StringUtils.FALSE,
BLOCKING_CLIENT_PROPERTY, blocking
),
HttpRequest.GET("/unprocessable"),
(server, request) -> AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.UNPROCESSABLE_ENTITY)
.body("{\"message\":\"Cannot make it\"}")
.build()));
}
@Requires(property = "spec.name", value = SPEC_NAME)
@Controller("/unprocessable")
@SuppressWarnings("checkstyle:MissingJavadocType")
static | ExceptionOnErrorStatusTest |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/processor/src/main/java/org/jboss/resteasy/reactive/common/processor/AdditionalReaders.java | {
"start": 62,
"end": 128
} | class ____ extends AdditionalReaderWriterCommon {
}
| AdditionalReaders |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/InfluencersQueryBuilder.java | {
"start": 1251,
"end": 2899
} | class ____ {
public static final int DEFAULT_SIZE = 100;
private InfluencersQuery influencersQuery = new InfluencersQuery();
public InfluencersQueryBuilder from(int from) {
influencersQuery.from = from;
return this;
}
public InfluencersQueryBuilder size(int size) {
influencersQuery.size = size;
return this;
}
public InfluencersQueryBuilder includeInterim(boolean include) {
influencersQuery.includeInterim = include;
return this;
}
public InfluencersQueryBuilder influencerScoreThreshold(Double influencerScoreFilter) {
influencersQuery.influencerScoreFilter = influencerScoreFilter;
return this;
}
public InfluencersQueryBuilder sortField(String sortField) {
influencersQuery.sortField = sortField;
return this;
}
public InfluencersQueryBuilder sortDescending(boolean sortDescending) {
influencersQuery.sortDescending = sortDescending;
return this;
}
/**
* If startTime >= 0 the parameter is not set
*/
public InfluencersQueryBuilder start(String startTime) {
influencersQuery.start = startTime;
return this;
}
/**
* If endTime >= 0 the parameter is not set
*/
public InfluencersQueryBuilder end(String endTime) {
influencersQuery.end = endTime;
return this;
}
public InfluencersQueryBuilder.InfluencersQuery build() {
return influencersQuery;
}
public void clear() {
influencersQuery = new InfluencersQueryBuilder.InfluencersQuery();
}
public | InfluencersQueryBuilder |
java | apache__logging-log4j2 | log4j-1.2-api/src/test/java/org/apache/log4j/LoggingTest.java | {
"start": 1112,
"end": 1452
} | class ____ {
@Test
void testParent() {
final Logger logger = Logger.getLogger("org.apache.test.logging.Test");
final Category parent = logger.getParent();
assertNotNull(parent, "No parent Logger");
assertEquals("org.apache.test.logging", parent.getName(), "Incorrect parent logger");
}
}
| LoggingTest |
java | grpc__grpc-java | netty/src/main/java/io/grpc/netty/ProtocolNegotiator.java | {
"start": 857,
"end": 1709
} | interface ____ {
/**
* The HTTP/2 scheme to be used when sending {@code HEADERS}.
*/
AsciiString scheme();
/**
* Creates a new handler to control the protocol negotiation. Once the negotiation has completed
* successfully, the provided handler is installed. Must call {@code
* grpcHandler.onHandleProtocolNegotiationCompleted()} at certain point if the negotiation has
* completed successfully.
*/
ChannelHandler newHandler(GrpcHttp2ConnectionHandler grpcHandler);
/**
* Releases resources held by this negotiator. Called when the Channel transitions to terminated
* or when InternalServer is shutdown (depending on client or server). That means handlers
* returned by {@link #newHandler} can outlive their parent negotiator on server-side, but not
* on client-side.
*/
void close();
| ProtocolNegotiator |
java | apache__logging-log4j2 | log4j-osgi-test/src/test/java/org/apache/logging/log4j/osgi/tests/FelixLoadApiBundleTest.java | {
"start": 987,
"end": 1135
} | class ____ extends AbstractLoadBundleTest {
public FelixLoadApiBundleTest() {
super(new FrameworkFactory());
}
}
| FelixLoadApiBundleTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java | {
"start": 4039,
"end": 4646
} | class ____ extends ES87TSDBDocValuesFormatTests {
protected final Codec codec = new Elasticsearch92Lucene103Codec() {
final ES819TSDBDocValuesFormat docValuesFormat = new ES819TSDBDocValuesFormat(
ESTestCase.randomIntBetween(2, 4096),
ESTestCase.randomIntBetween(1, 512),
random().nextBoolean(),
randomBinaryCompressionMode(),
true
);
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
return docValuesFormat;
}
};
public static | ES819TSDBDocValuesFormatTests |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/physical/stream/FlinkMarkChangelogNormalizeProgram.java | {
"start": 10621,
"end": 11468
} | class ____ {
private final StreamPhysicalChangelogNormalize changelogNormalize;
private final RexNode conditions;
private ChangelogNormalizeContext(
StreamPhysicalChangelogNormalize changelogNormalize, RexNode conditions) {
this.changelogNormalize = changelogNormalize;
this.conditions = conditions;
}
public static ChangelogNormalizeContext of(
StreamPhysicalChangelogNormalize changelogNormalize, RexNode conditions) {
return new ChangelogNormalizeContext(changelogNormalize, conditions);
}
public StreamPhysicalChangelogNormalize getChangelogNormalize() {
return changelogNormalize;
}
public RexNode getConditions() {
return conditions;
}
}
}
| ChangelogNormalizeContext |
java | apache__logging-log4j2 | log4j-api-test/src/test/java/org/apache/logging/log4j/AbstractLoggerTest.java | {
"start": 2497,
"end": 37561
} | class ____ {
private static final StringBuilder CHAR_SEQ = new StringBuilder("CharSeq");
// TODO add proper tests for ReusableMessage
@SuppressWarnings("ThrowableInstanceNeverThrown")
private static final Throwable t = new UnsupportedOperationException("Test");
private static final Class<AbstractLogger> obj = AbstractLogger.class;
private static final String pattern = "{}, {}";
private static final String p1 = "Long Beach";
private static final String p2 = "California";
private static final Message charSeq = new SimpleMessage(CHAR_SEQ);
private static final Message simple = new SimpleMessage("Hello");
private static final Message object = new ObjectMessage(obj);
private static final Message param = new ParameterizedMessage(pattern, p1, p2);
private final Marker MARKER = MarkerManager.getMarker("TEST");
private static final String MARKER_NAME = "TEST";
private static final LogEvent[] EVENTS = new LogEvent[] {
new LogEvent(null, simple, null),
new LogEvent(MARKER_NAME, simple, null),
new LogEvent(null, simple, t),
new LogEvent(MARKER_NAME, simple, t),
new LogEvent(null, object, null),
new LogEvent(MARKER_NAME, object, null),
new LogEvent(null, object, t),
new LogEvent(MARKER_NAME, object, t),
new LogEvent(null, param, null),
new LogEvent(MARKER_NAME, param, null),
new LogEvent(null, simple, null),
new LogEvent(null, simple, t),
new LogEvent(MARKER_NAME, simple, null),
new LogEvent(MARKER_NAME, simple, t),
new LogEvent(MARKER_NAME, simple, null),
new LogEvent(null, charSeq, null),
new LogEvent(null, charSeq, t),
new LogEvent(MARKER_NAME, charSeq, null),
new LogEvent(MARKER_NAME, charSeq, t),
};
@Test
void testDebug() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.DEBUG);
logger.setCurrentEvent(EVENTS[0]);
logger.debug("Hello");
logger.debug((Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.debug(MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.debug("Hello", t);
logger.debug((Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.debug(MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.debug(obj);
logger.setCurrentEvent(EVENTS[5]);
logger.debug(MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.debug(obj, t);
logger.debug((Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.debug(MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.debug(pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.debug(MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.debug(simple);
logger.debug((Marker) null, simple);
logger.debug((Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.debug(simple, t);
logger.debug((Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.debug(MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.debug(MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.debug(MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.debug(CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.debug(CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.debug(MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.debug(MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testError() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.ERROR);
logger.setCurrentEvent(EVENTS[0]);
logger.error("Hello");
logger.error((Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.error(MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.error("Hello", t);
logger.error((Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.error(MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.error(obj);
logger.setCurrentEvent(EVENTS[5]);
logger.error(MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.error(obj, t);
logger.error((Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.error(MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.error(pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.error(MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.error(simple);
logger.error((Marker) null, simple);
logger.error((Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.error(simple, t);
logger.error((Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.error(MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.error(MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.error(MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.error(CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.error(CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.error(MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.error(MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testFatal() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.FATAL);
logger.setCurrentEvent(EVENTS[0]);
logger.fatal("Hello");
logger.fatal((Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.fatal(MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.fatal("Hello", t);
logger.fatal((Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.fatal(MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.fatal(obj);
logger.setCurrentEvent(EVENTS[5]);
logger.fatal(MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.fatal(obj, t);
logger.fatal((Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.fatal(MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.fatal(pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.fatal(MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.fatal(simple);
logger.fatal((Marker) null, simple);
logger.fatal((Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.fatal(simple, t);
logger.fatal((Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.fatal(MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.fatal(MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.fatal(MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.fatal(CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.fatal(CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.fatal(MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.fatal(MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testInfo() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.INFO);
logger.setCurrentEvent(EVENTS[0]);
logger.info("Hello");
logger.info((Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.info(MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.info("Hello", t);
logger.info((Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.info(MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.info(obj);
logger.setCurrentEvent(EVENTS[5]);
logger.info(MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.info(obj, t);
logger.info((Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.info(MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.info(pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.info(MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.info(simple);
logger.info((Marker) null, simple);
logger.info((Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.info(simple, t);
logger.info((Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.info(MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.info(MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.info(MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.info(CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.info(CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.info(MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.info(MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testLogDebug() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.DEBUG);
logger.setCurrentEvent(EVENTS[0]);
logger.log(Level.DEBUG, "Hello");
logger.log(Level.DEBUG, (Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.log(Level.DEBUG, MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.log(Level.DEBUG, "Hello", t);
logger.log(Level.DEBUG, (Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.log(Level.DEBUG, MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.log(Level.DEBUG, obj);
logger.setCurrentEvent(EVENTS[5]);
logger.log(Level.DEBUG, MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.log(Level.DEBUG, obj, t);
logger.log(Level.DEBUG, (Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.log(Level.DEBUG, MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.log(Level.DEBUG, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.log(Level.DEBUG, MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.log(Level.DEBUG, simple);
logger.log(Level.DEBUG, (Marker) null, simple);
logger.log(Level.DEBUG, (Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.log(Level.DEBUG, simple, t);
logger.log(Level.DEBUG, (Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.log(Level.DEBUG, MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.log(Level.DEBUG, MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.log(Level.DEBUG, MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.log(Level.DEBUG, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.log(Level.DEBUG, CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.log(Level.DEBUG, MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.log(Level.DEBUG, MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testLogError() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.ERROR);
logger.setCurrentEvent(EVENTS[0]);
logger.log(Level.ERROR, "Hello");
logger.log(Level.ERROR, (Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.log(Level.ERROR, MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.log(Level.ERROR, "Hello", t);
logger.log(Level.ERROR, (Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.log(Level.ERROR, MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.log(Level.ERROR, obj);
logger.setCurrentEvent(EVENTS[5]);
logger.log(Level.ERROR, MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.log(Level.ERROR, obj, t);
logger.log(Level.ERROR, (Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.log(Level.ERROR, MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.log(Level.ERROR, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.log(Level.ERROR, MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.log(Level.ERROR, simple);
logger.log(Level.ERROR, (Marker) null, simple);
logger.log(Level.ERROR, (Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.log(Level.ERROR, simple, t);
logger.log(Level.ERROR, (Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.log(Level.ERROR, MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.log(Level.ERROR, MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.log(Level.ERROR, MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.log(Level.ERROR, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.log(Level.ERROR, CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.log(Level.ERROR, MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.log(Level.ERROR, MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testLogFatal() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.FATAL);
logger.setCurrentEvent(EVENTS[0]);
logger.log(Level.FATAL, "Hello");
logger.log(Level.FATAL, (Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.log(Level.FATAL, MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.log(Level.FATAL, "Hello", t);
logger.log(Level.FATAL, (Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.log(Level.FATAL, MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.log(Level.FATAL, obj);
logger.setCurrentEvent(EVENTS[5]);
logger.log(Level.FATAL, MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.log(Level.FATAL, obj, t);
logger.log(Level.FATAL, (Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.log(Level.FATAL, MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.log(Level.FATAL, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.log(Level.FATAL, MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.log(Level.FATAL, simple);
logger.log(Level.FATAL, (Marker) null, simple);
logger.log(Level.FATAL, (Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.log(Level.FATAL, simple, t);
logger.log(Level.FATAL, (Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.log(Level.FATAL, MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.log(Level.FATAL, MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.log(Level.FATAL, MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.log(Level.FATAL, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.log(Level.FATAL, CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.log(Level.FATAL, MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.log(Level.FATAL, MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testLogInfo() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.INFO);
logger.setCurrentEvent(EVENTS[0]);
logger.log(Level.INFO, "Hello");
logger.log(Level.INFO, (Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.log(Level.INFO, MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.log(Level.INFO, "Hello", t);
logger.log(Level.INFO, (Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.log(Level.INFO, MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.log(Level.INFO, obj);
logger.setCurrentEvent(EVENTS[5]);
logger.log(Level.INFO, MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.log(Level.INFO, obj, t);
logger.log(Level.INFO, (Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.log(Level.INFO, MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.log(Level.INFO, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.log(Level.INFO, MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.log(Level.INFO, simple);
logger.log(Level.INFO, (Marker) null, simple);
logger.log(Level.INFO, (Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.log(Level.INFO, simple, t);
logger.log(Level.INFO, (Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.log(Level.INFO, MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.log(Level.INFO, MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.log(Level.INFO, MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.log(Level.INFO, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.log(Level.INFO, CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.log(Level.INFO, MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.log(Level.INFO, MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testLogTrace() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.TRACE);
logger.setCurrentEvent(EVENTS[0]);
logger.log(Level.TRACE, "Hello");
logger.log(Level.TRACE, (Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.log(Level.TRACE, MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.log(Level.TRACE, "Hello", t);
logger.log(Level.TRACE, (Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.log(Level.TRACE, MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.log(Level.TRACE, obj);
logger.setCurrentEvent(EVENTS[5]);
logger.log(Level.TRACE, MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.log(Level.TRACE, obj, t);
logger.log(Level.TRACE, (Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.log(Level.TRACE, MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.log(Level.TRACE, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.log(Level.TRACE, MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.log(Level.TRACE, simple);
logger.log(Level.TRACE, (Marker) null, simple);
logger.log(Level.TRACE, (Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.log(Level.TRACE, simple, t);
logger.log(Level.TRACE, (Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.log(Level.TRACE, MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.log(Level.TRACE, MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.log(Level.TRACE, MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.log(Level.TRACE, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.log(Level.TRACE, CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.log(Level.TRACE, MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.log(Level.TRACE, MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testLogWarn() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.WARN);
logger.setCurrentEvent(EVENTS[0]);
logger.log(Level.WARN, "Hello");
logger.log(Level.WARN, (Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.log(Level.WARN, MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.log(Level.WARN, "Hello", t);
logger.log(Level.WARN, (Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.log(Level.WARN, MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.log(Level.WARN, obj);
logger.setCurrentEvent(EVENTS[5]);
logger.log(Level.WARN, MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.log(Level.WARN, obj, t);
logger.log(Level.WARN, (Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.log(Level.WARN, MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.log(Level.WARN, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.log(Level.WARN, MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.log(Level.WARN, simple);
logger.log(Level.WARN, (Marker) null, simple);
logger.log(Level.WARN, (Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.log(Level.WARN, simple, t);
logger.log(Level.WARN, (Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.log(Level.WARN, MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.log(Level.WARN, MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.log(Level.WARN, MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.log(Level.WARN, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.log(Level.WARN, CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.log(Level.WARN, MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.log(Level.WARN, MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testTrace() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.TRACE);
logger.setCurrentEvent(EVENTS[0]);
logger.trace("Hello");
logger.trace((Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.trace(MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.trace("Hello", t);
logger.trace((Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.trace(MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.trace(obj);
logger.setCurrentEvent(EVENTS[5]);
logger.trace(MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.trace(obj, t);
logger.trace((Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.trace(MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.trace(pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.trace(MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.trace(simple);
logger.trace((Marker) null, simple);
logger.trace((Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.trace(simple, t);
logger.trace((Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.trace(MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.trace(MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.trace(MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.trace(CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.trace(CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.trace(MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.trace(MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testWarn() {
final CountingLogger logger = new CountingLogger();
logger.setCurrentLevel(Level.WARN);
logger.setCurrentEvent(EVENTS[0]);
logger.warn("Hello");
logger.warn((Marker) null, "Hello");
logger.setCurrentEvent(EVENTS[1]);
logger.warn(MARKER, "Hello");
logger.setCurrentEvent(EVENTS[2]);
logger.warn("Hello", t);
logger.warn((Marker) null, "Hello", t);
logger.setCurrentEvent(EVENTS[3]);
logger.warn(MARKER, "Hello", t);
logger.setCurrentEvent(EVENTS[4]);
logger.warn(obj);
logger.setCurrentEvent(EVENTS[5]);
logger.warn(MARKER, obj);
logger.setCurrentEvent(EVENTS[6]);
logger.warn(obj, t);
logger.warn((Marker) null, obj, t);
logger.setCurrentEvent(EVENTS[7]);
logger.warn(MARKER, obj, t);
logger.setCurrentEvent(EVENTS[8]);
logger.warn(pattern, p1, p2);
logger.setCurrentEvent(EVENTS[9]);
logger.warn(MARKER, pattern, p1, p2);
logger.setCurrentEvent(EVENTS[10]);
logger.warn(simple);
logger.warn((Marker) null, simple);
logger.warn((Marker) null, simple, null);
logger.setCurrentEvent(EVENTS[11]);
logger.warn(simple, t);
logger.warn((Marker) null, simple, t);
logger.setCurrentEvent(EVENTS[12]);
logger.warn(MARKER, simple, null);
logger.setCurrentEvent(EVENTS[13]);
logger.warn(MARKER, simple, t);
logger.setCurrentEvent(EVENTS[14]);
logger.warn(MARKER, simple);
logger.setCurrentEvent(EVENTS[15]);
logger.warn(CHAR_SEQ);
logger.setCurrentEvent(EVENTS[16]);
logger.warn(CHAR_SEQ, t);
logger.setCurrentEvent(EVENTS[17]);
logger.warn(MARKER, CHAR_SEQ);
logger.setCurrentEvent(EVENTS[18]);
logger.warn(MARKER, CHAR_SEQ, t);
assertEquals(4, logger.getCharSeqCount(), "log(CharSeq) invocations");
assertEquals(5, logger.getObjectCount(), "log(Object) invocations");
}
@Test
void testMessageWithThrowable() {
final ThrowableExpectingLogger logger = new ThrowableExpectingLogger(true);
final ThrowableMessage message = new ThrowableMessage(t);
logger.debug(message);
logger.error(message);
logger.fatal(message);
logger.info(message);
logger.trace(message);
logger.warn(message);
logger.log(Level.INFO, message);
logger.debug(MARKER, message);
logger.error(MARKER, message);
logger.fatal(MARKER, message);
logger.info(MARKER, message);
logger.trace(MARKER, message);
logger.warn(MARKER, message);
logger.log(Level.INFO, MARKER, message);
}
@Test
void testMessageWithoutThrowable() {
final ThrowableExpectingLogger logger = new ThrowableExpectingLogger(false);
final ThrowableMessage message = new ThrowableMessage(null);
logger.debug(message);
logger.error(message);
logger.fatal(message);
logger.info(message);
logger.trace(message);
logger.warn(message);
logger.log(Level.INFO, message);
logger.debug(MARKER, message);
logger.error(MARKER, message);
logger.fatal(MARKER, message);
logger.info(MARKER, message);
logger.trace(MARKER, message);
logger.warn(MARKER, message);
logger.log(Level.INFO, MARKER, message);
}
@Test
void testMessageSupplierWithThrowable() {
final ThrowableExpectingLogger logger = new ThrowableExpectingLogger(true);
final ThrowableMessage message = new ThrowableMessage(t);
final MessageSupplier supplier = () -> message;
logger.debug(supplier);
logger.error(supplier);
logger.fatal(supplier);
logger.info(supplier);
logger.trace(supplier);
logger.warn(supplier);
logger.log(Level.INFO, supplier);
logger.debug(MARKER, supplier);
logger.error(MARKER, supplier);
logger.fatal(MARKER, supplier);
logger.info(MARKER, supplier);
logger.trace(MARKER, supplier);
logger.warn(MARKER, supplier);
logger.log(Level.INFO, MARKER, supplier);
}
@Test
void testMessageSupplierWithoutThrowable() {
final ThrowableExpectingLogger logger = new ThrowableExpectingLogger(false);
final ThrowableMessage message = new ThrowableMessage(null);
final MessageSupplier supplier = () -> message;
logger.debug(supplier);
logger.error(supplier);
logger.fatal(supplier);
logger.info(supplier);
logger.trace(supplier);
logger.warn(supplier);
logger.log(Level.INFO, supplier);
logger.debug(MARKER, supplier);
logger.error(MARKER, supplier);
logger.fatal(MARKER, supplier);
logger.info(MARKER, supplier);
logger.trace(MARKER, supplier);
logger.warn(MARKER, supplier);
logger.log(Level.INFO, MARKER, supplier);
}
@Test
void testSupplierWithThrowable() {
final ThrowableExpectingLogger logger = new ThrowableExpectingLogger(true);
final ThrowableMessage message = new ThrowableMessage(t);
final Supplier<Message> supplier = () -> message;
logger.debug(supplier);
logger.error(supplier);
logger.fatal(supplier);
logger.info(supplier);
logger.trace(supplier);
logger.warn(supplier);
logger.log(Level.INFO, supplier);
logger.debug(MARKER, supplier);
logger.error(MARKER, supplier);
logger.fatal(MARKER, supplier);
logger.info(MARKER, supplier);
logger.trace(MARKER, supplier);
logger.warn(MARKER, supplier);
logger.log(Level.INFO, MARKER, supplier);
}
@Test
void testSupplierWithoutThrowable() {
final ThrowableExpectingLogger logger = new ThrowableExpectingLogger(false);
final ThrowableMessage message = new ThrowableMessage(null);
final Supplier<Message> supplier = () -> message;
logger.debug(supplier);
logger.error(supplier);
logger.fatal(supplier);
logger.info(supplier);
logger.trace(supplier);
logger.warn(supplier);
logger.log(Level.INFO, supplier);
logger.debug(MARKER, supplier);
logger.error(MARKER, supplier);
logger.fatal(MARKER, supplier);
logger.info(MARKER, supplier);
logger.trace(MARKER, supplier);
logger.warn(MARKER, supplier);
logger.log(Level.INFO, MARKER, supplier);
}
@Test
@ResourceLock("log4j2.StatusLogger")
void testMessageThrows() {
final ThrowableExpectingLogger logger = new ThrowableExpectingLogger(false);
logger.error(new TestMessage(
() -> {
throw new IllegalStateException("Oops!");
},
"Message Format"));
final List<StatusData> statusDatalist = StatusLogger.getLogger().getStatusData();
final StatusData mostRecent = statusDatalist.get(statusDatalist.size() - 1);
assertEquals(Level.WARN, mostRecent.getLevel());
assertThat(
mostRecent.getFormattedStatus(),
containsString("org.apache.logging.log4j.spi.AbstractLogger caught "
+ "java.lang.IllegalStateException logging TestMessage: Message Format"));
}
@Test
@ResourceLock("log4j2.StatusLogger")
void testMessageThrowsAndNullFormat() {
final ThrowableExpectingLogger logger = new ThrowableExpectingLogger(false);
logger.error(new TestMessage(
() -> {
throw new IllegalStateException("Oops!");
},
null /* format */));
final List<StatusData> statusDatalist = StatusLogger.getLogger().getStatusData();
final StatusData mostRecent = statusDatalist.get(statusDatalist.size() - 1);
assertEquals(Level.WARN, mostRecent.getLevel());
assertThat(
mostRecent.getFormattedStatus(),
containsString("org.apache.logging.log4j.spi.AbstractLogger caught "
+ "java.lang.IllegalStateException logging TestMessage: "));
}
private static final | AbstractLoggerTest |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/processor/AnnotationBasedComponentModelProcessor.java | {
"start": 12843,
"end": 13110
} | class ____ to be generated or not
*/
protected abstract boolean requiresGenerationOfDecoratorClass();
@Override
public int getPriority() {
return 1100;
}
protected TypeFactory getTypeFactory() {
return typeFactory;
}
}
| needs |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java | {
"start": 2989,
"end": 3054
} | class ____ testing quota-related commands */
@Timeout(120)
public | for |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/runtime/RestartRequestTest.java | {
"start": 1145,
"end": 5713
} | class ____ {
private static final String CONNECTOR_NAME = "foo";
@Test
public void forciblyRestartConnectorOnly() {
RestartRequest restartRequest = new RestartRequest(CONNECTOR_NAME, false, false);
assertTrue(restartRequest.forceRestartConnectorOnly());
restartRequest = new RestartRequest(CONNECTOR_NAME, false, true);
assertFalse(restartRequest.forceRestartConnectorOnly());
restartRequest = new RestartRequest(CONNECTOR_NAME, true, false);
assertFalse(restartRequest.forceRestartConnectorOnly());
restartRequest = new RestartRequest(CONNECTOR_NAME, true, true);
assertFalse(restartRequest.forceRestartConnectorOnly());
}
@Test
public void restartOnlyFailedConnector() {
RestartRequest restartRequest = new RestartRequest(CONNECTOR_NAME, true, false);
assertTrue(restartRequest.shouldRestartConnector(createConnectorStatus(AbstractStatus.State.FAILED)));
assertFalse(restartRequest.shouldRestartConnector(createConnectorStatus(AbstractStatus.State.RUNNING)));
assertFalse(restartRequest.shouldRestartConnector(createConnectorStatus(AbstractStatus.State.PAUSED)));
}
@Test
public void restartAnyStatusConnector() {
RestartRequest restartRequest = new RestartRequest(CONNECTOR_NAME, false, false);
assertTrue(restartRequest.shouldRestartConnector(createConnectorStatus(AbstractStatus.State.FAILED)));
assertTrue(restartRequest.shouldRestartConnector(createConnectorStatus(AbstractStatus.State.RUNNING)));
assertTrue(restartRequest.shouldRestartConnector(createConnectorStatus(AbstractStatus.State.PAUSED)));
}
@Test
public void restartOnlyFailedTasks() {
RestartRequest restartRequest = new RestartRequest(CONNECTOR_NAME, true, true);
assertTrue(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.FAILED)));
assertFalse(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.RUNNING)));
assertFalse(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.PAUSED)));
}
@Test
public void restartAnyStatusTasks() {
RestartRequest restartRequest = new RestartRequest(CONNECTOR_NAME, false, true);
assertTrue(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.FAILED)));
assertTrue(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.RUNNING)));
assertTrue(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.PAUSED)));
}
@Test
public void doNotRestartTasks() {
RestartRequest restartRequest = new RestartRequest(CONNECTOR_NAME, false, false);
assertFalse(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.FAILED)));
assertFalse(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.RUNNING)));
restartRequest = new RestartRequest(CONNECTOR_NAME, true, false);
assertFalse(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.FAILED)));
assertFalse(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.RUNNING)));
}
@Test
public void compareImpact() {
RestartRequest onlyFailedConnector = new RestartRequest(CONNECTOR_NAME, true, false);
RestartRequest failedConnectorAndTasks = new RestartRequest(CONNECTOR_NAME, true, true);
RestartRequest onlyConnector = new RestartRequest(CONNECTOR_NAME, false, false);
RestartRequest connectorAndTasks = new RestartRequest(CONNECTOR_NAME, false, true);
List<RestartRequest> restartRequests = Arrays.asList(connectorAndTasks, onlyConnector, onlyFailedConnector, failedConnectorAndTasks);
Collections.sort(restartRequests);
assertEquals(onlyFailedConnector, restartRequests.get(0));
assertEquals(failedConnectorAndTasks, restartRequests.get(1));
assertEquals(onlyConnector, restartRequests.get(2));
assertEquals(connectorAndTasks, restartRequests.get(3));
RestartRequest onlyFailedDiffConnector = new RestartRequest(CONNECTOR_NAME + "foo", true, false);
assertTrue(onlyFailedConnector.compareTo(onlyFailedDiffConnector) != 0);
}
private TaskStatus createTaskStatus(AbstractStatus.State state) {
return new TaskStatus(null, state, null, 0);
}
private ConnectorStatus createConnectorStatus(AbstractStatus.State state) {
return new ConnectorStatus(null, state, null, 0);
}
}
| RestartRequestTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java | {
"start": 2046,
"end": 9823
} | class ____<T extends InternalGeoGrid<?>> extends BucketsAggregator {
protected final int requiredSize;
protected final int shardSize;
protected final ValuesSource.Numeric valuesSource;
protected final LongKeyedBucketOrds bucketOrds;
@SuppressWarnings("this-escape")
protected GeoGridAggregator(
String name,
AggregatorFactories factories,
Function<LongConsumer, ValuesSource.Numeric> valuesSource,
int requiredSize,
int shardSize,
AggregationContext aggregationContext,
Aggregator parent,
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException {
super(name, factories, aggregationContext, parent, CardinalityUpperBound.MANY, metadata);
this.valuesSource = valuesSource.apply(this::addRequestCircuitBreakerBytes);
this.requiredSize = requiredSize;
this.shardSize = shardSize;
bucketOrds = LongKeyedBucketOrds.build(bigArrays(), cardinality);
}
@Override
public ScoreMode scoreMode() {
if (valuesSource != null && valuesSource.needsScores()) {
return ScoreMode.COMPLETE;
}
return super.scoreMode();
}
@Override
public LeafBucketCollector getLeafCollector(final AggregationExecutionContext aggCtx, final LeafBucketCollector sub)
throws IOException {
final SortedNumericLongValues values = valuesSource.longValues(aggCtx.getLeafReaderContext());
final LongValues singleton = SortedNumericLongValues.unwrapSingleton(values);
return singleton != null ? getLeafCollector(singleton, sub) : getLeafCollector(values, sub);
}
private LeafBucketCollector getLeafCollector(final LongValues values, final LeafBucketCollector sub) {
return new LeafBucketCollectorBase(sub, null) {
@Override
public void collect(int doc, long owningBucketOrd) throws IOException {
if (values.advanceExact(doc)) {
final long val = values.longValue();
long bucketOrdinal = bucketOrds.add(owningBucketOrd, val);
if (bucketOrdinal < 0) { // already seen
bucketOrdinal = -1 - bucketOrdinal;
collectExistingBucket(sub, doc, bucketOrdinal);
} else {
collectBucket(sub, doc, bucketOrdinal);
}
}
}
};
}
private LeafBucketCollector getLeafCollector(final SortedNumericLongValues values, final LeafBucketCollector sub) {
return new LeafBucketCollectorBase(sub, null) {
@Override
public void collect(int doc, long owningBucketOrd) throws IOException {
if (values.advanceExact(doc)) {
final int valuesCount = values.docValueCount();
long previous = Long.MAX_VALUE;
for (int i = 0; i < valuesCount; ++i) {
final long val = values.nextValue();
if (previous != val || i == 0) {
long bucketOrdinal = bucketOrds.add(owningBucketOrd, val);
if (bucketOrdinal < 0) { // already seen
bucketOrdinal = -1 - bucketOrdinal;
collectExistingBucket(sub, doc, bucketOrdinal);
} else {
collectBucket(sub, doc, bucketOrdinal);
}
previous = val;
}
}
}
}
};
}
protected abstract T buildAggregation(String name, int requiredSize, List<InternalGeoGridBucket> buckets, Map<String, Object> metadata);
/**
* This method is used to return a re-usable instance of the bucket when building
* the aggregation.
* @return a new {@link InternalGeoGridBucket} implementation with empty parameters
*/
protected abstract InternalGeoGridBucket newEmptyBucket();
@Override
public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
try (ObjectArray<InternalGeoGridBucket[]> topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())) {
try (IntArray bucketsSizePerOrd = bigArrays().newIntArray(owningBucketOrds.size())) {
long ordsToCollect = 0;
for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) {
int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)), shardSize);
ordsToCollect += size;
bucketsSizePerOrd.set(ordIdx, size);
}
try (LongArray ordsArray = bigArrays().newLongArray(ordsToCollect)) {
long ordsCollected = 0;
for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) {
try (
BucketPriorityQueue<BucketAndOrd<InternalGeoGridBucket>, InternalGeoGridBucket> ordered =
new BucketPriorityQueue<>(bucketsSizePerOrd.get(ordIdx), bigArrays(), b -> b.bucket)
) {
BucketAndOrd<InternalGeoGridBucket> spare = null;
LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx));
while (ordsEnum.next()) {
if (spare == null) {
checkRealMemoryCBForInternalBucket();
spare = new BucketAndOrd<>(newEmptyBucket());
}
// need a special function to keep the source bucket
// up-to-date so it can get the appropriate key
spare.bucket.hashAsLong = ordsEnum.value();
spare.bucket.docCount = bucketDocCount(ordsEnum.ord());
spare.ord = ordsEnum.ord();
spare = ordered.insertWithOverflow(spare);
}
final int orderedSize = (int) ordered.size();
final InternalGeoGridBucket[] buckets = new InternalGeoGridBucket[orderedSize];
for (int i = orderedSize - 1; i >= 0; --i) {
BucketAndOrd<InternalGeoGridBucket> bucketBucketAndOrd = ordered.pop();
buckets[i] = bucketBucketAndOrd.bucket;
ordsArray.set(ordsCollected + i, bucketBucketAndOrd.ord);
}
topBucketsPerOrd.set(ordIdx, buckets);
ordsCollected += orderedSize;
}
}
assert ordsCollected == ordsArray.size();
buildSubAggsForAllBuckets(topBucketsPerOrd, ordsArray, (b, aggs) -> b.aggregations = aggs);
}
}
return buildAggregations(
Math.toIntExact(owningBucketOrds.size()),
ordIdx -> buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata())
);
}
}
@Override
public InternalAggregation buildEmptyAggregation() {
return buildAggregation(name, requiredSize, Collections.emptyList(), metadata());
}
@Override
public void doClose() {
Releasables.close(bucketOrds);
}
}
| GeoGridAggregator |
java | google__error-prone | core/src/test/java/com/google/errorprone/matchers/HasIdentifierTest.java | {
"start": 4735,
"end": 6214
} | class ____ extends Scanner {
abstract void assertDone();
}
private Scanner methodHasIdentifierMatching(boolean shouldMatch, Matcher<Tree> toMatch) {
ScannerTest test =
new ScannerTest() {
private boolean matched = false;
@Override
public Void visitMethod(MethodTree node, VisitorState visitorState) {
visitorState = visitorState.withPath(getCurrentPath());
if (toMatch.matches(node, visitorState)) {
matched = true;
}
return super.visitMethod(node, visitorState);
}
@Override
public void assertDone() {
assertThat(shouldMatch).isEqualTo(matched);
}
};
tests.add(test);
return test;
}
private Scanner literalHasIdentifierMatching(boolean shouldMatch, Matcher<Tree> toMatch) {
ScannerTest test =
new ScannerTest() {
private boolean matched = false;
@Override
public Void visitLiteral(LiteralTree node, VisitorState visitorState) {
visitorState = visitorState.withPath(getCurrentPath());
if (toMatch.matches(node, visitorState)) {
matched = true;
}
return super.visitLiteral(node, visitorState);
}
@Override
void assertDone() {
assertThat(shouldMatch).isEqualTo(matched);
}
};
tests.add(test);
return test;
}
}
| ScannerTest |
java | google__gson | gson/src/test/java/com/google/gson/functional/DelegateTypeAdapterTest.java | {
"start": 2439,
"end": 3061
} | class ____ implements TypeAdapterFactory {
int numReads = 0;
int numWrites = 0;
@Override
public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
TypeAdapter<T> delegate = gson.getDelegateAdapter(this, type);
return new TypeAdapter<>() {
@Override
public void write(JsonWriter out, T value) throws IOException {
++numWrites;
delegate.write(out, value);
}
@Override
public T read(JsonReader in) throws IOException {
++numReads;
return delegate.read(in);
}
};
}
}
}
| StatsTypeAdapterFactory |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/ContextLocalPropagationTest.java | {
"start": 2996,
"end": 3173
} | interface ____ {
@GET
@Path("toClient")
String get();
@GET
@Path("toClient2")
Uni<String> getUni();
}
public static | Client |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/operators/QueueDisposable.java | {
"start": 758,
"end": 2515
} | interface ____ {@link SimpleQueue} and {@link Disposable} and allows negotiating
* the fusion mode between subsequent operators of the {@link io.reactivex.rxjava3.core.Observable Observable} base reactive type.
* <p>
* The negotiation happens in subscription time when the upstream
* calls the {@code onSubscribe} with an instance of this interface. The
* downstream has then the obligation to call {@link #requestFusion(int)}
* with the appropriate mode before calling {@code request()}.
* <p>
* In <b>synchronous fusion</b>, all upstream values are either already available or is generated
* when {@link #poll()} is called synchronously. When the {@link #poll()} returns {@code null},
* that is the indication if a terminated stream. In this mode, the upstream won't call the onXXX methods.
* <p>
* In <b>asynchronous fusion</b>, upstream values may become available to {@link #poll()} eventually.
* Upstream signals {@code onError()} and {@code onComplete()} as usual, however,
* {@code onNext} will be called with {@code null} instead of the actual value.
* Downstream should treat such onNext as indication that {@link #poll()} can be called.
* <p>
* The general rules for consuming the {@link SimpleQueue} interface:
* <ul>
* <li> {@link #poll()} and {@link #clear()} has to be called sequentially (from within a serializing drain-loop).</li>
* <li>In addition, callers of {@link #poll()} should be prepared to catch exceptions.</li>
* <li>Due to how computation attaches to the {@link #poll()}, {@link #poll()} may return
* {@code null} even if a preceding {@link #isEmpty()} returned false.</li>
* </ul>
* <p>
* Implementations should only allow calling the following methods and the rest of the
* {@link SimpleQueue} | extending |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 62768,
"end": 62950
} | class ____ {
@org.jspecify.annotations.Nullable
private final Object method(boolean b) {
return b ? null : 0;
}
| T |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/repository/metadata/MetadataSource.java | {
"start": 1101,
"end": 1381
} | interface ____ {
String ROLE = MetadataSource.class.getName();
MetadataResolution retrieve(
ArtifactMetadata artifact, ArtifactRepository localRepository, List<ArtifactRepository> remoteRepositories)
throws MetadataRetrievalException;
}
| MetadataSource |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-qute/deployment/src/test/java/io/quarkus/resteasy/reactive/qute/deployment/TemplateInstanceNonBlockingEnabledTest.java | {
"start": 604,
"end": 1296
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(TestResource.class)
.addAsResource(new StringAsset("quarkus.rest.qute.template-instance-non-blocking-type=true"),
"application.properties")
.addAsResource(new StringAsset("Blocking allowed: {blockingAllowed}"), "templates/item.txt"));
@Test
public void test() {
when().get("/test").then().statusCode(200).body(Matchers.is("Blocking allowed: false"));
}
@Path("test")
public static | TemplateInstanceNonBlockingEnabledTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e5/a/Person.java | {
"start": 340,
"end": 539
} | class ____ {
@Id String firstName;
@Id String lastName;
public Person() {
}
public Person(String firstName, String lastName) {
this.firstName = firstName;
this.lastName = lastName;
}
}
| Person |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/jdk8/ParallelCollector.java | {
"start": 2136,
"end": 5696
} | class ____<T, A, R> extends DeferredScalarSubscription<R> {
private static final long serialVersionUID = -5370107872170712765L;
final ParallelCollectorInnerSubscriber<T, A, R>[] subscribers;
final AtomicReference<SlotPair<A>> current = new AtomicReference<>();
final AtomicInteger remaining = new AtomicInteger();
final AtomicThrowable error = new AtomicThrowable();
final Function<A, R> finisher;
ParallelCollectorSubscriber(Subscriber<? super R> subscriber, int n, Collector<T, A, R> collector) {
super(subscriber);
this.finisher = collector.finisher();
@SuppressWarnings("unchecked")
ParallelCollectorInnerSubscriber<T, A, R>[] a = new ParallelCollectorInnerSubscriber[n];
for (int i = 0; i < n; i++) {
a[i] = new ParallelCollectorInnerSubscriber<>(this, collector.supplier().get(), collector.accumulator(), collector.combiner());
}
this.subscribers = a;
remaining.lazySet(n);
}
SlotPair<A> addValue(A value) {
for (;;) {
SlotPair<A> curr = current.get();
if (curr == null) {
curr = new SlotPair<>();
if (!current.compareAndSet(null, curr)) {
continue;
}
}
int c = curr.tryAcquireSlot();
if (c < 0) {
current.compareAndSet(curr, null);
continue;
}
if (c == 0) {
curr.first = value;
} else {
curr.second = value;
}
if (curr.releaseSlot()) {
current.compareAndSet(curr, null);
return curr;
}
return null;
}
}
@Override
public void cancel() {
for (ParallelCollectorInnerSubscriber<T, A, R> inner : subscribers) {
inner.cancel();
}
}
void innerError(Throwable ex) {
if (error.compareAndSet(null, ex)) {
cancel();
downstream.onError(ex);
} else {
if (ex != error.get()) {
RxJavaPlugins.onError(ex);
}
}
}
void innerComplete(A value, BinaryOperator<A> combiner) {
for (;;) {
SlotPair<A> sp = addValue(value);
if (sp != null) {
try {
value = combiner.apply(sp.first, sp.second);
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
innerError(ex);
return;
}
} else {
break;
}
}
if (remaining.decrementAndGet() == 0) {
SlotPair<A> sp = current.get();
current.lazySet(null);
R result;
try {
result = Objects.requireNonNull(finisher.apply(sp.first), "The finisher returned a null value");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
innerError(ex);
return;
}
complete(result);
}
}
}
static final | ParallelCollectorSubscriber |
java | spring-projects__spring-boot | buildSrc/src/test/java/org/springframework/boot/build/architecture/configurationproperties/DeprecatedConfigurationPropertySince.java | {
"start": 812,
"end": 1100
} | class ____ {
private String property;
@TestDeprecatedConfigurationProperty(reason = "no longer used")
@Deprecated
public String getProperty() {
return this.property;
}
public void setProperty(String property) {
this.property = property;
}
}
| DeprecatedConfigurationPropertySince |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/objectid/ObjectId825BTest.java | {
"start": 2145,
"end": 2565
} | class ____ extends AbstractEntity {
protected java.util.ArrayList<CTV> var;
public CTC() { }
public java.util.ArrayList<CTV> getVar() {
if (var == null) {
var = new ArrayList<CTV>();
}
return new ArrayList<CTV>(var);
}
public void setVar(java.util.ArrayList<CTV> var) {
this.var = var;
}
}
static | CTC |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdaterForAttributes.java | {
"start": 6940,
"end": 16827
} | class ____
extends NodeAttributesProvider {
public DummyNodeAttributesProvider() {
super("DummyNodeAttributesProvider");
// disable the fetch timer.
setIntervalTime(-1);
}
@Override
protected void cleanUp() throws Exception {
// fake implementation, nothing to cleanup
}
@Override
public TimerTask createTimerTask() {
return new TimerTask() {
@Override
public void run() {
setDescriptors(Collections.unmodifiableSet(new HashSet<>(0)));
}
};
}
}
private YarnConfiguration createNMConfigForDistributeNodeAttributes() {
YarnConfiguration conf = new YarnConfiguration();
return conf;
}
@Test
@Timeout(value = 20)
public void testNodeStatusUpdaterForNodeAttributes()
throws InterruptedException, IOException, TimeoutException {
final ResourceTrackerForAttributes resourceTracker =
new ResourceTrackerForAttributes();
nm = new NodeManager() {
@Override
protected NodeAttributesProvider createNodeAttributesProvider(
Configuration conf) throws IOException {
return dummyAttributesProviderRef;
}
@Override
protected NodeStatusUpdater createNodeStatusUpdater(
Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker) {
return new NodeStatusUpdaterImpl(context, dispatcher, healthChecker,
metrics) {
@Override
protected ResourceTracker getRMClient() {
return resourceTracker;
}
@Override
protected void stopRMProxy() {
return;
}
};
}
};
YarnConfiguration conf = createNMConfigForDistributeNodeAttributes();
conf.setLong(YarnConfiguration.NM_NODE_ATTRIBUTES_RESYNC_INTERVAL, 2000);
conf.set(YarnConfiguration.NM_LOCALIZER_ADDRESS,
"0.0.0.0:" + ServerSocketUtil.getPort(8040, 10));
nm.init(conf);
resourceTracker.resetNMHeartbeatReceiveFlag();
nm.start();
resourceTracker.waitTillRegister();
assertTrue(NodeLabelUtil
.isNodeAttributesEquals(dummyAttributesProviderRef.getDescriptors(),
resourceTracker.attributes));
resourceTracker.waitTillHeartbeat(); // wait till the first heartbeat
resourceTracker.resetNMHeartbeatReceiveFlag();
// heartbeat with updated attributes
NodeAttribute attribute1 = NodeAttribute
.newInstance(NodeAttribute.PREFIX_DISTRIBUTED, "Attr1",
NodeAttributeType.STRING, "V1");
dummyAttributesProviderRef.setDescriptors(ImmutableSet.of(attribute1));
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
assertTrue(NodeLabelUtil
.isNodeAttributesEquals(dummyAttributesProviderRef.getDescriptors(),
resourceTracker.attributes));
resourceTracker.resetNMHeartbeatReceiveFlag();
// heartbeat without updating attributes
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
resourceTracker.resetNMHeartbeatReceiveFlag();
assertNull(resourceTracker.attributes, "If no change in attributes"
+ " then null should be sent as part of request");
// provider return with null attributes
dummyAttributesProviderRef.setDescriptors(null);
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
assertNotNull(resourceTracker.attributes, "If provider sends null"
+ " then empty label set should be sent and not null");
assertTrue(resourceTracker.attributes.isEmpty(),
"If provider sends null then empty attributes should be sent");
resourceTracker.resetNMHeartbeatReceiveFlag();
// Since the resync interval is set to 2 sec in every alternate heartbeat
// the attributes will be send along with heartbeat.
// In loop we sleep for 1 sec
// so that every sec 1 heartbeat is send.
int nullAttributes = 0;
int nonNullAttributes = 0;
dummyAttributesProviderRef.setDescriptors(ImmutableSet.of(attribute1));
for (int i = 0; i < 5; i++) {
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
if (null == resourceTracker.attributes) {
nullAttributes++;
} else {
assertTrue(NodeLabelUtil.isNodeAttributesEquals(ImmutableSet.of(attribute1),
resourceTracker.attributes),
"In heartbeat PI attributes should be send");
nonNullAttributes++;
}
resourceTracker.resetNMHeartbeatReceiveFlag();
Thread.sleep(1000);
}
assertTrue(nullAttributes > 1,
"More than one heartbeat with empty attributes expected");
assertTrue(nonNullAttributes > 1,
"More than one heartbeat with attributes expected");
nm.stop();
}
@Test
@Timeout(value = 20)
public void testInvalidNodeAttributesFromProvider()
throws InterruptedException, IOException, TimeoutException {
final ResourceTrackerForAttributes resourceTracker =
new ResourceTrackerForAttributes();
nm = new NodeManager() {
@Override protected NodeAttributesProvider createNodeAttributesProvider(
Configuration conf) throws IOException {
return dummyAttributesProviderRef;
}
@Override protected NodeStatusUpdater createNodeStatusUpdater(
Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker) {
return new NodeStatusUpdaterImpl(context, dispatcher, healthChecker,
metrics) {
@Override protected ResourceTracker getRMClient() {
return resourceTracker;
}
@Override protected void stopRMProxy() {
return;
}
};
}
};
YarnConfiguration conf = createNMConfigForDistributeNodeAttributes();
conf.set(YarnConfiguration.NM_LOCALIZER_ADDRESS,
"0.0.0.0:" + ServerSocketUtil.getPort(8040, 10));
nm.init(conf);
resourceTracker.resetNMHeartbeatReceiveFlag();
nm.start();
resourceTracker.waitTillRegister();
assertTrue(NodeLabelUtil
.isNodeAttributesEquals(dummyAttributesProviderRef.getDescriptors(),
resourceTracker.attributes));
resourceTracker.waitTillHeartbeat(); // wait till the first heartbeat
resourceTracker.resetNMHeartbeatReceiveFlag();
// update attribute1
NodeAttribute attribute1 = NodeAttribute
.newInstance(NodeAttribute.PREFIX_DISTRIBUTED, "Attr1",
NodeAttributeType.STRING, "V1");
dummyAttributesProviderRef.setDescriptors(ImmutableSet.of(attribute1));
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
assertTrue(NodeLabelUtil.isNodeAttributesEquals(ImmutableSet.of(attribute1),
resourceTracker.attributes));
resourceTracker.resetNMHeartbeatReceiveFlag();
// update attribute2
NodeAttribute attribute2 = NodeAttribute
.newInstance(NodeAttribute.PREFIX_DISTRIBUTED, "Attr2",
NodeAttributeType.STRING, "V2");
dummyAttributesProviderRef.setDescriptors(ImmutableSet.of(attribute2));
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
assertTrue(NodeLabelUtil.isNodeAttributesEquals(ImmutableSet.of(attribute2),
resourceTracker.attributes));
resourceTracker.resetNMHeartbeatReceiveFlag();
// update attribute2 & attribute2
dummyAttributesProviderRef
.setDescriptors(ImmutableSet.of(attribute1, attribute2));
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
assertTrue(NodeLabelUtil
.isNodeAttributesEquals(ImmutableSet.of(attribute1, attribute2),
resourceTracker.attributes));
resourceTracker.resetNMHeartbeatReceiveFlag();
// heartbeat with invalid attributes
NodeAttribute invalidAttribute = NodeAttribute
.newInstance("_.P", "Attr1", NodeAttributeType.STRING, "V1");
dummyAttributesProviderRef
.setDescriptors(ImmutableSet.of(invalidAttribute));
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
assertNull(resourceTracker.attributes,
"On Invalid Attributes we need to retain earlier attributes, HB"
+ " needs to send null");
resourceTracker.resetNMHeartbeatReceiveFlag();
// on next heartbeat same invalid attributes will be given by the provider,
// but again validation check and reset RM with invalid attributes set
// should not happen
sendOutofBandHeartBeat();
resourceTracker.waitTillHeartbeat();
assertNull(resourceTracker.attributes,
"NodeStatusUpdater need not send repeatedly empty attributes on"
+ " invalid attributes from provider ");
resourceTracker.resetNMHeartbeatReceiveFlag();
}
/**
* This is to avoid race condition in the test case. NodeStatusUpdater
* heartbeat thread after sending the heartbeat needs some time to process the
* response and then go wait state. But in the test case once the main test
* thread returns back after resourceTracker.waitTillHeartbeat() we proceed
* with next sendOutofBandHeartBeat before heartbeat thread is blocked on
* wait.
* @throws InterruptedException
* @throws IOException
*/
private void sendOutofBandHeartBeat()
throws InterruptedException, IOException {
int i = 0;
do {
State statusUpdaterThreadState =
((NodeStatusUpdaterImpl) nm.getNodeStatusUpdater())
.getStatusUpdaterThreadState();
if (statusUpdaterThreadState.equals(Thread.State.TIMED_WAITING)
|| statusUpdaterThreadState.equals(Thread.State.WAITING)) {
nm.getNodeStatusUpdater().sendOutofBandHeartBeat();
break;
}
if (++i <= 10) {
Thread.sleep(50);
} else {
throw new IOException("Waited for 500 ms"
+ " but NodeStatusUpdaterThread not in waiting state");
}
} while (true);
}
}
| DummyNodeAttributesProvider |
java | google__jimfs | jimfs/src/test/java/com/google/common/jimfs/AbstractJimfsIntegrationTest.java | {
"start": 1133,
"end": 2187
} | class ____ {
protected FileSystem fs;
@Before
public void setUp() throws IOException {
fs = createFileSystem();
}
@After
public void tearDown() throws IOException {
fs.close();
}
/** Creates the file system to use in the tests. */
protected abstract FileSystem createFileSystem();
// helpers
protected Path path(String first, String... more) {
return fs.getPath(first, more);
}
protected Object getFileKey(String path, LinkOption... options) throws IOException {
return Files.getAttribute(path(path), "fileKey", options);
}
protected PathSubject assertThatPath(String path, LinkOption... options) {
return assertThatPath(path(path), options);
}
protected static PathSubject assertThatPath(Path path, LinkOption... options) {
PathSubject subject = assert_().about(paths()).that(path);
if (options.length != 0) {
subject = subject.noFollowLinks();
}
return subject;
}
/** Tester for testing changes in file times. */
protected static final | AbstractJimfsIntegrationTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.