language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/restore/DistributeStateHandlerHelper.java
|
{
"start": 1840,
"end": 7608
}
|
class ____ implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(DistributeStateHandlerHelper.class);
private final IncrementalLocalKeyedStateHandle stateHandle;
private final RestoredDBInstance restoredDbInstance;
private final int keyGroupPrefixBytes;
private final KeyGroupRange keyGroupRange;
private final String operatorIdentifier;
private final int index;
/**
* Creates a helper for processing a single state handle. The database instance is created in
* the constructor to enable proper resource management and separation of concerns.
*
* @param stateHandle the state handle to process
* @param columnFamilyOptionsFactory factory for creating column family options
* @param dbOptions database options
* @param ttlCompactFiltersManager TTL compact filters manager (can be null)
* @param writeBufferManagerCapacity write buffer manager capacity (can be null)
* @param keyGroupPrefixBytes number of key group prefix bytes for SST file range checking
* @param keyGroupRange target key group range (for logging)
* @param operatorIdentifier operator identifier (for logging)
* @param index current processing index (for logging)
* @throws Exception on any database opening error
*/
public DistributeStateHandlerHelper(
IncrementalLocalKeyedStateHandle stateHandle,
List<StateMetaInfoSnapshot> stateMetaInfoSnapshots,
Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory,
DBOptions dbOptions,
RocksDbTtlCompactFiltersManager ttlCompactFiltersManager,
Long writeBufferManagerCapacity,
int keyGroupPrefixBytes,
KeyGroupRange keyGroupRange,
String operatorIdentifier,
int index)
throws Exception {
this.stateHandle = stateHandle;
this.keyGroupPrefixBytes = keyGroupPrefixBytes;
this.keyGroupRange = keyGroupRange;
this.operatorIdentifier = operatorIdentifier;
this.index = index;
final String logLineSuffix = createLogLineSuffix();
LOG.debug("Opening temporary database : {}", logLineSuffix);
// Open database using restored instance helper method
this.restoredDbInstance =
RestoredDBInstance.restoreTempDBInstanceFromLocalState(
stateHandle,
stateMetaInfoSnapshots,
columnFamilyOptionsFactory,
dbOptions,
ttlCompactFiltersManager,
writeBufferManagerCapacity);
}
/**
* Distributes state handle data by checking SST file ranges and exporting column families.
* Returns Left if successfully exported, Right if the handle was skipped.
*
* @param exportCfBasePath base path for export
* @param exportedColumnFamiliesOut output parameter for exported column families
* @return Either.Left containing key group range if successfully exported, Either.Right
* containing the skipped state handle otherwise
* @throws Exception on any export error
*/
public Either<KeyGroupRange, IncrementalLocalKeyedStateHandle> tryDistribute(
Path exportCfBasePath,
Map<RegisteredStateMetaInfoBase.Key, List<ExportImportFilesMetaData>>
exportedColumnFamiliesOut)
throws Exception {
final String logLineSuffix = createLogLineSuffix();
List<ColumnFamilyHandle> tmpColumnFamilyHandles = restoredDbInstance.columnFamilyHandles;
LOG.debug("Checking actual keys of sst files {}", logLineSuffix);
// Check SST file range
RocksDBIncrementalCheckpointUtils.RangeCheckResult rangeCheckResult =
RocksDBIncrementalCheckpointUtils.checkSstDataAgainstKeyGroupRange(
restoredDbInstance.db, keyGroupPrefixBytes, stateHandle.getKeyGroupRange());
LOG.info("{} {}", rangeCheckResult, logLineSuffix);
if (rangeCheckResult.allInRange()) {
LOG.debug("Start exporting {}", logLineSuffix);
List<RegisteredStateMetaInfoBase> registeredStateMetaInfoBases =
restoredDbInstance.stateMetaInfoSnapshots.stream()
.map(RegisteredStateMetaInfoBase::fromMetaInfoSnapshot)
.collect(Collectors.toList());
// Export all the Column Families and store the result in exportedColumnFamiliesOut
RocksDBIncrementalCheckpointUtils.exportColumnFamilies(
restoredDbInstance.db,
tmpColumnFamilyHandles,
registeredStateMetaInfoBases,
exportCfBasePath,
exportedColumnFamiliesOut);
LOG.debug("Done exporting {}", logLineSuffix);
return Either.Left(stateHandle.getKeyGroupRange());
} else {
LOG.debug("Skipped export {}", logLineSuffix);
return Either.Right(stateHandle);
}
}
@Override
public void close() throws Exception {
restoredDbInstance.close();
}
/** Creates a consistent log line suffix for logging operations. */
private String createLogLineSuffix() {
return " for state handle at index "
+ index
+ " with proclaimed key-group range "
+ stateHandle.getKeyGroupRange().prettyPrintInterval()
+ " for backend with range "
+ keyGroupRange.prettyPrintInterval()
+ " in operator "
+ operatorIdentifier
+ ".";
}
}
|
DistributeStateHandlerHelper
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_doesNotContainAnyWhitespaces_Test.java
|
{
"start": 1145,
"end": 2044
}
|
class ____ {
@ParameterizedTest
@NullSource
@ValueSource(strings = {
"",
"a",
"bc"
})
void should_pass_if_actual_does_not_contain_any_whitespaces(String actual) {
// WHEN/THEN
assertThat(actual).doesNotContainAnyWhitespaces();
}
@ParameterizedTest
@ValueSource(strings = {
" ",
"\t", // tab
"\n", // line feed
"\r", // carriage return
" \n\r ",
"a ",
"a b",
"a b",
"a\tb", // tab
"a\nb", // line feed
"a\rb", // carriage return
"a \n\r b"
})
void should_fail_if_actual_contains_whitespaces(String actual) {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).doesNotContainAnyWhitespaces());
// THEN
then(assertionError).hasMessage(shouldNotContainAnyWhitespaces(actual).create());
}
}
|
CharSequenceAssert_doesNotContainAnyWhitespaces_Test
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/propertyconfigurer/consumer2/PropertySourcesConfigurerTest.java
|
{
"start": 1737,
"end": 3424
}
|
class ____ {
@BeforeAll
public static void beforeAll() {
DubboBootstrap.reset();
}
@AfterAll
public static void afterAll() {
DubboBootstrap.reset();
}
@Test
void testEarlyInit() {
ClassPathXmlApplicationContext providerContext = new ClassPathXmlApplicationContext(
"org/apache/dubbo/config/spring/propertyconfigurer/provider/dubbo-provider.xml");
try {
providerContext.start();
// consumer app
// Resolve placeholder by PropertySourcesPlaceholderConfigurer in dubbo-consumer.xml, without import
// property source.
AnnotationConfigApplicationContext context =
new AnnotationConfigApplicationContext(ConsumerConfiguration.class);
try {
context.start();
HelloService service = (HelloService) context.getBean("demoService");
String result = service.sayHello("world");
Assertions.assertEquals(
"Hello world, response from provider: " + InetSocketAddress.createUnresolved("127.0.0.1", 0),
result);
} finally {
context.close();
}
} finally {
providerContext.close();
}
}
@Configuration
@EnableDubbo(scanBasePackages = "org.apache.dubbo.config.spring.propertyconfigurer.consumer2")
@ComponentScan(value = {"org.apache.dubbo.config.spring.propertyconfigurer.consumer2"})
@ImportResource("classpath:/org/apache/dubbo/config/spring/propertyconfigurer/consumer2/dubbo-consumer.xml")
static
|
PropertySourcesConfigurerTest
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/support/EntityManagerBeanDefinitionRegistrarPostProcessorUnitTests.java
|
{
"start": 1462,
"end": 3535
}
|
class ____ {
@Test // DATAJPA-453
void findsBeanDefinitionInParentBeanFactory() {
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.registerBeanDefinition("factory", new RootBeanDefinition(LocalContainerEntityManagerFactoryBean.class));
ConfigurableListableBeanFactory childFactory = new DefaultListableBeanFactory(beanFactory);
BeanFactoryPostProcessor processor = new EntityManagerBeanDefinitionRegistrarPostProcessor();
processor.postProcessBeanFactory(childFactory);
assertThat(beanFactory.getBeanDefinitionCount()).isEqualTo(2);
}
@Test // DATAJPA-1005, DATAJPA-1045
void discoversFactoryBeanReturningConcreteEntityManagerFactoryType() {
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(StubEntityManagerFactoryBean.class);
builder.addConstructorArgValue(SpecialEntityManagerFactory.class);
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.registerBeanDefinition("factory", builder.getBeanDefinition());
BeanFactoryPostProcessor processor = new EntityManagerBeanDefinitionRegistrarPostProcessor();
processor.postProcessBeanFactory(beanFactory);
assertThat(beanFactory.getBeanDefinitionCount()).isEqualTo(2);
}
@Test // gh-2699
void avoidsDuplicateBeanRegistrations() {
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(StubEntityManagerFactoryBean.class);
builder.addConstructorArgValue(SpecialEntityManagerFactory.class);
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.registerBeanDefinition("factory", builder.getBeanDefinition());
beanFactory.registerBeanDefinition("jpaSharedEM_AWC_factory",
BeanDefinitionBuilder.rootBeanDefinition(Object.class).getBeanDefinition());
BeanFactoryPostProcessor processor = new EntityManagerBeanDefinitionRegistrarPostProcessor();
processor.postProcessBeanFactory(beanFactory);
assertThat(beanFactory.getBeanDefinitionCount()).isEqualTo(2);
}
|
EntityManagerBeanDefinitionRegistrarPostProcessorUnitTests
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/util/ByteBufferUtil.java
|
{
"start": 4837,
"end": 5268
}
|
class ____ {
@Synthetic final int offset;
@Synthetic final int limit;
@Synthetic final byte[] data;
// PMD.ArrayIsStoredDirectly Copying would be prohibitively expensive and/or lead to OOMs.
@SuppressWarnings("PMD.ArrayIsStoredDirectly")
SafeArray(@NonNull byte[] data, int offset, int limit) {
this.data = data;
this.offset = offset;
this.limit = limit;
}
}
private static
|
SafeArray
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java
|
{
"start": 69224,
"end": 70423
}
|
class ____ {}
// BUG: Diagnostic contains: [QualifyTypeLocalClassChecker] Object
new Object() {};
}
Test() { // in constructor
// BUG: Diagnostic contains: [QualifyTypeLocalClassChecker] Object
new Object() {};
}
static Object staticMethod() {
// BUG: Diagnostic contains: [QualifyTypeLocalClassChecker] Object
return new Object() {};
}
Object instanceMethod() {
// BUG: Diagnostic contains: [QualifyTypeLocalClassChecker] Object
return new Object() {};
}
void lambda() {
Supplier<Object> consumer =
() -> {
// BUG: Diagnostic contains: [QualifyTypeLocalClassChecker] Object
return new Object() {};
};
}
}
""")
.doTest();
}
/** Test checker that adds @SuppressWarnings when compilation succeeds in the current unit. */
@BugPattern(summary = "", severity = ERROR)
public static final
|
InInstanceInitializer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparisonTests.java
|
{
"start": 860,
"end": 3689
}
|
class ____ extends ESTestCase {
public void testSerializationOfBinaryComparisonOperation() throws IOException {
for (BinaryComparisonOperation op : BinaryComparisonOperation.values()) {
BinaryComparisonOperation newOp = copyWriteable(
op,
new NamedWriteableRegistry(List.of()),
BinaryComparisonOperation::readFromStream
);
assertEquals(op, newOp);
}
}
/**
* Test that a serialized
* {@code BinaryComparisonOperation}
* from {@code org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison}
* can be read back as a
* {@link BinaryComparisonOperation}
*/
public void testCompatibleWithQLBinaryComparisonOperation() throws IOException {
validateCompatibility(
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.EQ,
BinaryComparisonOperation.EQ
);
validateCompatibility(
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.NEQ,
BinaryComparisonOperation.NEQ
);
validateCompatibility(
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.GT,
BinaryComparisonOperation.GT
);
validateCompatibility(
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.GTE,
BinaryComparisonOperation.GTE
);
validateCompatibility(
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.LT,
BinaryComparisonOperation.LT
);
validateCompatibility(
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.LTE,
BinaryComparisonOperation.LTE
);
}
private static void validateCompatibility(
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation original,
BinaryComparisonOperation expected
) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setTransportVersion(TransportVersion.current());
output.writeEnum(original);
try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), new NamedWriteableRegistry(List.of()))) {
in.setTransportVersion(TransportVersion.current());
BinaryComparisonOperation newOp = BinaryComparisonOperation.readFromStream(in);
assertEquals(expected, newOp);
}
}
}
}
|
EsqlBinaryComparisonTests
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/web/reactive/server/samples/ExchangeMutatorTests.java
|
{
"start": 2384,
"end": 2557
}
|
class ____ {
@GetMapping("/userIdentity")
public String handle(Principal principal) {
return "Hello " + principal.getName() + "!";
}
}
private static
|
TestController
|
java
|
resilience4j__resilience4j
|
resilience4j-feign/src/test/java/io/github/resilience4j/feign/test/TestFeignDecorator.java
|
{
"start": 274,
"end": 1189
}
|
class ____ implements FeignDecorator {
private volatile boolean called = false;
private volatile CheckedFunction<Object[], Object> alternativeFunction;
public boolean isCalled() {
return called;
}
public void setCalled(boolean called) {
this.called = called;
}
public CheckedFunction<Object[], Object> getAlternativeFunction() {
return alternativeFunction;
}
public void setAlternativeFunction(CheckedFunction<Object[], Object> alternativeFunction) {
this.alternativeFunction = alternativeFunction;
}
@Override
public CheckedFunction<Object[], Object> decorate(
CheckedFunction<Object[], Object> invocationCall,
Method method, MethodHandler methodHandler,
Target<?> target) {
called = true;
return alternativeFunction != null ? alternativeFunction : invocationCall;
}
}
|
TestFeignDecorator
|
java
|
micronaut-projects__micronaut-core
|
function/src/main/java/io/micronaut/function/executor/AbstractFunctionExecutor.java
|
{
"start": 912,
"end": 1018
}
|
class ____<I, O, C> extends AbstractExecutor<C> implements FunctionExecutor<I, O> {
}
|
AbstractFunctionExecutor
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlJsonArrayFunctionWrapper.java
|
{
"start": 1310,
"end": 2022
}
|
class ____ extends SqlJsonArrayFunction {
@Override
public RelDataType inferReturnType(SqlOperatorBinding opBinding) {
RelDataType returnType = VARCHAR_NOT_NULL.inferReturnType(opBinding);
if (returnType == null) {
throw new IllegalArgumentException(
"Cannot infer return type for "
+ opBinding.getOperator()
+ "; operand types: "
+ opBinding.collectOperandTypes());
} else {
return returnType;
}
}
@Override
public SqlReturnTypeInference getReturnTypeInference() {
return VARCHAR_NOT_NULL;
}
}
|
SqlJsonArrayFunctionWrapper
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/ai/model/mcp/registry/StdioTransport.java
|
{
"start": 1021,
"end": 1211
}
|
class ____ {
private String type = "stdio";
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
}
|
StdioTransport
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UngroupedOverloadsTest.java
|
{
"start": 18819,
"end": 19169
}
|
class ____ {
void foo() {}
void bar() {}
static void foo(int x) {}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void staticAndNonStaticInterspersed() {
compilationHelper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java
|
{
"start": 1930,
"end": 10759
}
|
class ____ extends Rule<LogicalPlan, LogicalPlan> {
@Override
public LogicalPlan apply(LogicalPlan plan) {
return pruneColumns(plan, plan.outputSet().asBuilder(), false);
}
private static LogicalPlan pruneColumns(LogicalPlan plan, AttributeSet.Builder used, boolean inlineJoin) {
Holder<Boolean> forkPresent = new Holder<>(false);
// while going top-to-bottom (upstream)
return plan.transformDown(p -> {
// Note: It is NOT required to do anything special for binary plans like JOINs, except INLINE STATS. It is perfectly fine that
// transformDown descends first into the left side, adding all kinds of attributes to the `used` set, and then descends into
// the right side - even though the `used` set will contain stuff only used in the left hand side. That's because any attribute
// that is used in the left hand side must have been created in the left side as well. Even field attributes belonging to the
// same index fields will have different name ids in the left and right hand sides - as in the extreme example
// `FROM lookup_idx | LOOKUP JOIN lookup_idx ON key_field`.
// TODO: revisit with every new command
// skip nodes that simply pass the input through and use no references
if (p instanceof Limit || p instanceof Sample) {
return p;
}
if (p instanceof Fork) {
forkPresent.set(true);
}
// pruning columns for Fork branches can have the side effect of having misaligned outputs
if (forkPresent.get()) {
return p;
}
var recheck = new Holder<Boolean>();
// analyze the unused items against dedicated 'producer' nodes such as Eval and Aggregate
// perform a loop to retry checking if the current node is completely eliminated
do {
recheck.set(false);
p = switch (p) {
case Aggregate agg -> pruneColumnsInAggregate(agg, used, inlineJoin);
case InlineJoin inj -> pruneColumnsInInlineJoinRight(inj, used, recheck);
case Eval eval -> pruneColumnsInEval(eval, used, recheck);
case Project project -> inlineJoin ? pruneColumnsInProject(project, used) : p;
case EsRelation esr -> pruneColumnsInEsRelation(esr, used);
default -> p;
};
} while (recheck.get());
used.addAll(p.references());
// preserve the state before going to the next node
return p;
});
}
private static LogicalPlan pruneColumnsInAggregate(Aggregate aggregate, AttributeSet.Builder used, boolean inlineJoin) {
LogicalPlan p = aggregate;
var remaining = pruneUnusedAndAddReferences(aggregate.aggregates(), used);
if (remaining == null) {
return p;
}
if (remaining.isEmpty()) {
if (inlineJoin) {
p = emptyLocalRelation(aggregate);
} else if (aggregate.groupings().isEmpty()) {
// We still need to have a plan that produces 1 row per group.
p = new LocalRelation(
aggregate.source(),
List.of(Expressions.attribute(aggregate.aggregates().getFirst())),
LocalSupplier.of(new Page(BlockUtils.constantBlock(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, null, 1)))
);
} else {
// Aggs cannot produce pages with 0 columns, so retain one grouping.
Attribute attribute = Expressions.attribute(aggregate.groupings().getFirst());
NamedExpression firstAggregate = aggregate.aggregates().getFirst();
remaining = List.of(new Alias(firstAggregate.source(), firstAggregate.name(), attribute, attribute.id()));
p = aggregate.with(aggregate.groupings(), remaining);
}
} else {
// not expecting high groups cardinality, nested loops in lists should be fine, no need for a HashSet
if (inlineJoin && aggregate.groupings().containsAll(remaining)) {
// An InlineJoin right-hand side aggregation output had everything pruned, except for (some of the) groupings, which are
// already part of the IJ output (from the left-hand side): the agg can just be dropped entirely.
p = emptyLocalRelation(aggregate);
} else { // not an INLINEJOIN or there are actually aggregates to compute
p = aggregate.with(aggregate.groupings(), remaining);
}
}
return p;
}
private static LogicalPlan pruneColumnsInInlineJoinRight(InlineJoin ij, AttributeSet.Builder used, Holder<Boolean> recheck) {
LogicalPlan p = ij;
used.addAll(ij.references());
var right = pruneColumns(ij.right(), used, true);
if (right.output().isEmpty() || isLocalEmptyRelation(right)) {
// InlineJoin updates the order of the output, so even if the computation is dropped, the groups need to be pulled to the end.
// So we keep just the left side of the join (i.e. drop the computations), but place a Project on top to keep the right order.
List<Attribute> newOutput = new ArrayList<>(ij.output());
AttributeSet leftOutputSet = ij.left().outputSet();
newOutput.removeIf(attr -> leftOutputSet.contains(attr) == false);
p = new Project(ij.source(), ij.left(), newOutput);
recheck.set(true);
} else if (right != ij.right()) {
// if the right side has been updated, replace it
p = ij.replaceRight(right);
}
return p;
}
private static LogicalPlan pruneColumnsInEval(Eval eval, AttributeSet.Builder used, Holder<Boolean> recheck) {
LogicalPlan p = eval;
var remaining = pruneUnusedAndAddReferences(eval.fields(), used);
// no fields, no eval
if (remaining != null) {
if (remaining.isEmpty()) {
p = eval.child();
recheck.set(true);
} else {
p = new Eval(eval.source(), eval.child(), remaining);
}
}
return p;
}
// Note: only run when the Project is a descendent of an InlineJoin.
private static LogicalPlan pruneColumnsInProject(Project project, AttributeSet.Builder used) {
LogicalPlan p = project;
var remaining = pruneUnusedAndAddReferences(project.projections(), used);
if (remaining != null) {
p = remaining.isEmpty() ? emptyLocalRelation(project) : new Project(project.source(), project.child(), remaining);
}
return p;
}
private static LogicalPlan pruneColumnsInEsRelation(EsRelation esr, AttributeSet.Builder used) {
LogicalPlan p = esr;
if (esr.indexMode() == IndexMode.LOOKUP) {
// Normally, pruning EsRelation has no effect because InsertFieldExtraction only extracts the required fields, anyway.
// However, InsertFieldExtraction can't be currently used in LOOKUP JOIN right index,
// it works differently as we extract all fields (other than the join key) that the EsRelation has.
var remaining = pruneUnusedAndAddReferences(esr.output(), used);
if (remaining != null) {
p = esr.withAttributes(remaining);
}
}
return p;
}
private static LogicalPlan emptyLocalRelation(UnaryPlan plan) {
// create an empty local relation with no attributes
return skipPlan(plan);
}
private static boolean isLocalEmptyRelation(LogicalPlan plan) {
return plan instanceof LocalRelation local && local.hasEmptySupplier();
}
/**
* Prunes attributes from the `named` list that are not found in the given set (builder).
* Returns null if no pruning occurred.
* As a side effect, the references of the kept attributes are added to the input set (builder) -- irrespective of the return value.
*/
private static <N extends NamedExpression> List<N> pruneUnusedAndAddReferences(List<N> named, AttributeSet.Builder used) {
var clone = new ArrayList<>(named);
for (var it = clone.listIterator(clone.size()); it.hasPrevious();) {
N prev = it.previous();
var attr = prev.toAttribute();
if (used.contains(attr)) {
used.addAll(prev.references());
} else {
it.remove();
}
}
return clone.size() != named.size() ? clone : null;
}
}
|
PruneColumns
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/configuration/internal/metadata/reader/ComponentAuditingData.java
|
{
"start": 482,
"end": 1480
}
|
class ____ extends PropertyAuditingData implements AuditedPropertiesHolder {
private final Map<String, PropertyAuditingData> properties;
public ComponentAuditingData() {
this.properties = newHashMap();
}
@Override
public boolean isEmpty() {
return properties.isEmpty();
}
@Override
public void addPropertyAuditingData(String propertyName, PropertyAuditingData auditingData) {
properties.put( propertyName, auditingData );
}
@Override
public PropertyAuditingData getPropertyAuditingData(String propertyName) {
return properties.get( propertyName );
}
@Override
public boolean contains(String propertyName) {
return properties.containsKey( propertyName );
}
public Set<String> getPropertyNames() {
return properties.keySet();
}
public Iterable<String> getNonSyntheticPropertyNames() {
return properties.entrySet().stream()
.filter( e -> !e.getValue().isSynthetic() )
.map( Map.Entry::getKey )
.collect( Collectors.toList() );
}
}
|
ComponentAuditingData
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/TooLongHttpContentException.java
|
{
"start": 880,
"end": 1545
}
|
class ____ extends TooLongFrameException {
private static final long serialVersionUID = 3238341182129476117L;
/**
* Creates a new instance.
*/
public TooLongHttpContentException() {
}
/**
* Creates a new instance.
*/
public TooLongHttpContentException(String message, Throwable cause) {
super(message, cause);
}
/**
* Creates a new instance.
*/
public TooLongHttpContentException(String message) {
super(message);
}
/**
* Creates a new instance.
*/
public TooLongHttpContentException(Throwable cause) {
super(cause);
}
}
|
TooLongHttpContentException
|
java
|
dropwizard__dropwizard
|
dropwizard-util/src/test/java/io/dropwizard/util/ThrowablesTest.java
|
{
"start": 232,
"end": 2876
}
|
class ____ {
@Test
void findsNothingFromNull() {
assertThat(findThrowableInChain(t -> true, null)).isEmpty();
}
@Test
void findsSimpleException() {
final RuntimeException e = new RuntimeException();
assertThat(findThrowableInChain(t -> t instanceof RuntimeException, e)).contains(e);
assertThat(findThrowableInChain(t -> false, e)).isEmpty();
}
@Test
void findsChainedException() {
final RuntimeException first = new RuntimeException("first");
final RuntimeException second = new RuntimeException("second", first);
final RuntimeException third = new RuntimeException("third", second);
assertThat(findThrowableInChain(t -> "third".equals(t.getMessage()), third)).contains(third);
assertThat(findThrowableInChain(t -> "second".equals(t.getMessage()), third)).contains(second);
assertThat(findThrowableInChain(t -> "first".equals(t.getMessage()), third)).contains(first);
assertThat(findThrowableInChain(t -> false, third)).isEmpty();
}
@Test
void ignoresCircularChains() {
// fifth -> fourth -> third -> second -> first
// ^ /
// \-----------------
final LateBoundCauseException first = new LateBoundCauseException("first");
final RuntimeException second = new RuntimeException("second", first);
final RuntimeException third = new RuntimeException("third", second);
first.setCause(third);
final RuntimeException fourth = new RuntimeException("fourth", third);
final RuntimeException fifth = new RuntimeException("fifth", fourth);
assertThat(findThrowableInChain(t -> "fifth".equals(t.getMessage()), fifth)).contains(fifth);
assertThat(findThrowableInChain(t -> "fourth".equals(t.getMessage()), fifth)).contains(fourth);
assertThat(findThrowableInChain(t -> "third".equals(t.getMessage()), fifth)).contains(third);
assertThat(findThrowableInChain(t -> "second".equals(t.getMessage()), fifth)).contains(second);
assertThat(findThrowableInChain(t -> "first".equals(t.getMessage()), fifth)).contains(first);
assertThat(findThrowableInChain(t -> false, fifth)).isEmpty();
// Starting in the loop
assertThat(findThrowableInChain(t -> "third".equals(t.getMessage()), second)).contains(third);
assertThat(findThrowableInChain(t -> "fourth".equals(t.getMessage()), second)).isEmpty();
}
/**
* An Exception which allows the cause to be overridden after creation
*/
private static
|
ThrowablesTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostConstructAndAutowiringTests.java
|
{
"start": 3168,
"end": 3299
}
|
class ____ {
TestBean testBean;
@Autowired
void setTestBean(TestBean testBean) {
this.testBean = testBean;
}
}
}
|
Config2
|
java
|
reactor__reactor-core
|
reactor-core/src/jcstress/java/reactor/core/publisher/FluxBufferTimeoutStressTest.java
|
{
"start": 1484,
"end": 1683
}
|
class ____ {
@JCStressTest
@Outcome(id = "1, 1", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "2, 1", expect = Expect.ACCEPTABLE, desc = "")
@State
public static
|
FluxBufferTimeoutStressTest
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/inject/AssistedInjectAndInjectOnConstructors.java
|
{
"start": 1766,
"end": 1884
}
|
class ____ extends BugChecker implements ClassTreeMatcher {
/**
* Matches if a
|
AssistedInjectAndInjectOnConstructors
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/SocketOptions.java
|
{
"start": 11490,
"end": 18062
}
|
class ____ {
private int count = DEFAULT_COUNT;
private boolean enabled = DEFAULT_SO_KEEPALIVE;
private Duration idle = DEFAULT_IDLE;
private Duration interval = DEFAULT_INTERVAL;
private Builder() {
}
/**
* Set the maximum number of keepalive probes TCP should send before dropping the connection. Defaults to {@code 9}.
* See also {@link #DEFAULT_COUNT} and {@code TCP_KEEPCNT}.
*
* @param count the maximum number of keepalive probes TCP
* @return {@code this}
*/
public KeepAliveOptions.Builder count(int count) {
LettuceAssert.isTrue(count >= 0, "Count must be greater 0");
this.count = count;
return this;
}
/**
* Enable TCP keepalive. Defaults to disabled. See {@link #DEFAULT_SO_KEEPALIVE}.
*
* @return {@code this}
* @see java.net.SocketOptions#SO_KEEPALIVE
*/
public KeepAliveOptions.Builder enable() {
return enable(true);
}
/**
* Disable TCP keepalive. Defaults to disabled. See {@link #DEFAULT_SO_KEEPALIVE}.
*
* @return {@code this}
* @see java.net.SocketOptions#SO_KEEPALIVE
*/
public KeepAliveOptions.Builder disable() {
return enable(false);
}
/**
* Enable TCP keepalive. Defaults to {@code false}. See {@link #DEFAULT_SO_KEEPALIVE}.
*
* @param enabled whether to enable TCP keepalive.
* @return {@code this}
* @see java.net.SocketOptions#SO_KEEPALIVE
*/
public KeepAliveOptions.Builder enable(boolean enabled) {
this.enabled = enabled;
return this;
}
/**
* The time the connection needs to remain idle before TCP starts sending keepalive probes if keepalive is enabled.
* Defaults to {@code 2 hours}. See also @link {@link #DEFAULT_IDLE} and {@code TCP_KEEPIDLE}.
* <p>
* The time granularity of is seconds.
*
* @param idle connection idle time, must be greater {@literal 0}.
* @return {@code this}
*/
public KeepAliveOptions.Builder idle(Duration idle) {
LettuceAssert.notNull(idle, "Idle time must not be null");
LettuceAssert.isTrue(!idle.isNegative(), "Idle time must not be negative");
this.idle = idle;
return this;
}
/**
* The time between individual keepalive probes. Defaults to {@code 75 second}. See also {@link #DEFAULT_INTERVAL}
* and {@code TCP_KEEPINTVL}.
* <p>
* The time granularity of is seconds.
*
* @param interval connection interval time, must be greater {@literal 0}
* @return {@code this}
*/
public KeepAliveOptions.Builder interval(Duration interval) {
LettuceAssert.notNull(interval, "Interval time must not be null");
LettuceAssert.isTrue(!interval.isNegative(), "Interval time must not be negative");
this.interval = interval;
return this;
}
/**
* Create a new instance of {@link KeepAliveOptions}
*
* @return new instance of {@link KeepAliveOptions}
*/
public KeepAliveOptions build() {
return new KeepAliveOptions(this);
}
}
/**
* Returns a builder to create new {@link KeepAliveOptions} whose settings are replicated from the current
* {@link KeepAliveOptions}.
*
* @return a {@link KeepAliveOptions.Builder} to create new {@link KeepAliveOptions} whose settings are replicated from
* the current {@link KeepAliveOptions}
*/
public KeepAliveOptions.Builder mutate() {
KeepAliveOptions.Builder builder = builder();
builder.enabled = this.isEnabled();
builder.count = this.getCount();
builder.idle = this.getIdle();
builder.interval = this.getInterval();
return builder;
}
/**
* Returns the maximum number of keepalive probes TCP should send before dropping the connection. Defaults to {@code 9}.
* See also {@link #DEFAULT_COUNT} and {@code TCP_KEEPCNT}.
*
* @return the maximum number of keepalive probes TCP should send before dropping the connection.
*/
public int getCount() {
return count;
}
/**
* Returns whether to enable TCP keepalive.
*
* @return whether to enable TCP keepalive
* @see java.net.SocketOptions#SO_KEEPALIVE
*/
public boolean isEnabled() {
return enabled;
}
/**
* The time the connection needs to remain idle before TCP starts sending keepalive probes if keepalive is enabled.
* Defaults to {@code 2 hours}. See also @link {@link #DEFAULT_IDLE} and {@code TCP_KEEPIDLE}.
* <p>
* The time granularity of is seconds.
*
* @return the time the connection needs to remain idle before TCP starts sending keepalive probes.
*/
public Duration getIdle() {
return idle;
}
/**
* The time between individual keepalive probes. Defaults to {@code 1 second}. See also {@link #DEFAULT_INTERVAL} and
* {@code TCP_KEEPINTVL}.
* <p>
* The time granularity of is seconds.
*
* @return the time the connection needs to remain idle before TCP starts sending keepalive probes.
*/
public Duration getInterval() {
return interval;
}
}
/**
* TCP_USER_TIMEOUT comes from <a href="https://datatracker.ietf.org/doc/html/rfc5482">RFC5482</a> , configuring this
* parameter can allow the user TCP to initiate a reconnection to solve this problem when the network is abnormal.
* <p>
* The timeout is currently only supported with epoll and io_uring native transports.
*
* @since 6.2.7
*/
public static
|
Builder
|
java
|
square__okhttp
|
module-tests/src/test/java/okhttp3/modules/test/JavaModuleTest.java
|
{
"start": 1098,
"end": 2201
}
|
class ____ {
@Test
public void testVisibility() {
// Just check we can run code that depends on OkHttp types
OkHttpCaller.callOkHttp(HttpUrl.get("https://square.com/robots.txt"));
}
@Test
public void testMockWebServer() throws IOException {
MockWebServer server = new MockWebServer();
server.enqueue(new MockResponse(200, Headers.of(), "Hello, Java9!"));
server.start();
// Just check we can run code that depends on OkHttp types
Call call = OkHttpCaller.callOkHttp(server.url("/"));
try (Response response = call.execute();) {
System.out.println(response.body().string());
}
}
@Test
public void testModules() {
Module okHttpModule = OkHttpClient.class.getModule();
assertEquals("okhttp3", okHttpModule.getName());
assertTrue(okHttpModule.getPackages().contains("okhttp3"));
Module loggingInterceptorModule = HttpLoggingInterceptor.class.getModule();
assertEquals("okhttp3.logging", loggingInterceptorModule.getName());
assertTrue(loggingInterceptorModule.getPackages().contains("okhttp3.logging"));
}
}
|
JavaModuleTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/file/FilerProducerAbsoluteTest.java
|
{
"start": 1134,
"end": 1912
}
|
class ____ extends ContextTestSupport {
private String path;
@Test
public void testProduceAbsoluteFile() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedFileExists(path + "/hello.txt");
template.sendBodyAndHeader("direct:report", "Hello World", Exchange.FILE_NAME, "hello.txt");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
path = testDirectory("absolute").toAbsolutePath().toString();
return new RouteBuilder() {
public void configure() {
from("direct:report").to("file://" + path, "mock:result");
}
};
}
}
|
FilerProducerAbsoluteTest
|
java
|
quarkusio__quarkus
|
independent-projects/tools/devtools-testing/src/main/java/io/quarkus/devtools/testing/codestarts/QuarkusCodestartTest.java
|
{
"start": 8184,
"end": 8600
}
|
class ____ (using `org.acme.ClassName` also works, it will be replaced by the
* project package name)
* @throws Throwable
*/
public void checkGeneratedTestSource(String className) throws Throwable {
for (Language language : languages) {
checkGeneratedTestSource(language, className);
}
}
/**
* It will validate (compare and check package name) the
|
name
|
java
|
apache__maven
|
compat/maven-embedder/src/main/java/org/fusesource/jansi/Ansi.java
|
{
"start": 4023,
"end": 4503
}
|
enum ____ {
FORWARD(0, "FORWARD"),
BACKWARD(1, "BACKWARD"),
ALL(2, "ALL");
private final int value;
private final String name;
Erase(int index, String name) {
this.value = index;
this.name = name;
}
@Override
public String toString() {
return name;
}
public int value() {
return value;
}
}
@FunctionalInterface
public
|
Erase
|
java
|
apache__flink
|
flink-metrics/flink-metrics-core/src/main/java/org/apache/flink/metrics/MetricGroup.java
|
{
"start": 1394,
"end": 7950
}
|
interface ____ {
// ------------------------------------------------------------------------
// Spans
// ------------------------------------------------------------------------
@Experimental
default void addSpan(SpanBuilder spanBuilder) {}
// ------------------------------------------------------------------------
// Events
// ------------------------------------------------------------------------
@Experimental
default void addEvent(EventBuilder eventBuilder) {}
// ------------------------------------------------------------------------
// Metrics
// ------------------------------------------------------------------------
/**
* Creates and registers a new {@link org.apache.flink.metrics.Counter} with Flink.
*
* @param name name of the counter
* @return the created counter
*/
default Counter counter(int name) {
return counter(String.valueOf(name));
}
/**
* Creates and registers a new {@link org.apache.flink.metrics.Counter} with Flink.
*
* @param name name of the counter
* @return the created counter
*/
Counter counter(String name);
/**
* Registers a {@link org.apache.flink.metrics.Counter} with Flink.
*
* @param name name of the counter
* @param counter counter to register
* @param <C> counter type
* @return the given counter
*/
default <C extends Counter> C counter(int name, C counter) {
return counter(String.valueOf(name), counter);
}
/**
* Registers a {@link org.apache.flink.metrics.Counter} with Flink.
*
* @param name name of the counter
* @param counter counter to register
* @param <C> counter type
* @return the given counter
*/
<C extends Counter> C counter(String name, C counter);
/**
* Registers a new {@link org.apache.flink.metrics.Gauge} with Flink.
*
* @param name name of the gauge
* @param gauge gauge to register
* @param <T> return type of the gauge
* @return the given gauge
*/
default <T, G extends Gauge<T>> G gauge(int name, G gauge) {
return gauge(String.valueOf(name), gauge);
}
/**
* Registers a new {@link org.apache.flink.metrics.Gauge} with Flink.
*
* @param name name of the gauge
* @param gauge gauge to register
* @param <T> return type of the gauge
* @return the given gauge
*/
<T, G extends Gauge<T>> G gauge(String name, G gauge);
/**
* Registers a new {@link Histogram} with Flink.
*
* @param name name of the histogram
* @param histogram histogram to register
* @param <H> histogram type
* @return the registered histogram
*/
<H extends Histogram> H histogram(String name, H histogram);
/**
* Registers a new {@link Histogram} with Flink.
*
* @param name name of the histogram
* @param histogram histogram to register
* @param <H> histogram type
* @return the registered histogram
*/
default <H extends Histogram> H histogram(int name, H histogram) {
return histogram(String.valueOf(name), histogram);
}
/**
* Registers a new {@link Meter} with Flink.
*
* @param name name of the meter
* @param meter meter to register
* @param <M> meter type
* @return the registered meter
*/
<M extends Meter> M meter(String name, M meter);
/**
* Registers a new {@link Meter} with Flink.
*
* @param name name of the meter
* @param meter meter to register
* @param <M> meter type
* @return the registered meter
*/
default <M extends Meter> M meter(int name, M meter) {
return meter(String.valueOf(name), meter);
}
// ------------------------------------------------------------------------
// Groups
// ------------------------------------------------------------------------
/**
* Creates a new MetricGroup and adds it to this groups sub-groups.
*
* @param name name of the group
* @return the created group
*/
default MetricGroup addGroup(int name) {
return addGroup(String.valueOf(name));
}
/**
* Creates a new MetricGroup and adds it to this groups sub-groups.
*
* @param name name of the group
* @return the created group
*/
MetricGroup addGroup(String name);
/**
* Creates a new key-value MetricGroup pair. The key group is added to this groups sub-groups,
* while the value group is added to the key group's sub-groups. This method returns the value
* group.
*
* <p>The only difference between calling this method and {@code
* group.addGroup(key).addGroup(value)} is that {@link #getAllVariables()} of the value group
* return an additional {@code "<key>"="value"} pair.
*
* @param key name of the first group
* @param value name of the second group
* @return the second created group
*/
MetricGroup addGroup(String key, String value);
// ------------------------------------------------------------------------
// Scope
// ------------------------------------------------------------------------
/**
* Gets the scope as an array of the scope components, for example {@code ["host-7",
* "taskmanager-2", "window_word_count", "my-mapper"]}.
*
* @see #getMetricIdentifier(String)
* @see #getMetricIdentifier(String, CharacterFilter)
*/
String[] getScopeComponents();
/**
* Returns a map of all variables and their associated value, for example {@code
* {"<host>"="host-7", "<tm_id>"="taskmanager-2"}}.
*
* @return map of all variables and their associated value
*/
Map<String, String> getAllVariables();
/**
* Returns the fully qualified metric name, for example {@code
* "host-7.taskmanager-2.window_word_count.my-mapper.metricName"}.
*
* @param metricName metric name
* @return fully qualified metric name
*/
String getMetricIdentifier(String metricName);
/**
* Returns the fully qualified metric name, for example {@code
* "host-7.taskmanager-2.window_word_count.my-mapper.metricName"}.
*
* @param metricName metric name
* @param filter character filter which is applied to the scope components if not null.
* @return fully qualified metric name
*/
String getMetricIdentifier(String metricName, CharacterFilter filter);
}
|
MetricGroup
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsHttpOperation.java
|
{
"start": 1150,
"end": 5064
}
|
class ____ {
@Test
public void testMaskingAndEncoding()
throws MalformedURLException, UnsupportedEncodingException {
testIfMaskAndEncodeSuccessful("Where sig is the only query param",
"http://www.testurl.net?sig=abcd", "http://www.testurl.net?sig=XXXXX");
testIfMaskAndEncodeSuccessful("Where oid is the only query param",
"http://www.testurl.net?saoid=abcdef",
"http://www.testurl.net?saoid=abcXXX");
testIfMaskAndEncodeSuccessful("Where sig is the first query param, oid is last",
"http://www.testurl.net?sig=abcd&abc=xyz&saoid=pqrs456",
"http://www.testurl.net?sig=XXXXX&abc=xyz&saoid=pqrsXXX");
testIfMaskAndEncodeSuccessful(
"Where sig/oid are neither first nor last query param",
"http://www.testurl.net?lmn=abc&sig=abcd&suoid=mnop789&abc=xyz",
"http://www.testurl.net?lmn=abc&sig=XXXXX&suoid=mnopXXX&abc=xyz");
testIfMaskAndEncodeSuccessful("Where sig is the last query param, oid is first",
"http://www.testurl.net?skoid=pqrs123&abc=xyz&sig=abcd",
"http://www.testurl.net?skoid=pqrsXXX&abc=xyz&sig=XXXXX");
testIfMaskAndEncodeSuccessful("Where sig/oid query param are not present",
"http://www.testurl.net?abc=xyz", "http://www.testurl.net?abc=xyz");
testIfMaskAndEncodeSuccessful(
"Where sig/oid query param are not present but mysig and myoid",
"http://www.testurl.net?abc=xyz&mysig=qwerty&mysaoid=uvw",
"http://www.testurl.net?abc=xyz&mysig=qwerty&mysaoid=uvw");
testIfMaskAndEncodeSuccessful(
"Where sig/oid query param is not present but sigmy and oidmy",
"http://www.testurl.net?abc=xyz&sigmy=qwerty&skoidmy=uvw",
"http://www.testurl.net?abc=xyz&sigmy=qwerty&skoidmy=uvw");
testIfMaskAndEncodeSuccessful(
"Where sig/oid query param is not present but values sig and oid",
"http://www.testurl.net?abc=xyz&mnop=sig&pqr=saoid",
"http://www.testurl.net?abc=xyz&mnop=sig&pqr=saoid");
testIfMaskAndEncodeSuccessful(
"Where sig/oid query param is not present but a value ends with sig/oid",
"http://www.testurl.net?abc=xyzsaoid&mnop=abcsig",
"http://www.testurl.net?abc=xyzsaoid&mnop=abcsig");
testIfMaskAndEncodeSuccessful(
"Where sig/oid query param is not present but a value starts with sig/oid",
"http://www.testurl.net?abc=saoidxyz&mnop=sigabc",
"http://www.testurl.net?abc=saoidxyz&mnop=sigabc");
}
@Test
public void testUrlWithNullValues()
throws MalformedURLException, UnsupportedEncodingException {
testIfMaskAndEncodeSuccessful("Where param to be masked has null value",
"http://www.testurl.net?abc=xyz&saoid=&mnop=abcsig",
"http://www.testurl.net?abc=xyz&saoid=&mnop=abcsig");
testIfMaskAndEncodeSuccessful("Where visible param has null value",
"http://www.testurl.net?abc=xyz&pqr=&mnop=abcd",
"http://www.testurl.net?abc=xyz&pqr=&mnop=abcd");
testIfMaskAndEncodeSuccessful("Where last param has null value",
"http://www.testurl.net?abc=xyz&pqr=&mnop=",
"http://www.testurl.net?abc=xyz&pqr=&mnop=");
}
private void testIfMaskAndEncodeSuccessful(final String scenario,
final String url, final String expectedMaskedUrl)
throws UnsupportedEncodingException, MalformedURLException {
assertThat(UriUtils.getMaskedUrl(new URL(url)))
.describedAs(url + " (" + scenario + ") after masking should be: "
+ expectedMaskedUrl).isEqualTo(expectedMaskedUrl);
final String expectedMaskedEncodedUrl = URLEncoder
.encode(expectedMaskedUrl, "UTF-8");
assertThat(UriUtils.encodedUrlStr(expectedMaskedUrl))
.describedAs(
url + " (" + scenario + ") after masking and encoding should "
+ "be: " + expectedMaskedEncodedUrl)
.isEqualTo(expectedMaskedEncodedUrl);
}
}
|
TestAbfsHttpOperation
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/JobKillCommitter.java
|
{
"start": 2850,
"end": 2972
}
|
class ____ a way for the mapper function to fail by
* intentionally throwing an IOException
*/
public static
|
provides
|
java
|
elastic__elasticsearch
|
x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java
|
{
"start": 36388,
"end": 41159
}
|
class ____ implements AutoscalingDeciderContext {
private final ClusterState state;
private final AutoscalingCapacity currentCapacity;
private final Set<DiscoveryNode> nodes;
private final ClusterInfo info;
private final Set<DiscoveryNodeRole> roles;
private TestAutoscalingDeciderContext(ClusterState state, Set<DiscoveryNodeRole> roles, AutoscalingCapacity currentCapacity) {
this.state = state;
this.currentCapacity = currentCapacity;
this.nodes = state.nodes().stream().filter(n -> roles.stream().anyMatch(n.getRoles()::contains)).collect(toSet());
this.roles = roles;
this.info = createClusterInfo(state);
}
@Override
public ClusterState state() {
return state;
}
@Override
public AutoscalingCapacity currentCapacity() {
return currentCapacity;
}
@Override
public Set<DiscoveryNode> nodes() {
return nodes;
}
@Override
public Set<DiscoveryNodeRole> roles() {
return roles;
}
@Override
public ClusterInfo info() {
return info;
}
@Override
public SnapshotShardSizeInfo snapshotShardSizeInfo() {
return null;
}
@Override
public void ensureNotCancelled() {
}
}
private static ClusterInfo createClusterInfo(ClusterState state) {
// we make a simple setup to detect the right decisions are made. The unmovable calculation is tested in more detail elsewhere.
// the diskusage is set such that the disk threshold decider never rejects an allocation.
Map<String, DiskUsage> diskUsages = state.nodes()
.stream()
.collect(toUnmodifiableMap(DiscoveryNode::getId, node -> new DiskUsage(node.getId(), null, "the_path", 1000, 1000)));
return new ClusterInfo() {
@Override
public Map<String, DiskUsage> getNodeLeastAvailableDiskUsages() {
return diskUsages;
}
@Override
public Map<String, DiskUsage> getNodeMostAvailableDiskUsages() {
return diskUsages;
}
@Override
public String getDataPath(ShardRouting shardRouting) {
return "the_path";
}
@Override
public Long getShardSize(ShardId shardId, boolean primary) {
return 1L;
}
};
}
private static ClusterState addRandomIndices(int minShards, int maxShardCopies, ClusterState state) {
String[] tierSettingNames = new String[] { DataTier.TIER_PREFERENCE };
int shards = randomIntBetween(minShards, 20);
Metadata.Builder builder = Metadata.builder();
RoutingTable.Builder routingTableBuilder = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY);
while (shards > 0) {
IndexMetadata indexMetadata = IndexMetadata.builder("test" + "-" + shards)
.settings(settings(IndexVersion.current()).put(randomFrom(tierSettingNames), "data_hot"))
.numberOfShards(randomIntBetween(1, 5))
.numberOfReplicas(randomIntBetween(0, maxShardCopies - 1))
.build();
builder.put(indexMetadata, false);
routingTableBuilder.addAsNew(indexMetadata);
shards -= indexMetadata.getNumberOfShards() * (indexMetadata.getNumberOfReplicas() + 1);
}
return ClusterState.builder(state).metadata(builder).routingTable(routingTableBuilder.build()).build();
}
static ClusterState addDataNodes(DiscoveryNodeRole role, String prefix, ClusterState state, int nodes) {
DiscoveryNodes.Builder builder = DiscoveryNodes.builder(state.nodes());
IntStream.range(0, nodes).mapToObj(i -> newDataNode(role, prefix + "_" + i)).forEach(builder::add);
return ClusterState.builder(state).nodes(builder).build();
}
static DiscoveryNode newDataNode(DiscoveryNodeRole role, String nodeName) {
return DiscoveryNodeUtils.builder(nodeName).name(nodeName).externalId(UUIDs.randomBase64UUID()).roles(Set.of(role)).build();
}
private static String randomNodeId(RoutingNodes routingNodes, DiscoveryNodeRole role) {
return randomFrom(routingNodes.stream().map(RoutingNode::node).filter(n -> n.getRoles().contains(role)).collect(toSet())).getId();
}
private static Set<ShardId> shardIds(Iterable<ShardRouting> candidateShards) {
return StreamSupport.stream(candidateShards.spliterator(), false).map(ShardRouting::shardId).collect(toSet());
}
}
|
TestAutoscalingDeciderContext
|
java
|
apache__flink
|
flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PriorityThreadsDispatcher.java
|
{
"start": 1932,
"end": 3060
}
|
class ____ extends DispatcherConfigurator {
/**
* @param config passed automatically by Pekko, should contain information about threads
* priority
* @param prerequisites passed automatically by Pekko
*/
public PriorityThreadsDispatcher(Config config, DispatcherPrerequisites prerequisites) {
super(
config,
createPriorityThreadDispatcherPrerequisites(
prerequisites, config.getInt("thread-priority")));
}
private static DispatcherPrerequisites createPriorityThreadDispatcherPrerequisites(
DispatcherPrerequisites prerequisites, int newThreadPriority) {
return new DefaultDispatcherPrerequisites(
new PrioritySettingThreadFactory(prerequisites.threadFactory(), newThreadPriority),
prerequisites.eventStream(),
prerequisites.scheduler(),
prerequisites.dynamicAccess(),
prerequisites.settings(),
prerequisites.mailboxes(),
prerequisites.defaultExecutionContext());
}
}
|
PriorityThreadsDispatcher
|
java
|
grpc__grpc-java
|
stub/src/main/java/io/grpc/stub/annotations/RpcMethod.java
|
{
"start": 1437,
"end": 1712
}
|
interface ____ {
/**
* The fully qualified method name. This should match the name as returned by
* {@link MethodDescriptor#generateFullMethodName(String, String)}.
*/
String fullMethodName();
/**
* The request type of the method. The request type
|
RpcMethod
|
java
|
apache__flink
|
flink-connectors/flink-connector-files/src/test/java/org/apache/flink/connector/file/sink/committer/FileCommitterTest.java
|
{
"start": 5526,
"end": 6544
}
|
class ____ extends NoOpBucketWriter {
private final List<RecordingPendingFile> recoveredPendingFiles = new ArrayList<>();
private int numCleanUp;
@Override
public BucketWriter.PendingFile recoverPendingFile(
InProgressFileWriter.PendingFileRecoverable pendingFileRecoverable)
throws IOException {
RecordingPendingFile pendingFile = new RecordingPendingFile();
recoveredPendingFiles.add(pendingFile);
return pendingFile;
}
@Override
public boolean cleanupInProgressFileRecoverable(
InProgressFileWriter.InProgressFileRecoverable inProgressFileRecoverable)
throws IOException {
numCleanUp++;
return true;
}
public List<RecordingPendingFile> getRecoveredPendingFiles() {
return recoveredPendingFiles;
}
public int getNumCleanUp() {
return numCleanUp;
}
}
}
|
StubBucketWriter
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/language/XPathLanguageEndpointTest.java
|
{
"start": 1121,
"end": 2480
}
|
class ____ extends ContextTestSupport {
@Test
public void testXPath() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "<foo>Hello World</foo>");
assertMockEndpointsSatisfied();
// test converter also works with shorthand names
QName qn = context.getTypeConverter().convertTo(QName.class, "NODESET");
Assertions.assertEquals(XPathConstants.NODESET, qn);
qn = context.getTypeConverter().convertTo(QName.class, "nodeset");
Assertions.assertEquals(XPathConstants.NODESET, qn);
qn = context.getTypeConverter().convertTo(QName.class, "BOOLEAN");
Assertions.assertEquals(XPathConstants.BOOLEAN, qn);
qn = context.getTypeConverter().convertTo(QName.class, "boolean");
Assertions.assertEquals(XPathConstants.BOOLEAN, qn);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.setHeader(Exchange.LANGUAGE_SCRIPT, constant("/foo/text()"))
.to("language:xpath?allowTemplateFromHeader=true")
.to("mock:result");
}
};
}
}
|
XPathLanguageEndpointTest
|
java
|
quarkusio__quarkus
|
extensions/smallrye-graphql-client/deployment/src/test/java/io/quarkus/smallrye/graphql/client/deployment/TypesafeGraphQLRecursionTest.java
|
{
"start": 468,
"end": 1168
}
|
class ____ {
private final static String EXPECTED_THROWN_MESSAGE = "field recursion found";
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(RecursiveApi.class, Team.class, Hero.class))
.assertException(t -> {
assertEquals(IllegalStateException.class, t.getClass());
assertTrue(t.getMessage().equals(EXPECTED_THROWN_MESSAGE),
"Wrong thrown error message.\nExpected:" + EXPECTED_THROWN_MESSAGE + "\nActual:" +
t.getMessage());
});
private static
|
TypesafeGraphQLRecursionTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/indices/AssociatedIndexDescriptorTests.java
|
{
"start": 671,
"end": 3843
}
|
class ____ extends ESTestCase {
/**
* Tests the various validation rules that are applied when creating a new associated index descriptor.
*/
public void testValidation() {
{
Exception ex = expectThrows(NullPointerException.class, () -> new AssociatedIndexDescriptor(null, randomAlphaOfLength(5)));
assertThat(ex.getMessage(), containsString("must not be null"));
}
{
Exception ex = expectThrows(IllegalArgumentException.class, () -> new AssociatedIndexDescriptor("", randomAlphaOfLength(5)));
assertThat(ex.getMessage(), containsString("must at least 2 characters in length"));
}
{
Exception ex = expectThrows(IllegalArgumentException.class, () -> new AssociatedIndexDescriptor(".", randomAlphaOfLength(5)));
assertThat(ex.getMessage(), containsString("must at least 2 characters in length"));
}
{
Exception ex = expectThrows(
IllegalArgumentException.class,
() -> new AssociatedIndexDescriptor(randomAlphaOfLength(10), randomAlphaOfLength(5))
);
assertThat(ex.getMessage(), containsString("must start with the character [.]"));
}
{
Exception ex = expectThrows(IllegalArgumentException.class, () -> new AssociatedIndexDescriptor(".*", randomAlphaOfLength(5)));
assertThat(ex.getMessage(), containsString("must not start with the character sequence [.*] to prevent conflicts"));
}
{
Exception ex = expectThrows(
IllegalArgumentException.class,
() -> new AssociatedIndexDescriptor(".*" + randomAlphaOfLength(10), randomAlphaOfLength(5))
);
assertThat(ex.getMessage(), containsString("must not start with the character sequence [.*] to prevent conflicts"));
}
}
public void testSpecialCharactersAreReplacedWhenConvertingToAutomaton() {
CharacterRunAutomaton automaton = new CharacterRunAutomaton(AssociatedIndexDescriptor.buildAutomaton(".associated-index*"));
// None of these should match, ever.
assertFalse(automaton.run(".my-associated-index"));
assertFalse(automaton.run("my.associated-index"));
assertFalse(automaton.run("some-other-index"));
// These should only fail if the trailing `*` doesn't get properly replaced with `.*`
assertTrue("if the trailing * isn't replaced, suffixes won't match properly", automaton.run(".associated-index-1"));
assertTrue("if the trailing * isn't replaced, suffixes won't match properly", automaton.run(".associated-index-asdf"));
// These should only fail if the leading `.` doesn't get properly replaced with `\\.`
assertFalse("if the leading dot isn't replaced, it can match date math", automaton.run("<associated-index-{now/d}>"));
assertFalse("if the leading dot isn't replaced, it can match any single-char prefix", automaton.run("Oassociated-index"));
assertFalse("the leading dot got dropped", automaton.run("associated-index-1"));
}
}
|
AssociatedIndexDescriptorTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/ImportAutoConfigurationTests.java
|
{
"start": 3930,
"end": 4037
}
|
class ____ {
}
@Configuration(proxyBeanMethods = false)
@AutoConfigureAfter(ConfigB.class)
static
|
ConfigB
|
java
|
alibaba__nacos
|
console/src/main/java/com/alibaba/nacos/console/proxy/ServerStateProxy.java
|
{
"start": 951,
"end": 1968
}
|
class ____ {
private final ServerStateHandler serverStateHandler;
public ServerStateProxy(ServerStateHandler serverStateHandler) {
this.serverStateHandler = serverStateHandler;
}
/**
* Get the current state of the server.
*
* @return the server state as a Map
*/
public Map<String, String> getServerState() throws NacosException {
return serverStateHandler.getServerState();
}
/**
* Get the announcement content based on the language.
*
* @param language the language for the announcement
* @return the announcement content as a String
*/
public String getAnnouncement(String language) {
return serverStateHandler.getAnnouncement(language);
}
/**
* Get the console UI guide information.
*
* @return the console UI guide information as a String
*/
public String getConsoleUiGuide() {
return serverStateHandler.getConsoleUiGuide();
}
}
|
ServerStateProxy
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java
|
{
"start": 3623,
"end": 4301
}
|
class ____ extends DistanceCalculator {
protected CartesianDistanceCalculator() {
super(SpatialCoordinateTypes.CARTESIAN, CoordinateEncoder.CARTESIAN);
}
@Override
protected double distance(Point left, Point right) {
// Cast coordinates to float to mimic Lucene behaviour, so we get identical results
final double diffX = (double) ((float) left.getX()) - (double) ((float) right.getX());
final double diffY = (double) ((float) left.getY()) - (double) ((float) right.getY());
return Math.sqrt(diffX * diffX + diffY * diffY);
}
}
/**
* This
|
CartesianDistanceCalculator
|
java
|
apache__rocketmq
|
common/src/main/java/org/apache/rocketmq/common/LifecycleAwareServiceThread.java
|
{
"start": 944,
"end": 1940
}
|
class ____ extends ServiceThread {
private final AtomicBoolean started = new AtomicBoolean(false);
@Override
public void run() {
started.set(true);
synchronized (started) {
started.notifyAll();
}
run0();
}
public abstract void run0();
/**
* Take spurious wakeup into account.
*
* @param timeout amount of time in milliseconds
* @throws InterruptedException if interrupted
*/
public void awaitStarted(long timeout) throws InterruptedException {
long expire = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeout);
synchronized (started) {
while (!started.get()) {
long duration = expire - System.nanoTime();
if (duration < TimeUnit.MILLISECONDS.toNanos(1)) {
break;
}
started.wait(TimeUnit.NANOSECONDS.toMillis(duration));
}
}
}
}
|
LifecycleAwareServiceThread
|
java
|
quarkusio__quarkus
|
extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/ConditionalExecutionTest.java
|
{
"start": 632,
"end": 1562
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Jobs.class));
@Test
public void testExecution() {
try {
// Wait until Jobs#doSomething() is executed at least 1x and skipped 1x
if (IsDisabled.SKIPPED_LATCH.await(10, TimeUnit.SECONDS)) {
assertEquals(1, Jobs.COUNTER.getCount());
IsDisabled.DISABLED.set(false);
} else {
fail("Job#foo not skipped in 10 seconds!");
}
if (!Jobs.COUNTER.await(10, TimeUnit.SECONDS)) {
fail("Job#foo not executed in 10 seconds!");
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IllegalStateException(e);
}
}
static
|
ConditionalExecutionTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/double_/DoubleAssert_isNotInfinite_Test.java
|
{
"start": 792,
"end": 1114
}
|
class ____ extends DoubleAssertBaseTest {
@Override
protected DoubleAssert invoke_api_method() {
return assertions.isNotInfinite();
}
@Override
protected void verify_internal_effects() {
verify(doubles).assertIsNotInfinite(getInfo(assertions), getActual(assertions));
}
}
|
DoubleAssert_isNotInfinite_Test
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java
|
{
"start": 2274,
"end": 2431
}
|
interface ____ extends
OldProtobufRpcProto.BlockingInterface {
}
@ProtocolInfo(protocolName = "testProto", protocolVersion = 2)
public
|
OldRpcService
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/config/ObjectFactoryCreatingFactoryBean.java
|
{
"start": 2571,
"end": 2741
}
|
class ____ might look
* something like this:
*
* <pre class="code">package a.b.c;
*
* import org.springframework.beans.factory.ObjectFactory;
*
* public
|
implementation
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/lucene/spatial/Extent.java
|
{
"start": 786,
"end": 10531
}
|
class ____ {
public int top;
public int bottom;
public int negLeft;
public int negRight;
public int posLeft;
public int posRight;
private static final byte NONE_SET = 0;
private static final byte POSITIVE_SET = 1;
private static final byte NEGATIVE_SET = 2;
private static final byte CROSSES_LAT_AXIS = 3;
private static final byte ALL_SET = 4;
Extent() {
this.top = Integer.MIN_VALUE;
this.bottom = Integer.MAX_VALUE;
this.negLeft = Integer.MAX_VALUE;
this.negRight = Integer.MIN_VALUE;
this.posLeft = Integer.MAX_VALUE;
this.posRight = Integer.MIN_VALUE;
}
Extent(int top, int bottom, int negLeft, int negRight, int posLeft, int posRight) {
this.top = top;
this.bottom = bottom;
this.negLeft = negLeft;
this.negRight = negRight;
this.posLeft = posLeft;
this.posRight = posRight;
}
@SuppressWarnings("HiddenField")
public void reset(int top, int bottom, int negLeft, int negRight, int posLeft, int posRight) {
this.top = top;
this.bottom = bottom;
this.negLeft = negLeft;
this.negRight = negRight;
this.posLeft = posLeft;
this.posRight = posRight;
}
/**
* Adds the extent of two points representing a bounding box's bottom-left
* and top-right points. The bounding box must not cross the dateline.
*
* @param bottomLeftX the bottom-left x-coordinate
* @param bottomLeftY the bottom-left y-coordinate
* @param topRightX the top-right x-coordinate
* @param topRightY the top-right y-coordinate
*/
public void addRectangle(int bottomLeftX, int bottomLeftY, int topRightX, int topRightY) {
assert bottomLeftX <= topRightX;
assert bottomLeftY <= topRightY;
this.bottom = Math.min(this.bottom, bottomLeftY);
this.top = Math.max(this.top, topRightY);
if (bottomLeftX < 0 && topRightX < 0) {
this.negLeft = Math.min(this.negLeft, bottomLeftX);
this.negRight = Math.max(this.negRight, topRightX);
} else if (bottomLeftX < 0) {
this.negLeft = Math.min(this.negLeft, bottomLeftX);
this.posRight = Math.max(this.posRight, topRightX);
// this signal the extent cannot be wrapped around the dateline
this.negRight = 0;
this.posLeft = 0;
} else {
this.posLeft = Math.min(this.posLeft, bottomLeftX);
this.posRight = Math.max(this.posRight, topRightX);
}
}
static void readFromCompressed(StreamInput input, Extent extent) throws IOException {
final int top = input.readInt();
final int bottom = Math.toIntExact(top - input.readVLong());
final int negLeft;
final int negRight;
final int posLeft;
final int posRight;
byte type = input.readByte();
switch (type) {
case NONE_SET -> {
negLeft = Integer.MAX_VALUE;
negRight = Integer.MIN_VALUE;
posLeft = Integer.MAX_VALUE;
posRight = Integer.MIN_VALUE;
}
case POSITIVE_SET -> {
posLeft = input.readVInt();
posRight = Math.toIntExact(input.readVLong() + posLeft);
negLeft = Integer.MAX_VALUE;
negRight = Integer.MIN_VALUE;
}
case NEGATIVE_SET -> {
negRight = -input.readVInt();
negLeft = Math.toIntExact(negRight - input.readVLong());
posLeft = Integer.MAX_VALUE;
posRight = Integer.MIN_VALUE;
}
case CROSSES_LAT_AXIS -> {
posRight = input.readVInt();
negLeft = -input.readVInt();
posLeft = 0;
negRight = 0;
}
case ALL_SET -> {
posLeft = input.readVInt();
posRight = Math.toIntExact(input.readVLong() + posLeft);
negRight = -input.readVInt();
negLeft = Math.toIntExact(negRight - input.readVLong());
}
default -> throw new IllegalArgumentException("invalid extent values-set byte read [" + type + "]");
}
extent.reset(top, bottom, negLeft, negRight, posLeft, posRight);
}
void writeCompressed(StreamOutput output) throws IOException {
output.writeInt(this.top);
output.writeVLong((long) this.top - this.bottom);
byte type;
if (this.negLeft == Integer.MAX_VALUE && this.negRight == Integer.MIN_VALUE) {
if (this.posLeft == Integer.MAX_VALUE && this.posRight == Integer.MIN_VALUE) {
type = NONE_SET;
} else {
type = POSITIVE_SET;
}
} else if (this.posLeft == Integer.MAX_VALUE && this.posRight == Integer.MIN_VALUE) {
type = NEGATIVE_SET;
} else {
if (posLeft == 0 && negRight == 0) {
type = CROSSES_LAT_AXIS;
} else {
type = ALL_SET;
}
}
output.writeByte(type);
switch (type) {
case NONE_SET:
break;
case POSITIVE_SET:
output.writeVInt(this.posLeft);
output.writeVLong((long) this.posRight - this.posLeft);
break;
case NEGATIVE_SET:
output.writeVInt(-this.negRight);
output.writeVLong((long) this.negRight - this.negLeft);
break;
case CROSSES_LAT_AXIS:
output.writeVInt(this.posRight);
output.writeVInt(-this.negLeft);
break;
case ALL_SET:
output.writeVInt(this.posLeft);
output.writeVLong((long) this.posRight - this.posLeft);
output.writeVInt(-this.negRight);
output.writeVLong((long) this.negRight - this.negLeft);
break;
default:
throw new IllegalArgumentException("invalid extent values-set byte read [" + type + "]");
}
}
/**
* calculates the extent of a point, which is the point itself.
* @param x the x-coordinate of the point
* @param y the y-coordinate of the point
* @return the extent of the point
*/
public static Extent fromPoint(int x, int y) {
return new Extent(
y,
y,
x < 0 ? x : Integer.MAX_VALUE,
x < 0 ? x : Integer.MIN_VALUE,
x >= 0 ? x : Integer.MAX_VALUE,
x >= 0 ? x : Integer.MIN_VALUE
);
}
/**
* calculates the extent of two points representing a bounding box's bottom-left
* and top-right points. It is important that these points accurately represent the
* bottom-left and top-right of the extent since there is no validation being done.
*
* @param bottomLeftX the bottom-left x-coordinate
* @param bottomLeftY the bottom-left y-coordinate
* @param topRightX the top-right x-coordinate
* @param topRightY the top-right y-coordinate
* @return the extent of the two points
*/
public static Extent fromPoints(int bottomLeftX, int bottomLeftY, int topRightX, int topRightY) {
int negLeft = Integer.MAX_VALUE;
int negRight = Integer.MIN_VALUE;
int posLeft = Integer.MAX_VALUE;
int posRight = Integer.MIN_VALUE;
if (bottomLeftX < 0 && topRightX < 0) {
negLeft = bottomLeftX;
negRight = topRightX;
} else if (bottomLeftX < 0) {
negLeft = bottomLeftX;
posRight = topRightX;
// this signal the extent cannot be wrapped around the dateline
negRight = 0;
posLeft = 0;
} else {
posLeft = bottomLeftX;
posRight = topRightX;
}
return new Extent(topRightY, bottomLeftY, negLeft, negRight, posLeft, posRight);
}
/**
* @return the minimum y-coordinate of the extent
*/
public int minY() {
return bottom;
}
/**
* @return the maximum y-coordinate of the extent
*/
public int maxY() {
return top;
}
/**
* @return the absolute minimum x-coordinate of the extent, whether it is positive or negative.
*/
public int minX() {
return Math.min(negLeft, posLeft);
}
/**
* @return the absolute maximum x-coordinate of the extent, whether it is positive or negative.
*/
public int maxX() {
return Math.max(negRight, posRight);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Extent extent = (Extent) o;
return top == extent.top
&& bottom == extent.bottom
&& negLeft == extent.negLeft
&& negRight == extent.negRight
&& posLeft == extent.posLeft
&& posRight == extent.posRight;
}
@Override
public int hashCode() {
return Objects.hash(top, bottom, negLeft, negRight, posLeft, posRight);
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder("[");
builder.append("top = " + top + ", ");
builder.append("bottom = " + bottom + ", ");
builder.append("negLeft = " + negLeft + ", ");
builder.append("negRight = " + negRight + ", ");
builder.append("posLeft = " + posLeft + ", ");
builder.append("posRight = " + posRight + "]");
return builder.toString();
}
}
|
Extent
|
java
|
apache__camel
|
dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/download/KameletOptimisedComponentResolver.java
|
{
"start": 1373,
"end": 2743
}
|
class ____ extends DefaultOptimisedComponentResolver {
private final CamelContext camelContext;
public KameletOptimisedComponentResolver(CamelContext camelContext) {
super(camelContext);
this.camelContext = camelContext;
}
@Override
public Component resolveComponent(String uri) {
Component answer = super.resolveComponent(uri);
String scheme = ExchangeHelper.resolveScheme(uri);
// if a kamelet then we need to know the name of the kamelet spec in use
if ("kamelet".equals(scheme)) {
String name = ExchangeHelper.resolveContextPath(uri);
// must be a static name (so we can load the template and resolve nested dependencies)
if (!SimpleLanguage.hasSimpleFunction(name) && answer instanceof KameletComponent kc) {
// need to resolve dependencies from kamelet also
String loc = kc.getLocation();
DependencyDownloaderKamelet listener = camelContext.hasService(DependencyDownloaderKamelet.class);
try {
RouteTemplateHelper.loadRouteTemplateFromLocation(camelContext, listener, name, loc);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
return answer;
}
}
|
KameletOptimisedComponentResolver
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/test/Assertions_assertThat_ambiguous_Test.java
|
{
"start": 2174,
"end": 2453
}
|
class ____<T> implements Iterator<T>, Predicate<T> {
@Override
public boolean test(T t) {
return false;
}
@Override
public boolean hasNext() {
return false;
}
@Override
public T next() {
return null;
}
}
}
|
IteratorPredicate
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
|
{
"start": 140892,
"end": 141208
}
|
class ____ {",
" @ProtectedAnnotation",
" public abstract String foo();",
"}");
JavaFileObject annotationsFileObject =
JavaFileObjects.forSourceLines(
"otherpackage.Annotations",
"package otherpackage;",
"",
"public
|
Parent
|
java
|
micronaut-projects__micronaut-core
|
json-core/src/main/java/io/micronaut/json/bind/JsonBeanPropertyBinder.java
|
{
"start": 9536,
"end": 9887
}
|
class ____ implements ValueBuilder {
static final FixedValue NULL = new FixedValue(JsonNode.nullNode());
final JsonNode value;
FixedValue(JsonNode value) {
this.value = value;
}
@Override
public JsonNode build() {
return value;
}
}
private static final
|
FixedValue
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/http/impl/http1x/VertxFullHttpResponse.java
|
{
"start": 551,
"end": 765
}
|
class ____ allows to assemble a LastHttpContent and a HttpResponse into one "packet" and so more
* efficient write it through the pipeline.
*
* @author <a href="mailto:nmaurer@redhat.com">Norman Maurer</a>
*/
|
which
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java
|
{
"start": 33308,
"end": 33948
}
|
class ____ extends ReplicationRequest<Request> {
public AtomicBoolean processedOnPrimary = new AtomicBoolean();
public AtomicBoolean runPostReplicationActionsOnPrimary = new AtomicBoolean();
public Set<ShardRouting> processedOnReplicas = ConcurrentCollections.newConcurrentSet();
Request(ShardId shardId) {
super(shardId);
this.index = shardId.getIndexName();
this.waitForActiveShards = ActiveShardCount.NONE;
// keep things simple
}
@Override
public String toString() {
return "Request{}";
}
}
static
|
Request
|
java
|
apache__flink
|
flink-clients/src/main/java/org/apache/flink/client/deployment/application/executors/EmbeddedJobClientCreator.java
|
{
"start": 1019,
"end": 1099
}
|
interface ____ be implemented by {@link JobClient} suppliers. */
@Internal
public
|
to
|
java
|
junit-team__junit5
|
junit-platform-commons/src/main/java/org/junit/platform/commons/util/ClassNamePatternFilterUtils.java
|
{
"start": 762,
"end": 1051
}
|
class ____.
*
* <h2>DISCLAIMER</h2>
*
* <p>These utilities are intended solely for usage within the JUnit framework
* itself. <strong>Any usage by external parties is not supported.</strong>
* Use at your own risk!
*
* @since 1.7
*/
@API(status = INTERNAL, since = "1.7")
public
|
names
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest56.java
|
{
"start": 893,
"end": 1200
}
|
class ____ extends TestCase {
public void test_true() throws Exception {
WallProvider provider = new MySqlWallProvider();
assertTrue(provider.checkValid(//
"SHOW GRANTS FOR CURRENT_USER()"));
assertEquals(0, provider.getTableStats().size());
}
}
|
MySqlWallTest56
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/bug/Issue_697.java
|
{
"start": 116,
"end": 1500
}
|
class ____ extends TestCase {
public void test_for_issue() throws Exception {
String sql = "insert into tag_rule_detail(id, gmt_create, gmt_modified, group_id, priority, rule_condition, rule_action) values(1010102, now(), now(), 10101, 0, 'flow=''trustLogin''', 'be:login,dev:pc, env:web, type:trust_login, from:$loginfrom, result:true') ;\n"
+ "insert into tag_rule_detail(id, gmt_create, gmt_modified, group_id, priority, rule_condition, rule_action) values(1010103, now(), now(), 10101, 0, 'flow=''Ctr''', 'be:login,dev:pc, env:web, type:ctr, from:$loginfrom, result:true') ;";
String expected = "INSERT INTO tag_rule_detail (id, gmt_create, gmt_modified, group_id, priority\n" +
"\t, rule_condition, rule_action)\n" +
"VALUES (1010102, now(), now(), 10101, 0\n" +
"\t, 'flow=''trustLogin''', 'be:login,dev:pc, env:web, type:trust_login, from:$loginfrom, result:true');\n" +
"\n" +
"INSERT INTO tag_rule_detail (id, gmt_create, gmt_modified, group_id, priority\n" +
"\t, rule_condition, rule_action)\n" +
"VALUES (1010103, now(), now(), 10101, 0\n" +
"\t, 'flow=''Ctr''', 'be:login,dev:pc, env:web, type:ctr, from:$loginfrom, result:true');";
assertEquals(expected, SQLUtils.formatMySql(sql));
}
}
|
Issue_697
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/TruthIncompatibleTypeTest.java
|
{
"start": 1706,
"end": 2093
}
|
class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(TruthIncompatibleType.class, getClass());
@Test
public void positive() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
public
|
TruthIncompatibleTypeTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc-test/src/test/java/org/springframework/boot/webmvc/test/autoconfigure/SpringBootMockMvcBuilderCustomizerTests.java
|
{
"start": 6067,
"end": 6120
}
|
class ____ extends HttpServlet {
}
static
|
TestServlet
|
java
|
apache__flink
|
flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/src/compression/StandardDeCompressors.java
|
{
"start": 1585,
"end": 4132
}
|
class ____ {
/** All supported file compression formats, by common file extensions. */
private static final Map<String, InflaterInputStreamFactory<?>> DECOMPRESSORS =
buildDecompressorMap(
DeflateInflaterInputStreamFactory.getInstance(),
GzipInflaterInputStreamFactory.getInstance(),
Bzip2InputStreamFactory.getInstance(),
XZInputStreamFactory.getInstance());
/** All common file extensions of supported file compression formats. */
private static final Collection<String> COMMON_SUFFIXES =
Collections.unmodifiableList(new ArrayList<>(DECOMPRESSORS.keySet()));
// ------------------------------------------------------------------------
/** Gets all common file extensions of supported file compression formats. */
public static Collection<String> getCommonSuffixes() {
return COMMON_SUFFIXES;
}
/**
* Gets the decompressor for a file extension. Returns null if there is no decompressor for this
* file extension.
*/
@Nullable
public static InflaterInputStreamFactory<?> getDecompressorForExtension(String extension) {
return DECOMPRESSORS.get(extension);
}
/**
* Gets the decompressor for a file name. This checks the file against all known and supported
* file extensions. Returns null if there is no decompressor for this file name.
*/
@Nullable
public static InflaterInputStreamFactory<?> getDecompressorForFileName(String fileName) {
for (final Map.Entry<String, InflaterInputStreamFactory<?>> entry :
DECOMPRESSORS.entrySet()) {
if (fileName.endsWith(entry.getKey())) {
return entry.getValue();
}
}
return null;
}
// ------------------------------------------------------------------------
private static Map<String, InflaterInputStreamFactory<?>> buildDecompressorMap(
final InflaterInputStreamFactory<?>... decompressors) {
final LinkedHashMap<String, InflaterInputStreamFactory<?>> map =
new LinkedHashMap<>(decompressors.length);
for (InflaterInputStreamFactory<?> decompressor : decompressors) {
for (String suffix : decompressor.getCommonFileExtensions()) {
map.put(suffix, decompressor);
}
}
return map;
}
// ------------------------------------------------------------------------
/** This
|
StandardDeCompressors
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/filter/NullConversionsForContent4200Test.java
|
{
"start": 498,
"end": 898
}
|
class ____ {
private final Map<String, String> value;
@JsonCreator(mode = JsonCreator.Mode.DELEGATING)
DelegatingWrapper4200(@JsonSetter(contentNulls = Nulls.FAIL)
Map<String, String> value)
{
this.value = value;
}
public Map<String, String> getValue() {
return value;
}
}
static
|
DelegatingWrapper4200
|
java
|
google__guice
|
extensions/assistedinject/test/com/google/inject/assistedinject/ManyConstructorsTest.java
|
{
"start": 6569,
"end": 6681
}
|
interface ____ {
Bar create(String name);
Bar create(String name, int idx);
}
public static
|
BarFactory
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java
|
{
"start": 2278,
"end": 6146
}
|
class ____ extends HTestCase {
private MiniDFSCluster miniDfs;
private Configuration nnConf;
/**
* Fire up our own hand-rolled MiniDFSCluster. We do this here instead
* of relying on TestHdfsHelper because we don't want to turn on XAttr
* support.
*
* @throws Exception
*/
private void startMiniDFS() throws Exception {
File testDirRoot = TestDirHelper.getTestDir();
if (System.getProperty("hadoop.log.dir") == null) {
System.setProperty("hadoop.log.dir",
new File(testDirRoot, "hadoop-log").getAbsolutePath());
}
if (System.getProperty("test.build.data") == null) {
System.setProperty("test.build.data",
new File(testDirRoot, "hadoop-data").getAbsolutePath());
}
Configuration conf = HadoopUsersConfTestHelper.getBaseConf();
HadoopUsersConfTestHelper.addUserConf(conf);
conf.set("fs.hdfs.impl.disable.cache", "true");
conf.set("dfs.block.access.token.enable", "false");
conf.set("dfs.permissions", "true");
conf.set("hadoop.security.authentication", "simple");
// Explicitly turn off XAttr support
conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, false);
MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
builder.numDataNodes(2);
miniDfs = builder.build();
nnConf = miniDfs.getConfiguration(0);
}
/**
* Create an HttpFS Server to talk to the MiniDFSCluster we created.
* @throws Exception
*/
private void createHttpFSServer() throws Exception {
File homeDir = TestDirHelper.getTestDir();
assertTrue(new File(homeDir, "conf").mkdir());
assertTrue(new File(homeDir, "log").mkdir());
assertTrue(new File(homeDir, "temp").mkdir());
HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
File secretFile = new File(new File(homeDir, "conf"), "secret");
Writer w = new FileWriter(secretFile);
w.write("secret");
w.close();
// HDFS configuration
File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
if (!hadoopConfDir.mkdirs()) {
throw new IOException();
}
String fsDefaultName =
nnConf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
Configuration conf = new Configuration(false);
conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
// Explicitly turn off XAttr support
conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, false);
File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
OutputStream os = new FileOutputStream(hdfsSite);
conf.writeXml(os);
os.close();
// HTTPFS configuration
conf = new Configuration(false);
conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite);
conf.writeXml(os);
os.close();
ClassLoader cl = Thread.currentThread().getContextClassLoader();
URL url = cl.getResource("webapp");
if (url == null) {
throw new IOException();
}
WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
Server server = TestJettyHelper.getJettyServer();
server.setHandler(context);
server.start();
}
/**
* Talks to the http
|
TestHttpFSServerNoXAttrs
|
java
|
apache__camel
|
components/camel-micrometer/src/test/java/org/apache/camel/component/micrometer/DistributionSummaryEndpointTest.java
|
{
"start": 1563,
"end": 2989
}
|
class ____ {
private static final String METRICS_NAME = "metrics.name";
private static final Double VALUE = Long.valueOf(System.currentTimeMillis()).doubleValue();
@Mock
private MeterRegistry registry;
private MicrometerEndpoint endpoint;
private InOrder inOrder;
@BeforeEach
public void setUp() {
endpoint = new MicrometerEndpoint(null, null, registry, Meter.Type.DISTRIBUTION_SUMMARY, METRICS_NAME);
inOrder = Mockito.inOrder(registry);
}
@AfterEach
public void tearDown() {
inOrder.verifyNoMoreInteractions();
}
@Test
public void testHistogramEndpoint() {
assertThat(endpoint, is(notNullValue()));
assertThat(endpoint.getRegistry(), is(registry));
assertThat(endpoint.getMetricsName(), is(METRICS_NAME));
}
@Test
public void testCreateProducer() {
Producer producer = endpoint.createProducer();
assertThat(producer, is(notNullValue()));
assertThat(producer, is(instanceOf(DistributionSummaryProducer.class)));
}
@Test
public void testGetValue() {
assertThat(endpoint.getValue(), is(nullValue()));
}
@Test
public void testSetValue() {
assertThat(endpoint.getValue(), is(nullValue()));
endpoint.setValue(VALUE.toString());
assertThat(Double.valueOf(endpoint.getValue()), is(VALUE));
}
}
|
DistributionSummaryEndpointTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
|
{
"start": 1141,
"end": 2494
}
|
class ____ implements WritableComparable<LongWritable> {
private long value;
public LongWritable() {}
public LongWritable(long value) { set(value); }
/**
* Set the value of this LongWritable.
* @param value value.
*/
public void set(long value) { this.value = value; }
/**
* Return the value of this LongWritable.
* @return value of this LongWritable.
*/
public long get() { return value; }
@Override
public void readFields(DataInput in) throws IOException {
value = in.readLong();
}
@Override
public void write(DataOutput out) throws IOException {
out.writeLong(value);
}
/** Returns true iff <code>o</code> is a LongWritable with the same value. */
@Override
public boolean equals(Object o) {
if (!(o instanceof LongWritable))
return false;
LongWritable other = (LongWritable)o;
return this.value == other.value;
}
@Override
public int hashCode() {
return (int)value;
}
/** Compares two LongWritables. */
@Override
public int compareTo(LongWritable o) {
long thisValue = this.value;
long thatValue = o.value;
return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
}
@Override
public String toString() {
return Long.toString(value);
}
/** A Comparator optimized for LongWritable. */
public static
|
LongWritable
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/builder/lifecycle/OrderDto.java
|
{
"start": 263,
"end": 642
}
|
class ____ {
private List<ItemDto> items;
private String creator;
public List<ItemDto> getItems() {
return items;
}
public void setItems(List<ItemDto> items) {
this.items = items;
}
public String getCreator() {
return creator;
}
public void setCreator(String creator) {
this.creator = creator;
}
}
|
OrderDto
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/LocalDateTimeComparator.java
|
{
"start": 1223,
"end": 1404
}
|
class ____ not extend {@link BasicTypeComparator}, because LocalDateTime is a Comparable of
* ChronoLocalDateTime instead of Comparable of LocalDateTime.
*/
@Internal
public final
|
can
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/base/ToStringHelperTest.java
|
{
"start": 5120,
"end": 5697
}
|
class ____ a name ending like "Outer.$n"
Object unused1 = new Object() {};
Object unused2 = new Object() {};
Object unused3 = new Object() {};
Object unused4 = new Object() {};
Object unused5 = new Object() {};
Object unused6 = new Object() {};
Object unused7 = new Object() {};
Object unused8 = new Object() {};
Object unused9 = new Object() {};
Object o10 = new Object() {};
String toTest = MoreObjects.toStringHelper(o10).toString();
assertTrue(toTest, toTest.matches(".*\\{\\}"));
}
// all remaining test are on an inner
|
has
|
java
|
spring-projects__spring-framework
|
spring-jms/src/main/java/org/springframework/jms/listener/endpoint/DefaultJmsActivationSpecFactory.java
|
{
"start": 5130,
"end": 5248
}
|
class ____ in inbound subpackage: " + specClassName);
}
}
throw new IllegalStateException("No ActivationSpec
|
found
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/stubbing/CloningParameterTest.java
|
{
"start": 436,
"end": 2364
}
|
class ____ extends TestBase {
@Test
public void shouldVerifyEvenIfArgumentsWereMutated() throws Exception {
// given
EmailSender emailSender = mock(EmailSender.class, new ClonesArguments());
// when
businessLogic(emailSender);
// then
verify(emailSender).sendEmail(1, new Person("Wes"));
}
private void businessLogic(EmailSender emailSender) {
Person person = new Person("Wes");
emailSender.sendEmail(1, person);
person.emailSent();
}
@Test
public void shouldReturnDefaultValueWithCloningAnswer() throws Exception {
// given
EmailSender emailSender = mock(EmailSender.class, new ClonesArguments());
when(emailSender.getAllEmails(new Person("Wes"))).thenAnswer(new ClonesArguments());
// when
List<?> emails = emailSender.getAllEmails(new Person("Wes"));
// then
assertNotNull(emails);
}
@Test
public void shouldCloneArrays() throws Exception {
EmailSender emailSender = mock(EmailSender.class, new ClonesArguments());
// 1. Pass an array into a mock that "ClonesArguments"
Person[] ccList = new Person[] {new Person("Wes")};
emailSender.sendGroupEmail(1, ccList);
// 2. Mutate the array
ccList[0] = new Person("Joe");
// 3. Verify that the mock made a copy of the array
verify(emailSender).sendGroupEmail(1, new Person[] {new Person("Wes")});
}
@Test
public void shouldNotThrowNPEWhenCloningNulls() throws Exception {
EmailSender emailSender = mock(EmailSender.class, new ClonesArguments());
// 1. Pass a null into a mock that "ClonesArguments"
emailSender.sendEmail(1, (Person) null);
// 2. Verify that the null argument was captured
verify(emailSender).sendEmail(eq(1), (Person) isNull());
}
public
|
CloningParameterTest
|
java
|
apache__camel
|
components/camel-jpa/src/test/java/org/apache/camel/component/jpa/JpaComponentTest.java
|
{
"start": 1416,
"end": 4804
}
|
class ____ extends CamelTestSupport {
@Test
public void testJpaComponentConsumerHasLockModeType() throws Exception {
try (JpaComponent comp = new JpaComponent()) {
comp.setCamelContext(context);
assertNull(comp.getEntityManagerFactory());
assertNull(comp.getTransactionStrategy());
JpaEndpoint jpa
= (JpaEndpoint) comp
.createEndpoint("jpa://" + SendEmail.class.getName() + "?lockModeType=PESSIMISTIC_WRITE");
JpaConsumer consumer = (JpaConsumer) jpa.createConsumer(null);
assertEquals(LockModeType.PESSIMISTIC_WRITE, consumer.getLockModeType());
}
}
@Test
public void testJpaComponentCtr() throws Exception {
try (JpaComponent comp = new JpaComponent()) {
comp.setCamelContext(context);
assertNull(comp.getEntityManagerFactory());
assertNull(comp.getTransactionStrategy());
JpaEndpoint jpa = (JpaEndpoint) comp.createEndpoint("jpa://" + SendEmail.class.getName());
assertNotNull(jpa);
assertNotNull(jpa.getEntityType());
}
}
@Test
public void testJpaComponentEMFandTM() throws Exception {
try (JpaComponent comp = new JpaComponent()) {
comp.setCamelContext(context);
assertNull(comp.getEntityManagerFactory());
assertNull(comp.getTransactionStrategy());
EntityManagerFactory fac = Persistence.createEntityManagerFactory("camel");
JpaTransactionManager tm = new JpaTransactionManager(fac);
tm.afterPropertiesSet();
comp.setEntityManagerFactory(fac);
if (comp.getTransactionStrategy() instanceof DefaultTransactionStrategy strategy) {
strategy.setTransactionManager(tm);
}
assertSame(fac, comp.getEntityManagerFactory());
if (comp.getTransactionStrategy() instanceof DefaultTransactionStrategy strategy) {
assertSame(tm, strategy.getTransactionManager());
}
JpaEndpoint jpa = (JpaEndpoint) comp.createEndpoint("jpa://" + SendEmail.class.getName());
assertNotNull(jpa);
assertNotNull(jpa.getEntityType());
}
}
@Test
public void testJpaComponentWithPath() throws Exception {
try (JpaComponent comp = new JpaComponent()) {
comp.setCamelContext(context);
assertNull(comp.getEntityManagerFactory());
assertNull(comp.getTransactionStrategy());
JpaEndpoint jpa = (JpaEndpoint) comp.createEndpoint(
"jpa://" + SendEmail.class.getName() + "?persistenceUnit=journalPersistenceUnit&usePersist=true");
assertNotNull(jpa);
assertNotNull(jpa.getEntityType());
}
}
@Test
public void testJpaComponentEmptyPath() throws Exception {
try (JpaComponent comp = new JpaComponent()) {
comp.setCamelContext(context);
assertNull(comp.getEntityManagerFactory());
assertNull(comp.getTransactionStrategy());
JpaEndpoint jpa = (JpaEndpoint) comp.createEndpoint("jpa:?persistenceUnit=journalPersistenceUnit&usePersist=true");
assertNotNull(jpa);
assertNull(jpa.getEntityType());
}
}
}
|
JpaComponentTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/comparable/AbstractComparableAssert_isStrictlyBetween_Test.java
|
{
"start": 819,
"end": 1209
}
|
class ____ extends AbstractComparableAssertBaseTest {
@Override
protected ConcreteComparableAssert invoke_api_method() {
return assertions.isStrictlyBetween(6, 9);
}
@Override
protected void verify_internal_effects() {
verify(comparables).assertIsBetween(getInfo(assertions), getActual(assertions), 6, 9, false, false);
}
}
|
AbstractComparableAssert_isStrictlyBetween_Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/injection/guice/ModulesBuilder.java
|
{
"start": 629,
"end": 1551
}
|
class ____ implements Iterable<Module> {
private final List<Module> modules = new ArrayList<>();
public ModulesBuilder add(Module... newModules) {
Collections.addAll(modules, newModules);
return this;
}
public <T> T bindToInstance(Class<T> cls, T instance) {
modules.add(b -> b.bind(cls).toInstance(instance));
return instance;
}
@Override
public Iterator<Module> iterator() {
return modules.iterator();
}
public Injector createInjector() {
Injector injector = Guice.createInjector(modules);
((InjectorImpl) injector).clearCache();
// in ES, we always create all instances as if they are eager singletons
// this allows for considerable memory savings (no need to store construction info) as well as cycles
((InjectorImpl) injector).readOnlyAllSingletons();
return injector;
}
}
|
ModulesBuilder
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobSubmitHandler.java
|
{
"start": 2464,
"end": 10360
}
|
class ____
extends AbstractRestHandler<
DispatcherGateway,
JobSubmitRequestBody,
JobSubmitResponseBody,
EmptyMessageParameters> {
private static final String FILE_TYPE_EXECUTION_PLAN = "ExecutionPlan";
private static final String FILE_TYPE_JAR = "Jar";
private static final String FILE_TYPE_ARTIFACT = "Artifact";
private final Executor executor;
private final Configuration configuration;
public JobSubmitHandler(
GatewayRetriever<? extends DispatcherGateway> leaderRetriever,
Duration timeout,
Map<String, String> headers,
Executor executor,
Configuration configuration) {
super(leaderRetriever, timeout, headers, JobSubmitHeaders.getInstance());
this.executor = executor;
this.configuration = configuration;
}
@Override
protected CompletableFuture<JobSubmitResponseBody> handleRequest(
@Nonnull HandlerRequest<JobSubmitRequestBody> request,
@Nonnull DispatcherGateway gateway)
throws RestHandlerException {
final Collection<File> uploadedFiles = request.getUploadedFiles();
final Map<String, Path> nameToFile =
uploadedFiles.stream()
.collect(Collectors.toMap(File::getName, Path::fromLocalFile));
if (uploadedFiles.size() != nameToFile.size()) {
throw new RestHandlerException(
String.format(
"The number of uploaded files was %s than the expected count. Expected: %s Actual %s",
uploadedFiles.size() < nameToFile.size() ? "lower" : "higher",
nameToFile.size(),
uploadedFiles.size()),
HttpResponseStatus.BAD_REQUEST);
}
final JobSubmitRequestBody requestBody = request.getRequestBody();
if (requestBody.executionPlanFileName == null) {
throw new RestHandlerException(
String.format(
"The %s field must not be omitted or be null.",
JobSubmitRequestBody.FIELD_NAME_JOB_GRAPH),
HttpResponseStatus.BAD_REQUEST);
}
CompletableFuture<ExecutionPlan> executionPlanFuture =
loadExecutionPlan(requestBody, nameToFile);
Collection<Path> jarFiles = getJarFilesToUpload(requestBody.jarFileNames, nameToFile);
Collection<Tuple2<String, Path>> artifacts =
getArtifactFilesToUpload(requestBody.artifactFileNames, nameToFile);
CompletableFuture<ExecutionPlan> finalizedExecutionPlanFuture =
uploadExecutionPlanFiles(
gateway, executionPlanFuture, jarFiles, artifacts, configuration);
CompletableFuture<Acknowledge> jobSubmissionFuture =
finalizedExecutionPlanFuture.thenCompose(
executionPlan -> gateway.submitJob(executionPlan, timeout));
return jobSubmissionFuture.thenCombine(
executionPlanFuture,
(ack, executionPlan) ->
new JobSubmitResponseBody("/jobs/" + executionPlan.getJobID()));
}
private CompletableFuture<ExecutionPlan> loadExecutionPlan(
JobSubmitRequestBody requestBody, Map<String, Path> nameToFile)
throws MissingFileException {
final Path executionPlanFile =
getPathAndAssertUpload(
requestBody.executionPlanFileName, FILE_TYPE_EXECUTION_PLAN, nameToFile);
return CompletableFuture.supplyAsync(
() -> {
ExecutionPlan executionPlan;
try (ObjectInputStream objectIn =
new ObjectInputStream(
executionPlanFile.getFileSystem().open(executionPlanFile))) {
executionPlan = (ExecutionPlan) objectIn.readObject();
} catch (Exception e) {
throw new CompletionException(
new RestHandlerException(
"Failed to deserialize ExecutionPlan.",
HttpResponseStatus.BAD_REQUEST,
e));
}
return executionPlan;
},
executor);
}
private static Collection<Path> getJarFilesToUpload(
Collection<String> jarFileNames, Map<String, Path> nameToFileMap)
throws MissingFileException {
Collection<Path> jarFiles = new ArrayList<>(jarFileNames.size());
for (String jarFileName : jarFileNames) {
Path jarFile = getPathAndAssertUpload(jarFileName, FILE_TYPE_JAR, nameToFileMap);
jarFiles.add(new Path(jarFile.toString()));
}
return jarFiles;
}
private static Collection<Tuple2<String, Path>> getArtifactFilesToUpload(
Collection<JobSubmitRequestBody.DistributedCacheFile> artifactEntries,
Map<String, Path> nameToFileMap)
throws MissingFileException {
Collection<Tuple2<String, Path>> artifacts = new ArrayList<>(artifactEntries.size());
for (JobSubmitRequestBody.DistributedCacheFile artifactFileName : artifactEntries) {
Path artifactFile =
getPathAndAssertUpload(
artifactFileName.fileName, FILE_TYPE_ARTIFACT, nameToFileMap);
artifacts.add(Tuple2.of(artifactFileName.entryName, new Path(artifactFile.toString())));
}
return artifacts;
}
private CompletableFuture<ExecutionPlan> uploadExecutionPlanFiles(
DispatcherGateway gateway,
CompletableFuture<ExecutionPlan> executionPlanFuture,
Collection<Path> jarFiles,
Collection<Tuple2<String, Path>> artifacts,
Configuration configuration) {
CompletableFuture<Integer> blobServerPortFuture = gateway.getBlobServerPort(timeout);
CompletableFuture<InetAddress> blobServerAddressFuture =
gateway.getBlobServerAddress(timeout);
return executionPlanFuture.thenCombine(
blobServerPortFuture.thenCombine(
blobServerAddressFuture,
(blobServerPort, blobServerAddress) ->
new InetSocketAddress(
blobServerAddress.getHostName(), blobServerPort)),
(ExecutionPlan executionPlan, InetSocketAddress blobSocketAddress) -> {
try {
ClientUtils.uploadExecutionPlanFiles(
executionPlan,
jarFiles,
artifacts,
() -> new BlobClient(blobSocketAddress, configuration));
} catch (FlinkException e) {
throw new CompletionException(
new RestHandlerException(
"Could not upload job files.",
HttpResponseStatus.INTERNAL_SERVER_ERROR,
e));
}
return executionPlan;
});
}
private static Path getPathAndAssertUpload(
String fileName, String type, Map<String, Path> uploadedFiles)
throws MissingFileException {
final Path file = uploadedFiles.get(fileName);
if (file == null) {
throw new MissingFileException(type, fileName);
}
return file;
}
private static final
|
JobSubmitHandler
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/annotation/AccessorsStyle.java
|
{
"start": 1146,
"end": 2104
}
|
class ____ {
*
* private String name;
* private int age;
*
* public Person(String name, int age) {
* this.name = name;
* this.age = age;
* }
*
* public String name() {
* return this.name;
* }
*
* public void name(String name) {
* this.name = name;
* }
*
* public int age() {
* return this.age;
* }
*
* public void age(int age) {
* this.age = age;
* }
* }</pre>
* <p>
* Defining the {@code readPrefixes} and {@code writePrefixes} as empty strings makes Micronaut aware of those accessors.
*
* It is also possible to annotate fields with this annotation but the usage is only limited when using it with @ConfigurationBuilder.
*
* @author Iván López
* @since 3.3.0
*/
@Documented
@Retention(RUNTIME)
@Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE, ElementType.FIELD, ElementType.PACKAGE})
@Inherited
@Experimental
public @
|
Person
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/selection/methodgenerics/multiple/ReturnTypeHasMultipleTypeVarBothGenericMapper.java
|
{
"start": 379,
"end": 812
}
|
interface ____ {
ReturnTypeHasMultipleTypeVarBothGenericMapper INSTANCE =
Mappers.getMapper( ReturnTypeHasMultipleTypeVarBothGenericMapper.class );
Target toTarget(Source source);
default <T, U> HashMap<T, U> toMap( Pair<T, U> entry) {
HashMap<T, U> result = new HashMap<>( );
result.put( entry.first, entry.second );
return result;
}
|
ReturnTypeHasMultipleTypeVarBothGenericMapper
|
java
|
elastic__elasticsearch
|
distribution/tools/geoip-cli/src/main/java/org/elasticsearch/geoip/GeoIpCli.java
|
{
"start": 1550,
"end": 7139
}
|
class ____ extends Command {
private static final byte[] EMPTY_BUF = new byte[512];
private final OptionSpec<String> sourceDirectory;
private final OptionSpec<String> targetDirectory;
public GeoIpCli() {
super("A CLI tool to prepare local GeoIp database service");
sourceDirectory = parser.acceptsAll(Arrays.asList("s", "source"), "Source directory").withRequiredArg().required();
targetDirectory = parser.acceptsAll(Arrays.asList("t", "target"), "Target directory").withRequiredArg();
}
@Override
protected void execute(Terminal terminal, OptionSet options, ProcessInfo processInfo) throws Exception {
Path source = getPath(options.valueOf(sourceDirectory));
String targetString = options.valueOf(targetDirectory);
Path target = targetString != null ? getPath(targetString) : source;
copyTgzToTarget(source, target);
packDatabasesToTgz(terminal, source, target);
createOverviewJson(terminal, target);
}
@SuppressForbidden(reason = "file arg for cli")
private static Path getPath(String file) {
return PathUtils.get(file);
}
private static void copyTgzToTarget(Path source, Path target) throws IOException {
if (source.equals(target)) {
return;
}
try (Stream<Path> files = Files.list(source)) {
for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).toList()) {
Files.copy(path, target.resolve(path.getFileName()), StandardCopyOption.REPLACE_EXISTING);
}
}
}
private static void packDatabasesToTgz(Terminal terminal, Path source, Path target) throws IOException {
try (Stream<Path> files = Files.list(source)) {
for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".mmdb")).toList()) {
String fileName = path.getFileName().toString();
Path compressedPath = target.resolve(fileName.replaceAll("mmdb$", "") + "tgz");
terminal.println("Found " + fileName + ", will compress it to " + compressedPath.getFileName());
try (
OutputStream fos = Files.newOutputStream(compressedPath, TRUNCATE_EXISTING, CREATE);
OutputStream gos = new GZIPOutputStream(new BufferedOutputStream(fos))
) {
long size = Files.size(path);
gos.write(createTarHeader(fileName, size));
Files.copy(path, gos);
if (size % 512 != 0) {
gos.write(EMPTY_BUF, 0, (int) (512 - (size % 512)));
}
gos.write(EMPTY_BUF);
gos.write(EMPTY_BUF);
}
}
}
}
private static void createOverviewJson(Terminal terminal, Path directory) throws IOException {
Path overview = directory.resolve("overview.json");
try (
Stream<Path> files = Files.list(directory);
OutputStream os = new BufferedOutputStream(Files.newOutputStream(overview, TRUNCATE_EXISTING, CREATE));
XContentGenerator generator = XContentType.JSON.xContent().createGenerator(os)
) {
generator.writeStartArray();
for (Path db : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).toList()) {
terminal.println("Adding " + db.getFileName() + " to overview.json");
MessageDigest md5 = MessageDigests.md5();
try (InputStream dis = new DigestInputStream(new BufferedInputStream(Files.newInputStream(db)), md5)) {
dis.transferTo(OutputStream.nullOutputStream());
}
String digest = MessageDigests.toHexString(md5.digest());
generator.writeStartObject();
String fileName = db.getFileName().toString();
generator.writeStringField("name", fileName);
generator.writeStringField("md5_hash", digest);
generator.writeStringField("url", fileName);
generator.writeNumberField("updated", System.currentTimeMillis());
generator.writeEndObject();
}
generator.writeEndArray();
}
terminal.println("overview.json created");
}
private static byte[] createTarHeader(String name, long size) {
byte[] buf = new byte[512];
byte[] sizeBytes = String.format(Locale.ROOT, "%1$012o", size).getBytes(StandardCharsets.UTF_8);
byte[] nameBytes = name.substring(Math.max(0, name.length() - 100)).getBytes(StandardCharsets.US_ASCII);
byte[] id = "0001750".getBytes(StandardCharsets.UTF_8);
byte[] permission = "000644 ".getBytes(StandardCharsets.UTF_8);
byte[] time = String.format(Locale.ROOT, "%1$012o", System.currentTimeMillis() / 1000).getBytes(StandardCharsets.UTF_8);
System.arraycopy(nameBytes, 0, buf, 0, nameBytes.length);
System.arraycopy(permission, 0, buf, 100, 7);
System.arraycopy(id, 0, buf, 108, 7);
System.arraycopy(id, 0, buf, 116, 7);
System.arraycopy(sizeBytes, 0, buf, 124, 12);
System.arraycopy(time, 0, buf, 136, 12);
int checksum = 256;
for (byte b : buf) {
checksum += b & 0xFF;
}
byte[] checksumBytes = String.format(Locale.ROOT, "%1$07o", checksum).getBytes(StandardCharsets.UTF_8);
System.arraycopy(checksumBytes, 0, buf, 148, 7);
return buf;
}
}
|
GeoIpCli
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/compression/BufferDecompressionException.java
|
{
"start": 1054,
"end": 1460
}
|
class ____ extends RuntimeException {
public BufferDecompressionException() {
super();
}
public BufferDecompressionException(String message) {
super(message);
}
public BufferDecompressionException(String message, Throwable e) {
super(message, e);
}
public BufferDecompressionException(Throwable e) {
super(e);
}
}
|
BufferDecompressionException
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/PropertyHolderBuilder.java
|
{
"start": 591,
"end": 656
}
|
interface ____
*
* @author Emmanuel Bernard
*/
public final
|
adding
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java
|
{
"start": 44985,
"end": 45479
}
|
interface ____");
});
}
// Tests the format of the error for a somewhat complex binding method.
@Test
public void formatTest() {
Source modules =
CompilerTests.javaSource(
"test.Modules",
"package test;",
"",
"import com.google.common.collect.ImmutableList;",
"import dagger.Module;",
"import dagger.Provides;",
"import javax.inject.Singleton;",
"",
"
|
Parent
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/StaticNestedTestCase.java
|
{
"start": 423,
"end": 518
}
|
class ____ {
@SuppressWarnings("JUnitMalformedDeclaration")
@Nested
static
|
StaticNestedTestCase
|
java
|
google__gson
|
gson/src/main/java/com/google/gson/internal/GsonTypes.java
|
{
"start": 22189,
"end": 22498
}
|
interface ____ multiple upper bounds and multiple lower bounds. We only
* support what the target Java version supports - at most one bound, see also
* https://bugs.openjdk.java.net/browse/JDK-8250660. If a lower bound is set, the upper bound must
* be Object.class.
*/
private static final
|
supports
|
java
|
apache__spark
|
examples/src/main/java/org/apache/spark/examples/ml/JavaALSExample.java
|
{
"start": 1283,
"end": 4577
}
|
class ____ implements Serializable {
private int userId;
private int movieId;
private float rating;
private long timestamp;
public Rating() {}
public Rating(int userId, int movieId, float rating, long timestamp) {
this.userId = userId;
this.movieId = movieId;
this.rating = rating;
this.timestamp = timestamp;
}
public int getUserId() {
return userId;
}
public int getMovieId() {
return movieId;
}
public float getRating() {
return rating;
}
public long getTimestamp() {
return timestamp;
}
public static Rating parseRating(String str) {
String[] fields = str.split("::");
if (fields.length != 4) {
throw new IllegalArgumentException("Each line must contain 4 fields");
}
int userId = Integer.parseInt(fields[0]);
int movieId = Integer.parseInt(fields[1]);
float rating = Float.parseFloat(fields[2]);
long timestamp = Long.parseLong(fields[3]);
return new Rating(userId, movieId, rating, timestamp);
}
}
// $example off$
public static void main(String[] args) {
SparkSession spark = SparkSession
.builder()
.appName("JavaALSExample")
.getOrCreate();
// $example on$
JavaRDD<Rating> ratingsRDD = spark
.read().textFile("data/mllib/als/sample_movielens_ratings.txt").javaRDD()
.map(Rating::parseRating);
Dataset<Row> ratings = spark.createDataFrame(ratingsRDD, Rating.class);
Dataset<Row>[] splits = ratings.randomSplit(new double[]{0.8, 0.2});
Dataset<Row> training = splits[0];
Dataset<Row> test = splits[1];
// Build the recommendation model using ALS on the training data
ALS als = new ALS()
.setMaxIter(5)
.setRegParam(0.01)
.setUserCol("userId")
.setItemCol("movieId")
.setRatingCol("rating");
ALSModel model = als.fit(training);
// Evaluate the model by computing the RMSE on the test data
// Note we set cold start strategy to 'drop' to ensure we don't get NaN evaluation metrics
model.setColdStartStrategy("drop");
Dataset<Row> predictions = model.transform(test);
RegressionEvaluator evaluator = new RegressionEvaluator()
.setMetricName("rmse")
.setLabelCol("rating")
.setPredictionCol("prediction");
double rmse = evaluator.evaluate(predictions);
System.out.println("Root-mean-square error = " + rmse);
// Generate top 10 movie recommendations for each user
Dataset<Row> userRecs = model.recommendForAllUsers(10);
// Generate top 10 user recommendations for each movie
Dataset<Row> movieRecs = model.recommendForAllItems(10);
// Generate top 10 movie recommendations for a specified set of users
Dataset<Row> users = ratings.select(als.getUserCol()).distinct().limit(3);
Dataset<Row> userSubsetRecs = model.recommendForUserSubset(users, 10);
// Generate top 10 user recommendations for a specified set of movies
Dataset<Row> movies = ratings.select(als.getItemCol()).distinct().limit(3);
Dataset<Row> movieSubSetRecs = model.recommendForItemSubset(movies, 10);
// $example off$
userRecs.show();
movieRecs.show();
userSubsetRecs.show();
movieSubSetRecs.show();
spark.stop();
}
}
|
Rating
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/utils/Shell.java
|
{
"start": 1149,
"end": 1305
}
|
class ____ running a Unix command.
*
* <code>Shell</code> can be used to run unix commands like <code>du</code> or
* <code>df</code>.
*/
public abstract
|
for
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/ExecutingTest.java
|
{
"start": 48739,
"end": 50187
}
|
class ____ extends ExecutionJobVertex {
private final ExecutionVertex mockExecutionVertex;
MockExecutionJobVertex(
Function<ExecutionJobVertex, ExecutionVertex> executionVertexSupplier)
throws JobException {
this(executionVertexSupplier, new JobVertex("test"));
}
MockExecutionJobVertex(
final Function<ExecutionJobVertex, ExecutionVertex> executionVertexSupplier,
final JobVertex jobVertex)
throws JobException {
super(
new MockInternalExecutionGraphAccessor(),
jobVertex,
new DefaultVertexParallelismInfo(1, 1, max -> Optional.empty()),
new CoordinatorStoreImpl(),
UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup());
initialize(
1,
Duration.ofMillis(1L),
1L,
new DefaultSubtaskAttemptNumberStore(Collections.emptyList()));
mockExecutionVertex = executionVertexSupplier.apply(this);
}
@Override
public ExecutionVertex[] getTaskVertices() {
return new ExecutionVertex[] {mockExecutionVertex};
}
public ExecutionVertex getMockExecutionVertex() {
return mockExecutionVertex;
}
}
static
|
MockExecutionJobVertex
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/updatemethods/ErroneousOrganizationMapper1.java
|
{
"start": 450,
"end": 1175
}
|
interface ____ {
ErroneousOrganizationMapper1 INSTANCE = Mappers.getMapper( ErroneousOrganizationMapper1.class );
@Mapping(target = "type", constant = "commercial")
void toOrganizationEntity(OrganizationDto dto, @MappingTarget OrganizationWithoutCompanyGetterEntity entity);
void toCompanyEntity(CompanyDto dto, @MappingTarget CompanyEntity entity);
@Mapping(target = "type", source = "type")
void toName(String type, @MappingTarget OrganizationTypeEntity entity);
@Mappings({
@Mapping(target = "employees", ignore = true ),
@Mapping(target = "secretaryToEmployee", ignore = true )
})
DepartmentEntity toDepartmentEntity(DepartmentDto dto);
}
|
ErroneousOrganizationMapper1
|
java
|
google__auto
|
value/src/it/functional/src/test/java/com/google/auto/value/AutoValueJava8Test.java
|
{
"start": 30176,
"end": 30373
}
|
class ____ the case where an annotation is both a method annotation and a type
// annotation. If we weren't careful, we might emit it twice in the generated code.
@AutoValue
abstract static
|
tests
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/io/FilesTest.java
|
{
"start": 3840,
"end": 12871
}
|
class ____ extends File {
private final long badLength;
BadLengthFile(File delegate, long badLength) {
super(delegate.getPath());
this.badLength = badLength;
}
@Override
public long length() {
return badLength;
}
private static final long serialVersionUID = 0;
}
public void testToString() throws IOException {
File asciiFile = getTestFile("ascii.txt");
File i18nFile = getTestFile("i18n.txt");
assertEquals(ASCII, Files.toString(asciiFile, US_ASCII));
assertEquals(I18N, Files.toString(i18nFile, UTF_8));
assertThat(Files.toString(i18nFile, US_ASCII)).isNotEqualTo(I18N);
}
public void testWriteString() throws IOException {
File temp = createTempFile();
Files.write(I18N, temp, UTF_16LE);
assertEquals(I18N, Files.toString(temp, UTF_16LE));
}
public void testWriteBytes() throws IOException {
File temp = createTempFile();
byte[] data = newPreFilledByteArray(2000);
Files.write(data, temp);
assertThat(Files.toByteArray(temp)).isEqualTo(data);
assertThrows(NullPointerException.class, () -> Files.write(null, temp));
}
public void testAppendString() throws IOException {
File temp = createTempFile();
Files.append(I18N, temp, UTF_16LE);
assertEquals(I18N, Files.toString(temp, UTF_16LE));
Files.append(I18N, temp, UTF_16LE);
assertEquals(I18N + I18N, Files.toString(temp, UTF_16LE));
Files.append(I18N, temp, UTF_16LE);
assertEquals(I18N + I18N + I18N, Files.toString(temp, UTF_16LE));
}
public void testCopyToOutputStream() throws IOException {
File i18nFile = getTestFile("i18n.txt");
ByteArrayOutputStream out = new ByteArrayOutputStream();
Files.copy(i18nFile, out);
assertEquals(I18N, out.toString("UTF-8"));
}
public void testCopyToAppendable() throws IOException {
File i18nFile = getTestFile("i18n.txt");
StringBuilder sb = new StringBuilder();
Files.copy(i18nFile, UTF_8, sb);
assertEquals(I18N, sb.toString());
}
public void testCopyFile() throws IOException {
File i18nFile = getTestFile("i18n.txt");
File temp = createTempFile();
Files.copy(i18nFile, temp);
assertEquals(I18N, Files.toString(temp, UTF_8));
}
public void testCopyEqualFiles() throws IOException {
File temp1 = createTempFile();
File temp2 = file(temp1.getPath());
assertEquals(temp1, temp2);
Files.write(ASCII, temp1, UTF_8);
assertThrows(IllegalArgumentException.class, () -> Files.copy(temp1, temp2));
assertEquals(ASCII, Files.toString(temp1, UTF_8));
}
public void testCopySameFile() throws IOException {
File temp = createTempFile();
Files.write(ASCII, temp, UTF_8);
assertThrows(IllegalArgumentException.class, () -> Files.copy(temp, temp));
assertEquals(ASCII, Files.toString(temp, UTF_8));
}
public void testCopyIdenticalFiles() throws IOException {
File temp1 = createTempFile();
Files.write(ASCII, temp1, UTF_8);
File temp2 = createTempFile();
Files.write(ASCII, temp2, UTF_8);
Files.copy(temp1, temp2);
assertEquals(ASCII, Files.toString(temp2, UTF_8));
}
public void testEqual() throws IOException {
File asciiFile = getTestFile("ascii.txt");
File i18nFile = getTestFile("i18n.txt");
assertFalse(Files.equal(asciiFile, i18nFile));
assertTrue(Files.equal(asciiFile, asciiFile));
File temp = createTempFile();
Files.copy(asciiFile, temp);
assertTrue(Files.equal(asciiFile, temp));
Files.copy(i18nFile, temp);
assertTrue(Files.equal(i18nFile, temp));
Files.copy(asciiFile, temp);
RandomAccessFile rf = new RandomAccessFile(temp, "rw");
rf.writeByte(0);
rf.close();
assertEquals(asciiFile.length(), temp.length());
assertFalse(Files.equal(asciiFile, temp));
assertTrue(Files.asByteSource(asciiFile).contentEquals(Files.asByteSource(asciiFile)));
// 0-length files have special treatment (/proc, etc.)
assertTrue(Files.equal(asciiFile, new BadLengthFile(asciiFile, 0)));
}
public void testNewReader() throws IOException {
File asciiFile = getTestFile("ascii.txt");
assertThrows(NullPointerException.class, () -> Files.newReader(asciiFile, null));
assertThrows(NullPointerException.class, () -> Files.newReader(null, UTF_8));
BufferedReader r = Files.newReader(asciiFile, US_ASCII);
try {
assertEquals(ASCII, r.readLine());
} finally {
r.close();
}
}
public void testNewWriter() throws IOException {
File temp = createTempFile();
assertThrows(NullPointerException.class, () -> Files.newWriter(temp, null));
assertThrows(NullPointerException.class, () -> Files.newWriter(null, UTF_8));
BufferedWriter w = Files.newWriter(temp, UTF_8);
try {
w.write(I18N);
} finally {
w.close();
}
File i18nFile = getTestFile("i18n.txt");
assertTrue(Files.equal(i18nFile, temp));
}
public void testTouch() throws IOException {
File temp = createTempFile();
assertTrue(temp.exists());
assertTrue(temp.delete());
assertFalse(temp.exists());
Files.touch(temp);
assertTrue(temp.exists());
Files.touch(temp);
assertTrue(temp.exists());
assertThrows(
IOException.class,
() ->
Files.touch(
new File(temp.getPath()) {
@Override
public boolean setLastModified(long t) {
return false;
}
private static final long serialVersionUID = 0;
}));
}
public void testTouchTime() throws IOException {
File temp = createTempFile();
assertTrue(temp.exists());
temp.setLastModified(0);
assertEquals(0, temp.lastModified());
Files.touch(temp);
assertThat(temp.lastModified()).isNotEqualTo(0);
}
public void testCreateParentDirs_root() throws IOException {
File file = root();
assertThat(file.getParentFile()).isNull();
assertThat(file.getCanonicalFile().getParentFile()).isNull();
Files.createParentDirs(file);
}
public void testCreateParentDirs_relativePath() throws IOException {
File file = file("nonexistent.file");
assertThat(file.getParentFile()).isNull();
assertThat(file.getCanonicalFile().getParentFile()).isNotNull();
Files.createParentDirs(file);
}
public void testCreateParentDirs_noParentsNeeded() throws IOException {
File file = file(getTempDir(), "nonexistent.file");
assertTrue(file.getParentFile().exists());
Files.createParentDirs(file);
}
public void testCreateParentDirs_oneParentNeeded() throws IOException {
File file = file(getTempDir(), "parent", "nonexistent.file");
File parent = file.getParentFile();
assertFalse(parent.exists());
try {
Files.createParentDirs(file);
assertTrue(parent.exists());
} finally {
assertTrue(parent.delete());
}
}
public void testCreateParentDirs_multipleParentsNeeded() throws IOException {
File file = file(getTempDir(), "grandparent", "parent", "nonexistent.file");
File parent = file.getParentFile();
File grandparent = parent.getParentFile();
assertFalse(grandparent.exists());
Files.createParentDirs(file);
assertTrue(parent.exists());
}
public void testCreateParentDirs_nonDirectoryParentExists() throws IOException {
File parent = getTestFile("ascii.txt");
assertTrue(parent.isFile());
File file = file(parent, "foo");
assertThrows(IOException.class, () -> Files.createParentDirs(file));
}
public void testMove() throws IOException {
File i18nFile = getTestFile("i18n.txt");
File temp1 = createTempFile();
File temp2 = createTempFile();
Files.copy(i18nFile, temp1);
moveHelper(true, temp1, temp2);
assertTrue(Files.equal(temp2, i18nFile));
}
public void testMoveViaCopy() throws IOException {
File i18nFile = getTestFile("i18n.txt");
File temp1 = createTempFile();
File temp2 = createTempFile();
Files.copy(i18nFile, temp1);
moveHelper(true, new UnmovableFile(temp1, false, true), temp2);
assertTrue(Files.equal(temp2, i18nFile));
}
public void testMoveFailures() throws IOException {
File temp1 = createTempFile();
File temp2 = createTempFile();
moveHelper(false, new UnmovableFile(temp1, false, false), temp2);
moveHelper(
false, new UnmovableFile(temp1, false, false), new UnmovableFile(temp2, true, false));
File asciiFile = getTestFile("ascii.txt");
assertThrows(IllegalArgumentException.class, () -> moveHelper(false, asciiFile, asciiFile));
}
private void moveHelper(boolean success, File from, File to) throws IOException {
try {
Files.move(from, to);
if (success) {
assertFalse(from.exists());
assertTrue(to.exists());
} else {
fail("expected exception");
}
} catch (IOException possiblyExpected) {
if (success) {
throw possiblyExpected;
}
}
}
private static
|
BadLengthFile
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng5608ProfileActivationWarningTest.java
|
{
"start": 1496,
"end": 3090
}
|
class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testitMNG5608() throws Exception {
File testDir = extractResources("/mng-5608-profile-activation-warning");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
// check expected profiles activated, just for sanity (or build should have failed, given other profiles)
assertFileExists(testDir, "target/exists-basedir");
assertFileExists(testDir, "target/mng-5608-missing-project.basedir");
// check that the 2 profiles using ${project.basedir} caused warnings
List<String> logFile = verifier.loadLogLines();
assertNotNull(findWarning(logFile, "mng-5608-exists-project.basedir"));
assertNotNull(findWarning(logFile, "mng-5608-missing-project.basedir"));
}
private void assertFileExists(File dir, String filename) {
File file = new File(dir, filename);
assertTrue(file.exists(), "expected file: " + file);
}
private String findWarning(List<String> logLines, String profileId) {
Pattern pattern =
Pattern.compile("(?i).*Failed to interpolate file location ..project.basedir./pom.xml for profile "
+ profileId + ": .*");
for (String logLine : logLines) {
if (pattern.matcher(logLine).matches()) {
return logLine;
}
}
return null;
}
}
|
MavenITmng5608ProfileActivationWarningTest
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/model/DefaultInheritanceAssembler.java
|
{
"start": 1769,
"end": 5879
}
|
class ____ implements InheritanceAssembler {
private static final String CHILD_DIRECTORY = "child-directory";
private static final String CHILD_DIRECTORY_PROPERTY = "project.directory";
private final MavenMerger merger;
@Inject
public DefaultInheritanceAssembler() {
this(new InheritanceModelMerger());
}
public DefaultInheritanceAssembler(MavenMerger merger) {
this.merger = merger;
}
@Override
public Model assembleModelInheritance(
Model child, Model parent, ModelBuilderRequest request, ModelProblemCollector problems) {
Map<Object, Object> hints = new HashMap<>();
String childPath = child.getProperties().getOrDefault(CHILD_DIRECTORY_PROPERTY, child.getArtifactId());
hints.put(CHILD_DIRECTORY, childPath);
hints.put(MavenModelMerger.CHILD_PATH_ADJUSTMENT, getChildPathAdjustment(child, parent, childPath));
return merger.merge(child, parent, false, hints);
}
/**
* Calculates the relative path from the base directory of the parent to the parent directory of the base directory
* of the child. The general idea is to adjust inherited URLs to match the project layout (in SCM).
*
* <p>This calculation is only a heuristic based on our conventions.
* In detail, the algo relies on the following assumptions: <ul>
* <li>The parent uses aggregation and refers to the child via the modules section</li>
* <li>The module path to the child is considered to
* point at the POM rather than its base directory if the path ends with ".xml" (ignoring case)</li>
* <li>The name of the child's base directory matches the artifact id of the child.</li>
* </ul>
* Note that for the sake of independence from the user
* environment, the filesystem is intentionally not used for the calculation.</p>
*
* @param child The child model, must not be <code>null</code>.
* @param parent The parent model, may be <code>null</code>.
* @param childDirectory The directory defined in child model, may be <code>null</code>.
* @return The path adjustment, can be empty but never <code>null</code>.
*/
private String getChildPathAdjustment(Model child, Model parent, String childDirectory) {
String adjustment = "";
if (parent != null) {
String childName = child.getArtifactId();
/*
* This logic (using filesystem, against wanted independence from the user environment) exists only for the
* sake of backward-compat with 2.x (MNG-5000). In general, it is wrong to
* base URL inheritance on the module directory names as this information is unavailable for POMs in the
* repository. In other words, modules where artifactId != moduleDirName will see different effective URLs
* depending on how the model was constructed (from filesystem or from repository).
*/
if (child.getProjectDirectory() != null) {
childName = child.getProjectDirectory().getFileName().toString();
}
for (String module : parent.getModules()) {
module = module.replace('\\', '/');
if (module.regionMatches(true, module.length() - 4, ".xml", 0, 4)) {
module = module.substring(0, module.lastIndexOf('/') + 1);
}
String moduleName = module;
if (moduleName.endsWith("/")) {
moduleName = moduleName.substring(0, moduleName.length() - 1);
}
int lastSlash = moduleName.lastIndexOf('/');
moduleName = moduleName.substring(lastSlash + 1);
if ((moduleName.equals(childName) || (moduleName.equals(childDirectory))) && lastSlash >= 0) {
adjustment = module.substring(0, lastSlash);
break;
}
}
}
return adjustment;
}
/**
* InheritanceModelMerger
*/
protected static
|
DefaultInheritanceAssembler
|
java
|
spring-projects__spring-boot
|
module/spring-boot-r2dbc/src/main/java/org/springframework/boot/r2dbc/autoconfigure/ConnectionFactoryOptionsBuilderCustomizer.java
|
{
"start": 798,
"end": 1052
}
|
interface ____ can be implemented by beans wishing to customize the
* {@link Builder} to fine-tune its auto-configuration before it creates a
* {@link ConnectionFactoryOptions}.
*
* @author Mark Paluch
* @since 4.0.0
*/
@FunctionalInterface
public
|
that
|
java
|
mapstruct__mapstruct
|
processor/src/test/resources/fixtures/org/mapstruct/ap/test/value/exception/CustomUnexpectedValueMappingExceptionDefinedInMapperConfigImpl.java
|
{
"start": 2364,
"end": 3043
}
|
enum ____: " + orderType );
}
return externalOrderType;
}
@Override
public OrderType inverseOnlyWithMappings(ExternalOrderType orderType) {
if ( orderType == null ) {
return null;
}
OrderType orderType1;
switch ( orderType ) {
case SPECIAL: orderType1 = OrderType.EXTRA;
break;
case DEFAULT: orderType1 = OrderType.STANDARD;
break;
case RETAIL: orderType1 = OrderType.RETAIL;
break;
case B2B: orderType1 = OrderType.B2B;
break;
default: throw new CustomIllegalArgumentException( "Unexpected
|
constant
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/dirtiness/HHH11866Test.java
|
{
"start": 4067,
"end": 5104
}
|
class ____ {
private final Map<String, String> setterToPropertyMap = new HashMap<>();
@Transient
private final Set<String> dirtyProperties = new LinkedHashSet<>();
public SelfDirtyCheckingEntity() {
try {
BeanInfo beanInfo = Introspector.getBeanInfo( getClass() );
PropertyDescriptor[] descriptors = beanInfo.getPropertyDescriptors();
for ( PropertyDescriptor descriptor : descriptors ) {
Method setter = descriptor.getWriteMethod();
if ( setter != null ) {
setterToPropertyMap.put( setter.getName(), descriptor.getName() );
}
}
}
catch (IntrospectionException e) {
throw new IllegalStateException( e );
}
}
public Set<String> getDirtyProperties() {
return dirtyProperties;
}
public void clearDirtyProperties() {
dirtyProperties.clear();
}
protected void markDirtyProperty() {
String methodName = Thread.currentThread().getStackTrace()[2].getMethodName();
dirtyProperties.add( setterToPropertyMap.get( methodName ) );
}
}
}
|
SelfDirtyCheckingEntity
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ext/javatime/deser/OffsetDateTimeDeserTest.java
|
{
"start": 948,
"end": 1462
}
|
class ____
extends DateTimeTestBase
{
private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ISO_OFFSET_DATE_TIME;
private final TypeReference<Map<String, OffsetDateTime>> MAP_TYPE_REF = new TypeReference<Map<String, OffsetDateTime>>() { };
private static final ZoneId Z1 = ZoneId.of("America/Chicago");
private static final ZoneId Z2 = ZoneId.of("America/Anchorage");
private static final ZoneId Z3 = ZoneId.of("America/Los_Angeles");
final static
|
OffsetDateTimeDeserTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/EmbeddableMappingTypeImpl.java
|
{
"start": 12683,
"end": 29437
}
|
class ____
if ( bootDescriptor.getStructName() != null ) {
basicTypeRegistry.register( basicType, bootDescriptor.getStructName().render() );
basicTypeRegistry.register( basicType, getMappedJavaType().getJavaTypeClass().getName() );
}
final BasicType<?> resolvedJdbcMapping;
if ( isArray ) {
final JdbcTypeConstructor arrayConstructor = jdbcTypeRegistry.getConstructor( aggregateColumnSqlTypeCode );
if ( arrayConstructor == null ) {
throw new IllegalArgumentException( "No JdbcTypeConstructor registered for SqlTypes." + JdbcTypeNameMapper.getTypeName( aggregateColumnSqlTypeCode ) );
}
//noinspection rawtypes,unchecked
final BasicType<?> arrayType = ( (BasicPluralJavaType) resolution.getDomainJavaType() ).resolveType(
typeConfiguration,
creationContext.getDialect(),
basicType,
aggregateColumn,
typeConfiguration.getCurrentBaseSqlTypeIndicators()
);
basicTypeRegistry.register( arrayType );
resolvedJdbcMapping = arrayType;
}
else {
resolvedJdbcMapping = basicType;
}
resolution.updateResolution( resolvedJdbcMapping );
return resolvedJdbcMapping;
}
public EmbeddableMappingTypeImpl(
EmbeddedAttributeMapping valueMapping,
TableGroupProducer declaringTableGroupProducer,
SelectableMappings selectableMappings,
EmbeddableMappingType inverseMappingType,
MappingModelCreationProcess creationProcess) {
super( new MutableAttributeMappingList( 5 ) );
this.embeddableJtd = inverseMappingType.getJavaType();
this.representationStrategy = inverseMappingType.getRepresentationStrategy();
this.valueMapping = valueMapping;
this.discriminatorMapping = null;
this.concreteEmbeddableBySubclass = null;
this.concreteEmbeddableByDiscriminator = null;
this.aggregateMapping = null;
this.aggregateMappingRequiresColumnWriter = false;
this.preferSelectAggregateMapping = false;
this.preferBindAggregateMapping = false;
this.selectableMappings = selectableMappings;
creationProcess.registerInitializationCallback(
"EmbeddableMappingType(" + inverseMappingType.getNavigableRole().getFullPath() + ".{inverse})#finishInitialization",
() -> inverseInitializeCallback(
declaringTableGroupProducer,
selectableMappings,
inverseMappingType,
creationProcess,
this,
attributeMappings
)
);
}
public EmbeddableMappingType createInverseMappingType(
EmbeddedAttributeMapping valueMapping,
TableGroupProducer declaringTableGroupProducer,
SelectableMappings selectableMappings,
MappingModelCreationProcess creationProcess) {
return new EmbeddableMappingTypeImpl(
valueMapping,
declaringTableGroupProducer,
selectableMappings,
this,
creationProcess
);
}
private boolean finishInitialization(
Component bootDescriptor,
CompositeType compositeType,
String rootTableExpression,
String[] rootTableKeyColumnNames,
DependantValue dependantValue,
int dependantColumnIndex,
boolean[] insertability,
boolean[] updateability,
MappingModelCreationProcess creationProcess) {
// for some reason I cannot get this to work, though only a single test fails - `CompositeElementTest`
// return finishInitialization(
// getNavigableRole(),
// bootDescriptor,
// compositeType,
// rootTableExpression,
// rootTableKeyColumnNames,
// this,
// representationStrategy,
// (name, type) -> {},
// (column, jdbcEnvironment) -> getTableIdentifierExpression(
// column.getValue().getTable(),
// jdbcEnvironment
// ),
// this::addAttribute,
// () -> {
// // We need the attribute mapping types to finish initialization first before we can build the column mappings
// creationProcess.registerInitializationCallback(
// "EmbeddableMappingType(" + getEmbeddedValueMapping().getNavigableRole().getFullPath() + ")#initColumnMappings",
// this::initColumnMappings
// );
// },
// creationProcess
// );
// todo (6.0) - get this ^^ to work, or drop the comment
final TypeConfiguration typeConfiguration = creationProcess.getCreationContext().getTypeConfiguration();
final JdbcServices jdbcServices = creationProcess.getCreationContext().getJdbcServices();
final JdbcEnvironment jdbcEnvironment = jdbcServices.getJdbcEnvironment();
final Dialect dialect = jdbcEnvironment.getDialect();
final String baseTableExpression = valueMapping.getContainingTableExpression();
final Type[] subtypes = compositeType.getSubtypes();
int attributeIndex = 0;
int columnPosition = 0;
// Reset the attribute mappings that were added in previous attempts
attributeMappings.clear();
for ( final Property bootPropertyDescriptor : bootDescriptor.getProperties() ) {
final AttributeMapping attributeMapping;
final Type subtype = subtypes[attributeIndex];
final Value value = bootPropertyDescriptor.getValue();
if ( subtype instanceof BasicType ) {
final var basicValue = (BasicValue) value;
final var selectable =
dependantValue != null
? dependantValue.getColumns().get( dependantColumnIndex + columnPosition )
: basicValue.getColumn();
final String containingTableExpression;
final String columnExpression;
if ( rootTableKeyColumnNames == null ) {
if ( selectable.isFormula() ) {
columnExpression = selectable.getTemplate( dialect,
creationProcess.getCreationContext().getTypeConfiguration() );
}
else {
columnExpression = selectable.getText( dialect );
}
if ( selectable instanceof Column column ) {
containingTableExpression = MappingModelCreationHelper.getTableIdentifierExpression(
column.getValue().getTable(),
creationProcess
);
}
else {
containingTableExpression = baseTableExpression;
}
}
else {
containingTableExpression = rootTableExpression;
columnExpression = rootTableKeyColumnNames[columnPosition];
}
final var role = valueMapping.getNavigableRole().append( bootPropertyDescriptor.getName() );
final SelectablePath selectablePath;
final String columnDefinition;
final Long length;
final Integer arrayLength;
final Integer precision;
final Integer scale;
final Integer temporalPrecision;
final boolean isLob;
final boolean nullable;
if ( selectable instanceof Column column ) {
columnDefinition = column.getSqlType();
length = column.getLength();
arrayLength = column.getArrayLength();
precision = column.getPrecision();
scale = column.getScale();
temporalPrecision = column.getTemporalPrecision();
isLob = column.isSqlTypeLob( creationProcess.getCreationContext().getMetadata() );
nullable = bootPropertyDescriptor.isOptional() && column.isNullable() ;
selectablePath = basicValue.createSelectablePath( column.getQuotedName( dialect ) );
MappingModelCreationHelper.resolveAggregateColumnBasicType( creationProcess, role, column );
}
else {
columnDefinition = null;
length = null;
arrayLength = null;
precision = null;
scale = null;
temporalPrecision = null;
isLob = false;
nullable = bootPropertyDescriptor.isOptional();
selectablePath = new SelectablePath( determineEmbeddablePrefix() + bootPropertyDescriptor.getName() );
}
attributeMapping = MappingModelCreationHelper.buildBasicAttributeMapping(
bootPropertyDescriptor.getName(),
role,
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
this,
basicValue.getResolution().getLegacyResolvedBasicType(),
containingTableExpression,
columnExpression,
selectablePath,
selectable.isFormula(),
selectable.getCustomReadExpression(),
selectable.getWriteExpr(
basicValue.getResolution().getJdbcMapping(),
dialect,
creationProcess.getCreationContext().getBootModel()
),
columnDefinition,
length,
arrayLength,
precision,
scale,
temporalPrecision,
isLob,
nullable,
insertability[columnPosition],
updateability[columnPosition],
representationStrategy.resolvePropertyAccess( bootPropertyDescriptor ),
compositeType.getCascadeStyle( attributeIndex ),
creationProcess
);
columnPosition++;
}
else if ( subtype instanceof AnyType anyType ) {
final var bootValueMapping = (Any) value;
final var propertyAccess = representationStrategy.resolvePropertyAccess( bootPropertyDescriptor );
final boolean nullable = bootValueMapping.isNullable();
final boolean insertable = insertability[columnPosition];
final boolean updateable = updateability[columnPosition];
final boolean includeInOptimisticLocking = bootPropertyDescriptor.isOptimisticLocked();
final var cascadeStyle = compositeType.getCascadeStyle( attributeIndex );
SimpleAttributeMetadata attributeMetadataAccess = new SimpleAttributeMetadata(
propertyAccess,
getMutabilityPlan( updateable ),
nullable,
insertable,
updateable,
includeInOptimisticLocking,
true,
cascadeStyle
);
attributeMapping = new DiscriminatedAssociationAttributeMapping(
valueMapping.getNavigableRole().append( bootPropertyDescriptor.getName() ),
typeConfiguration.getJavaTypeRegistry().resolveDescriptor( Object.class ),
this,
attributeIndex,
attributeIndex,
attributeMetadataAccess,
bootPropertyDescriptor.isLazy() ? FetchTiming.DELAYED : FetchTiming.IMMEDIATE,
propertyAccess,
bootPropertyDescriptor,
anyType,
bootValueMapping,
creationProcess
);
}
else if ( subtype instanceof CompositeType subCompositeType ) {
final int columnSpan = subCompositeType.getColumnSpan( creationProcess.getCreationContext().getMetadata() );
final String subTableExpression;
final String[] subRootTableKeyColumnNames;
if ( rootTableKeyColumnNames == null ) {
subTableExpression = baseTableExpression;
subRootTableKeyColumnNames = null;
}
else {
subTableExpression = rootTableExpression;
subRootTableKeyColumnNames = new String[columnSpan];
System.arraycopy( rootTableKeyColumnNames, columnPosition, subRootTableKeyColumnNames, 0, columnSpan );
}
attributeMapping = MappingModelCreationHelper.buildEmbeddedAttributeMapping(
bootPropertyDescriptor.getName(),
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
dependantValue,
dependantColumnIndex + columnPosition,
this,
subCompositeType,
subTableExpression,
subRootTableKeyColumnNames,
representationStrategy.resolvePropertyAccess( bootPropertyDescriptor ),
compositeType.getCascadeStyle( attributeIndex ),
creationProcess
);
columnPosition += columnSpan;
}
else if ( subtype instanceof CollectionType ) {
attributeMapping = MappingModelCreationHelper.buildPluralAttributeMapping(
bootPropertyDescriptor.getName(),
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
this,
representationStrategy.resolvePropertyAccess( bootPropertyDescriptor ),
compositeType.getCascadeStyle( attributeIndex),
compositeType.getFetchMode( attributeIndex ),
creationProcess
);
}
else if ( subtype instanceof EntityType subentityType ) {
attributeMapping = MappingModelCreationHelper.buildSingularAssociationAttributeMapping(
bootPropertyDescriptor.getName(),
valueMapping.getNavigableRole().append( bootPropertyDescriptor.getName() ),
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
this,
creationProcess.getEntityPersister( bootDescriptor.getOwner().getEntityName() ),
subentityType,
representationStrategy.resolvePropertyAccess( bootPropertyDescriptor ),
compositeType.getCascadeStyle( attributeIndex ),
creationProcess
);
columnPosition += bootPropertyDescriptor.getColumnSpan();
}
else {
throw new MappingException(
String.format(
Locale.ROOT,
"Unable to determine attribute nature : %s#%s",
bootDescriptor.getOwner().getEntityName(),
bootPropertyDescriptor.getName()
)
);
}
if ( isPolymorphic() ) {
final String declaringClass = bootDescriptor.getPropertyDeclaringClass( bootPropertyDescriptor );
for ( var entry : concreteEmbeddableBySubclass.entrySet() ) {
if ( isDefinedInClassOrSuperclass( bootDescriptor, declaringClass, entry.getKey() ) ) {
entry.getValue().declaredAttributes.set( attributeMapping.getStateArrayPosition() );
}
}
}
addAttribute( attributeMapping );
attributeIndex++;
}
// We need the attribute mapping types to finish initialization first before we can build the column mappings
creationProcess.registerInitializationCallback(
"EmbeddableMappingType(" + valueMapping.getNavigableRole().getFullPath() + ")#initColumnMappings",
this::initColumnMappings
);
return true;
}
private boolean isDefinedInClassOrSuperclass(Component bootDescriptor, String declaringClass, String subclass) {
while ( subclass != null ) {
if ( declaringClass.equals( subclass ) ) {
return true;
}
subclass = bootDescriptor.getSuperclass( subclass );
}
return false;
}
private static MutabilityPlan<?> getMutabilityPlan(boolean updateable) {
if ( updateable ) {
return new MutabilityPlan<>() {
@Override
public boolean isMutable() {
return true;
}
@Override
public Object deepCopy(Object value) {
return value;
}
@Override
public Serializable disassemble(Object value, SharedSessionContract session) {
throw new UnsupportedOperationException();
}
@Override
public Object assemble(Serializable cached, SharedSessionContract session) {
throw new UnsupportedOperationException();
}
};
}
else {
return ImmutableMutabilityPlan.instance();
}
}
private EmbeddableDiscriminatorMapping generateDiscriminatorMapping(
Component bootDescriptor,
RuntimeModelCreationContext creationContext) {
final Value discriminator = bootDescriptor.getDiscriminator();
if ( discriminator == null ) {
return null;
}
final var selectable = discriminator.getSelectables().get( 0 );
final String discriminatorColumnExpression;
final String columnDefinition;
final String name;
final Long length;
final Integer arrayLength;
final Integer precision;
final Integer scale;
final boolean isFormula = discriminator.hasFormula();
if ( isFormula ) {
final Formula formula = (Formula) selectable;
discriminatorColumnExpression = name = formula.getTemplate(
creationContext.getDialect(),
creationContext.getTypeConfiguration()
);
columnDefinition = null;
length = null;
arrayLength = null;
precision = null;
scale = null;
}
else {
final Column column = discriminator.getColumns().get( 0 );
assert column != null : "Embeddable discriminators require a column";
discriminatorColumnExpression = column.getReadExpr( creationContext.getDialect() );
columnDefinition = column.getSqlType();
name = column.getName();
length = column.getLength();
arrayLength = column.getArrayLength();
precision = column.getPrecision();
scale = column.getScale();
}
return new ExplicitColumnDiscriminatorMappingImpl(
this,
name,
bootDescriptor.getTable()
.getQualifiedName( creationContext.getSqlStringGenerationContext() ),
discriminatorColumnExpression,
isFormula,
!isFormula,
!isFormula,
columnDefinition,
selectable.getCustomReadExpression(),
length,
arrayLength,
precision,
scale,
bootDescriptor.getDiscriminatorType()
);
}
public EmbeddableValuedModelPart getEmbeddedValueMapping() {
return valueMapping;
}
@Override
public EmbeddableDiscriminatorMapping getDiscriminatorMapping() {
return discriminatorMapping;
}
@Override
public JavaType<?> getMappedJavaType() {
return embeddableJtd;
}
public EmbeddableRepresentationStrategy getRepresentationStrategy() {
return representationStrategy;
}
@Override
public String getPartName() {
return getEmbeddedValueMapping().getPartName();
}
@Override
public NavigableRole getNavigableRole() {
return valueMapping.getNavigableRole();
}
@Override
public <T> DomainResult<T> createDomainResult(
NavigablePath navigablePath,
TableGroup tableGroup,
String resultVariable,
DomainResultCreationState creationState) {
return new EmbeddableResultImpl<>(
navigablePath,
valueMapping,
resultVariable,
creationState
);
}
private static final
|
name
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/AutoValueBoxedValuesTest.java
|
{
"start": 7466,
"end": 8636
}
|
class ____ {",
" abstract Builder setLongId(@Nullable Long value);",
" abstract Builder setIntId(@Nullable Integer value);",
" abstract Builder setByteId(@Nullable Byte value);",
" abstract Builder setShortId(@Nullable Short value);",
" abstract Builder setFloatId(@Nullable Float value);",
" abstract Builder setDoubleId(@Nullable Double value);",
" abstract Builder setBooleanId(@Nullable Boolean value);",
" abstract Builder setCharId(@Nullable Character value);",
" abstract Test build();",
" }"),
lines("}")))
.doTest();
}
@Test
public void genericNullableBoxedTypes() {
compilationHelper
.addSourceLines(
"in/Test.java",
mergeLines(
lines(
"import com.google.auto.value.AutoValue;",
"import org.checkerframework.checker.nullness.qual.Nullable;",
"@AutoValue",
"abstract
|
Builder
|
java
|
apache__camel
|
components/camel-platform-http-main/src/main/java/org/apache/camel/component/platform/http/main/MainHttpServerDevConsole.java
|
{
"start": 1161,
"end": 3321
}
|
class ____ extends AbstractDevConsole {
public MainHttpServerDevConsole() {
super("camel", "main-http-server", "Main HTTP Server", "Camel Main HTTP Server");
}
@Override
protected String doCallText(Map<String, Object> options) {
StringBuilder sb = new StringBuilder();
MainHttpServer server = getCamelContext().hasService(MainHttpServer.class);
if (server != null) {
String p = server.getPath();
if (p != null && p.startsWith("/")) {
p = p.substring(1);
}
String url = String.format("%s:%s%s", server.getHost(), server.getPort(), p);
sb.append(String.format(" Server: http://%s", url));
if (server.getMaxBodySize() != null) {
sb.append(String.format("\n Max Body Size: %s", server.getMaxBodySize()));
}
sb.append(String.format("\n File Upload Enabled: %b", server.isFileUploadEnabled()));
sb.append(String.format("\n File Upload Dir: %s", server.getFileUploadDirectory()));
sb.append(String.format("\n Use Global SSL ContextParameters: %s", server.isUseGlobalSslContextParameters()));
}
return sb.toString();
}
@Override
protected Map<String, Object> doCallJson(Map<String, Object> options) {
JsonObject root = new JsonObject();
MainHttpServer server = getCamelContext().hasService(MainHttpServer.class);
if (server != null) {
root.put("host", server.getHost());
root.put("port", server.getPort());
root.put("path", server.getPath());
if (server.getMaxBodySize() != null) {
root.put("maxBodySize", server.getMaxBodySize());
}
root.put("fileUploadEnabled", server.isFileUploadEnabled());
if (server.getFileUploadDirectory() != null) {
root.put("fileUploadDirectory", server.getFileUploadDirectory());
}
root.put("useGlobalSslContextParameters", server.isUseGlobalSslContextParameters());
}
return root;
}
}
|
MainHttpServerDevConsole
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/orphan/onetoone/A.java
|
{
"start": 370,
"end": 686
}
|
class ____ implements Serializable {
private Integer id;
private B b;
@Id
@GeneratedValue
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@OneToOne(mappedBy = "a", orphanRemoval = true)
public B getB() {
return b;
}
public void setB(B b) {
this.b = b;
}
}
|
A
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/parameters/converters/YearMonthParamConverter.java
|
{
"start": 348,
"end": 1208
}
|
class ____ extends TemporalParamConverter<YearMonth> {
// lifted from the JDK as PARSER is private...
private static final DateTimeFormatter PARSER = new DateTimeFormatterBuilder()
.appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
.appendLiteral('-')
.appendValue(MONTH_OF_YEAR, 2)
.toFormatter();
// this can be called by generated code
public YearMonthParamConverter() {
super(PARSER);
}
public YearMonthParamConverter(DateTimeFormatter formatter) {
super(formatter);
}
@Override
protected YearMonth convert(String value) {
return YearMonth.parse(value);
}
@Override
protected YearMonth convert(String value, DateTimeFormatter formatter) {
return YearMonth.parse(value, formatter);
}
public static
|
YearMonthParamConverter
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/main/java/org/springframework/boot/test/json/JsonContentAssert.java
|
{
"start": 23161,
"end": 26545
}
|
class ____ to load the resource
* @return {@code this} assertion object
* @throws AssertionError if the actual JSON value is equal to the given one
*/
public JsonContentAssert isNotStrictlyEqualToJson(String path, Class<?> resourceLoadClass) {
String expectedJson = this.loader.getJson(path, resourceLoadClass);
return assertNotPassed(compare(expectedJson, JSONCompareMode.STRICT));
}
/**
* Verifies that the actual value is not {@link JSONCompareMode#STRICT strictly} equal
* to the specified JSON bytes.
* @param expected the expected JSON bytes
* @return {@code this} assertion object
* @throws AssertionError if the actual JSON value is equal to the given one
*/
public JsonContentAssert isNotStrictlyEqualToJson(byte[] expected) {
String expectedJson = this.loader.getJson(expected);
return assertNotPassed(compare(expectedJson, JSONCompareMode.STRICT));
}
/**
* Verifies that the actual value is not {@link JSONCompareMode#STRICT strictly} equal
* to the specified JSON file.
* @param expected a file containing the expected JSON
* @return {@code this} assertion object
* @throws AssertionError if the actual JSON value is equal to the given one
*/
public JsonContentAssert isNotStrictlyEqualToJson(File expected) {
String expectedJson = this.loader.getJson(expected);
return assertNotPassed(compare(expectedJson, JSONCompareMode.STRICT));
}
/**
* Verifies that the actual value is not {@link JSONCompareMode#STRICT strictly} equal
* to the specified JSON input stream.
* @param expected an input stream containing the expected JSON
* @return {@code this} assertion object
* @throws AssertionError if the actual JSON value is equal to the given one
*/
public JsonContentAssert isNotStrictlyEqualToJson(InputStream expected) {
String expectedJson = this.loader.getJson(expected);
return assertNotPassed(compare(expectedJson, JSONCompareMode.STRICT));
}
/**
* Verifies that the actual value is not {@link JSONCompareMode#STRICT strictly} equal
* to the specified JSON resource.
* @param expected a resource containing the expected JSON
* @return {@code this} assertion object
* @throws AssertionError if the actual JSON value is equal to the given one
*/
public JsonContentAssert isNotStrictlyEqualToJson(Resource expected) {
String expectedJson = this.loader.getJson(expected);
return assertNotPassed(compare(expectedJson, JSONCompareMode.STRICT));
}
/**
* Verifies that the actual value is not equal to the specified JSON. The
* {@code expected} value can contain the JSON itself or, if it ends with
* {@code .json}, the name of a resource to be loaded using {@code resourceLoadClass}.
* @param expected the expected JSON or the name of a resource containing the expected
* JSON
* @param compareMode the compare mode used when checking
* @return {@code this} assertion object
* @throws AssertionError if the actual JSON value is equal to the given one
*/
public JsonContentAssert isNotEqualToJson(CharSequence expected, JSONCompareMode compareMode) {
String expectedJson = this.loader.getJson(expected);
return assertNotPassed(compare(expectedJson, compareMode));
}
/**
* Verifies that the actual value is not equal to the specified JSON resource.
* @param path the name of a resource containing the expected JSON
* @param resourceLoadClass the source
|
used
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/ManifestCommitterTestSupport.java
|
{
"start": 3069,
"end": 12694
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(
ManifestCommitterTestSupport.class);
private static final DateTimeFormatter FORMATTER =
DateTimeFormatter.ofPattern("yyyyMMddHHmmss");
/**
* Build directory property.
* Value: {@value}.
*/
public static final String PROJECT_BUILD_DIRECTORY_PROPERTY
= "project.build.directory";
/**
* default number of task attempts for some tests.
* Value: {@value}.
*/
public static final int NUMBER_OF_TASK_ATTEMPTS = 2000;
/**
* Smaller number of task attempts for some tests against object
* stores where IO overhead is higher.
* Value: {@value}.
*/
public static final int NUMBER_OF_TASK_ATTEMPTS_SMALL = 200;
private ManifestCommitterTestSupport() {
}
/**
* Create a random Job ID using the fork ID as part of the number if
* set in the current process.
* @return fork ID string in a format parseable by Jobs
*/
public static String randomJobId() {
String testUniqueForkId = System.getProperty("test.unique.fork.id", "0001");
int l = testUniqueForkId.length();
String trailingDigits = testUniqueForkId.substring(l - 4, l);
int digitValue;
try {
digitValue = Integer.valueOf(trailingDigits);
} catch (NumberFormatException e) {
digitValue = 0;
}
return String.format("%s%04d_%04d",
FORMATTER.format(LocalDateTime.now()),
(long) (Math.random() * 1000),
digitValue);
}
public static File getProjectBuildDir() {
String propval = System.getProperty(PROJECT_BUILD_DIRECTORY_PROPERTY);
if (StringUtils.isEmpty(propval)) {
propval = "target";
}
return new File(propval).getAbsoluteFile();
}
/**
* Load a success file; fail if the file is empty/nonexistent.
* @param fs filesystem
* @param outputPath directory containing the success file.
* @return the loaded file.
* @throws IOException failure to find/load the file
* @throws AssertionError file is 0-bytes long,
*/
public static ManifestSuccessData loadSuccessFile(final FileSystem fs,
final Path outputPath) throws IOException {
Path success = new Path(outputPath, SUCCESS_MARKER);
return ManifestSuccessData.load(fs, success);
}
/**
* Load in the success data marker.
* @param fs filesystem
* @param outputDir ouptu path of job
* @param minimumFileCount minimum number of files to have been created
* @param jobId job ID, only verified if non-empty
* @return the success data
* @throws IOException IO failure
*/
public static ManifestSuccessData validateSuccessFile(
final FileSystem fs,
final Path outputDir,
final int minimumFileCount,
final String jobId) throws IOException {
Path successPath = new Path(outputDir, SUCCESS_MARKER);
ManifestSuccessData successData
= loadAndPrintSuccessData(fs, successPath);
assertThat(successData.getCommitter())
.describedAs("Committer field")
.isEqualTo(MANIFEST_COMMITTER_CLASSNAME);
assertThat(successData.getFilenames())
.describedAs("Files committed")
.hasSizeGreaterThanOrEqualTo(minimumFileCount);
if (isNotEmpty(jobId)) {
assertThat(successData.getJobId())
.describedAs("JobID")
.isEqualTo(jobId);
}
return successData;
}
/**
* Load in and print a success data manifest.
* @param fs filesystem
* @param successPath full path to success file.
* @return the success data
* @throws IOException IO failure
*/
public static ManifestSuccessData loadAndPrintSuccessData(
FileSystem fs,
Path successPath) throws IOException {
LOG.info("Manifest {}", successPath);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
final ManifestPrinter showManifest = new ManifestPrinter(fs.getConf(), ps);
ManifestSuccessData successData = showManifest.loadAndPrintManifest(fs, successPath);
LOG.info("{}", baos);
return successData;
}
/**
* Validate all generated files from the manifest.
* All files in the manifest must exist.
* If the exclusive flag is set, only those must exist
* (ignoring all temp files and everything in the _temporary
* dir)
* @param fs filesystem
* @param destDir dest dir to scan
* @param successData manifest
* @param exclusive expect exclusive and complete data.
* @return the files and their status
* @throws IOException IO failure.
*/
public static Map<Path, LocatedFileStatus> validateGeneratedFiles(
FileSystem fs,
Path destDir,
ManifestSuccessData successData,
boolean exclusive) throws IOException {
Map<Path, LocatedFileStatus> fileListing = new HashMap<>();
RemoteIterators.foreach(fs.listFiles(destDir, true),
e -> {
if (!e.getPath().getName().startsWith("_")) {
fileListing.put(e.getPath(), e);
}
});
final List<Path> actual = fileListing.keySet().stream()
.sorted(Comparator.comparing(Path::getName))
.collect(Collectors.toList());
// map has all files other than temp ones and the success marker
// what do we expect
final List<Path> expected = filesInManifest(successData);
expected.sort(Comparator.comparing(Path::getName));
// all of those must be found
Assertions.assertThat(actual)
.describedAs("Files in FS expected to contain all listed in manifest")
.containsAll(expected);
// and if exclusive, that too
if (exclusive) {
Assertions.assertThat(actual)
.describedAs("Files in FS expected to be exclusively of the job")
.hasSize(expected.size())
.containsExactlyInAnyOrderElementsOf(expected);
}
return fileListing;
}
/**
* Given a manifest, get the list of filenames
* and convert to paths.
* @param successData data
* @return the paths.
*/
public static List<Path> filesInManifest(ManifestSuccessData successData) {
return successData.getFilenames().stream()
.map(AbstractManifestData::unmarshallPath)
.collect(Collectors.toList());
}
/**
* List a directory/directory tree.
* @param fileSystem FS
* @param path path
* @param recursive do a recursive listing?
* @return the number of files found.
* @throws IOException failure.
*/
public static long lsR(FileSystem fileSystem, Path path, boolean recursive)
throws Exception {
if (path == null) {
// surfaces when someone calls getParent() on something at the top
// of the path
LOG.info("Empty path");
return 0;
}
return RemoteIterators.foreach(fileSystem.listFiles(path, recursive),
(status) -> LOG.info("{}", status));
}
/**
 * Assert that a file entry matches the given source path, dest path
 * and length.
 * Matching is done on {@link Path} objects, not strings, which also
 * validates the entry's marshalling round trip.
 * @param fileOrDir file or directory entry
 * @param src expected source path
 * @param dest expected dest path
 * @param l expected length
 */
static void assertFileEntryMatch(
    final FileEntry fileOrDir,
    final Path src,
    final Path dest,
    final long l) {
    final String description = fileOrDir.toString();
    assertThat(fileOrDir.getSourcePath())
        .describedAs("Source path of " + description)
        .isEqualTo(src);
    assertThat(fileOrDir.getDestPath())
        .describedAs("Dest path of " + description)
        .isEqualTo(dest);
    assertThat(fileOrDir.getSize())
        .describedAs("Size of " + description)
        .isEqualTo(l);
}
/**
 * Assert that a directory entry matches the given dest path and type.
 * Matching is done on {@link Path} objects, not strings, which also
 * validates the entry's marshalling round trip.
 * @param fileOrDir directory entry under test
 * @param dest expected dest path
 * @param type expected type code
 */
static void assertDirEntryMatch(
    final DirEntry fileOrDir,
    final Path dest,
    final long type) {
    final String description = fileOrDir.toString();
    assertThat(fileOrDir.getDestPath())
        .describedAs("Dest path of " + description)
        .isEqualTo(dest);
    assertThat(fileOrDir.getType())
        .describedAs("type of " + description)
        .isEqualTo(type);
}
/**
 * Assert that none of the named statistics have any failure counts.
 * A failure count of null (never recorded) or 0 is acceptable.
 * @param iostats statistics to probe
 * @param names base names of the statistics (i.e. without the
 * ".failures" suffix)
 */
public static void assertNoFailureStatistics(IOStatistics iostats, String... names) {
    final Map<String, Long> counters = iostats.counters();
    for (String name : names) {
        final Long failures = counters.get(name + ".failures");
        Assertions.assertThat(failures)
            .describedAs("Failure count of %s", name)
            .matches(count -> count == null || count == 0);
    }
}
/**
 * Save a manifest to an entry file; returning the loaded manifest data.
 * On success the caller MUST clean up the temp file; if writing fails
 * the temp file is deleted here before the exception is rethrown, so
 * no file is leaked.
 * @param entryFileIO IO class
 * @param manifest manifest to process.
 * @return info about the load
 * @throws IOException write failure
 */
public static LoadedManifestData saveManifest(EntryFileIO entryFileIO, TaskManifest manifest)
    throws IOException {
    final File tempFile = File.createTempFile("entries", ".seq");
    try {
        final SequenceFile.Writer writer = entryFileIO.createWriter(tempFile);
        // NOTE(review): the trailing "true" presumably tells write() to
        // close the writer when done -- confirm against EntryFileIO.write().
        return new LoadedManifestData(
            manifest.getDestDirectories(),
            toPath(tempFile),
            EntryFileIO.write(writer, manifest.getFilesToCommit(), true));
    } catch (IOException e) {
        // writing failed: remove the temp file rather than leaking it.
        tempFile.delete();
        throw e;
    }
}
/**
* Closeable which can be used to safely close writers in
* a try-with-resources block..
*/
public static final
|
ManifestCommitterTestSupport
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/AnnotationTypeMappingsTests.java
|
{
"start": 23488,
"end": 23577
}
|
interface ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@Repeating
@Repeating
@
|
Repeating
|
java
|
apache__camel
|
core/camel-console/src/main/java/org/apache/camel/impl/console/RouteGroupDevConsole.java
|
{
"start": 1893,
"end": 14171
}
|
class ____ extends AbstractDevConsole {
private static final Logger LOG = LoggerFactory.getLogger(RouteGroupDevConsole.class);
/**
* Filters the route groups matching by group id
*/
public static final String FILTER = "filter";
/**
* Limits the number of entries displayed
*/
public static final String LIMIT = "limit";
/**
* Action to perform such as start, or stop
*/
public static final String ACTION = "action";
public RouteGroupDevConsole() {
super("camel", "route-group", "Route Group", "Route Group information");
}
@Override
protected String doCallText(Map<String, Object> options) {
String action = (String) options.get(ACTION);
String filter = (String) options.get(FILTER);
if (action != null) {
doAction(getCamelContext(), action, filter);
return "";
}
final StringBuilder sb = new StringBuilder();
Function<ManagedRouteGroupMBean, Object> task = mrg -> {
if (!sb.isEmpty()) {
sb.append("\n");
}
sb.append(String.format(" Group: %s", mrg.getRouteGroup()));
sb.append(String.format("\n Size: %s", mrg.getGroupSize()));
sb.append(String.format("\n State: %s", mrg.getState()));
sb.append(String.format("\n Uptime: %s", mrg.getUptime()));
String coverage = calculateRouteCoverage(mrg, true);
if (coverage != null) {
sb.append(String.format("\n Coverage: %s", coverage));
}
String load1 = getLoad1(mrg);
String load5 = getLoad5(mrg);
String load15 = getLoad15(mrg);
if (!load1.isEmpty() || !load5.isEmpty() || !load15.isEmpty()) {
sb.append(String.format("\n Load Average: %s %s %s", load1, load5, load15));
}
String thp = getThroughput(mrg);
if (!thp.isEmpty()) {
sb.append(String.format("\n Messages/Sec: %s", thp));
}
sb.append(String.format("\n Total: %s", mrg.getExchangesTotal()));
sb.append(String.format("\n Failed: %s", mrg.getExchangesFailed()));
sb.append(String.format("\n Inflight: %s", mrg.getExchangesInflight()));
long idle = mrg.getIdleSince();
if (idle > 0) {
sb.append(String.format("\n Idle Since: %s", TimeUtils.printDuration(idle)));
} else {
sb.append(String.format("\n Idle Since: %s", ""));
}
sb.append(String.format("\n Mean Time: %s", TimeUtils.printDuration(mrg.getMeanProcessingTime(), true)));
sb.append(String.format("\n Max Time: %s", TimeUtils.printDuration(mrg.getMaxProcessingTime(), true)));
sb.append(String.format("\n Min Time: %s", TimeUtils.printDuration(mrg.getMinProcessingTime(), true)));
if (mrg.getExchangesTotal() > 0) {
sb.append(String.format("\n Last Time: %s", TimeUtils.printDuration(mrg.getLastProcessingTime(), true)));
sb.append(String.format("\n Delta Time: %s", TimeUtils.printDuration(mrg.getDeltaProcessingTime(), true)));
}
Date last = mrg.getLastExchangeCreatedTimestamp();
if (last != null) {
String ago = TimeUtils.printSince(last.getTime());
sb.append(String.format("\n Since Last Started: %s", ago));
}
last = mrg.getLastExchangeCompletedTimestamp();
if (last != null) {
String ago = TimeUtils.printSince(last.getTime());
sb.append(String.format("\n Since Last Completed: %s", ago));
}
last = mrg.getLastExchangeFailureTimestamp();
if (last != null) {
String ago = TimeUtils.printSince(last.getTime());
sb.append(String.format("\n Since Last Failed: %s", ago));
}
sb.append("\n");
return null;
};
doCall(options, task);
return sb.toString();
}
@Override
protected JsonObject doCallJson(Map<String, Object> options) {
String action = (String) options.get(ACTION);
String filter = (String) options.get(FILTER);
if (action != null) {
doAction(getCamelContext(), action, filter);
return new JsonObject();
}
final JsonObject root = new JsonObject();
final List<JsonObject> list = new ArrayList<>();
Function<ManagedRouteGroupMBean, Object> task = mrg -> {
JsonObject jo = new JsonObject();
list.add(jo);
jo.put("group", mrg.getRouteGroup());
jo.put("size", mrg.getGroupSize());
jo.put("state", mrg.getState());
jo.put("uptime", mrg.getUptime());
jo.put("routeIds", new JsonArray(Arrays.stream(mrg.getGroupIds()).toList()));
JsonObject stats = new JsonObject();
String coverage = calculateRouteCoverage(mrg, false);
if (coverage != null) {
stats.put("coverage", coverage);
}
String load1 = getLoad1(mrg);
String load5 = getLoad5(mrg);
String load15 = getLoad15(mrg);
if (!load1.isEmpty() || !load5.isEmpty() || !load15.isEmpty()) {
stats.put("load01", load1);
stats.put("load05", load5);
stats.put("load15", load15);
}
String thp = getThroughput(mrg);
if (!thp.isEmpty()) {
stats.put("exchangesThroughput", thp);
}
stats.put("idleSince", mrg.getIdleSince());
stats.put("exchangesTotal", mrg.getExchangesTotal());
stats.put("exchangesFailed", mrg.getExchangesFailed());
stats.put("exchangesInflight", mrg.getExchangesInflight());
stats.put("meanProcessingTime", mrg.getMeanProcessingTime());
stats.put("maxProcessingTime", mrg.getMaxProcessingTime());
stats.put("minProcessingTime", mrg.getMinProcessingTime());
if (mrg.getExchangesTotal() > 0) {
stats.put("lastProcessingTime", mrg.getLastProcessingTime());
stats.put("deltaProcessingTime", mrg.getDeltaProcessingTime());
}
Date last = mrg.getLastExchangeCreatedTimestamp();
if (last != null) {
stats.put("lastCreatedExchangeTimestamp", last.getTime());
}
last = mrg.getLastExchangeCompletedTimestamp();
if (last != null) {
stats.put("lastCompletedExchangeTimestamp", last.getTime());
}
last = mrg.getLastExchangeFailureTimestamp();
if (last != null) {
stats.put("lastFailedExchangeTimestamp", last.getTime());
}
jo.put("statistics", stats);
return null;
};
doCall(options, task);
root.put("routeGroups", list);
return root;
}
protected void doCall(Map<String, Object> options, Function<ManagedRouteGroupMBean, Object> task) {
String path = (String) options.get(Exchange.HTTP_PATH);
String subPath = path != null ? StringHelper.after(path, "/") : null;
String filter = (String) options.get(FILTER);
String limit = (String) options.get(LIMIT);
final int max = limit == null ? Integer.MAX_VALUE : Integer.parseInt(limit);
ManagedCamelContext mcc = getCamelContext().getCamelContextExtension().getContextPlugin(ManagedCamelContext.class);
if (mcc != null) {
List<Route> routes = getCamelContext().getRoutes();
routes.sort((o1, o2) -> o1.getRouteId().compareToIgnoreCase(o2.getRouteId()));
routes.stream()
.map(route -> mcc.getManagedRouteGroup(route.getGroup()))
.filter(Objects::nonNull)
.filter(r -> accept(r, filter))
.filter(r -> accept(r, subPath))
.distinct()
.sorted(RouteGroupDevConsole::sort)
.limit(max)
.forEach(task::apply);
}
}
private static boolean accept(ManagedRouteGroupMBean mrg, String filter) {
if (filter == null || filter.isBlank()) {
return true;
}
return PatternHelper.matchPattern(mrg.getRouteGroup(), filter);
}
private static int sort(ManagedRouteGroupMBean o1, ManagedRouteGroupMBean o2) {
return o1.getRouteGroup().compareToIgnoreCase(o2.getRouteGroup());
}
private String getLoad1(ManagedRouteGroupMBean mrg) {
String s = mrg.getLoad01();
// lets use dot as separator
s = s.replace(',', '.');
return s;
}
private String getLoad5(ManagedRouteGroupMBean mrg) {
String s = mrg.getLoad05();
// lets use dot as separator
s = s.replace(',', '.');
return s;
}
private String getLoad15(ManagedRouteGroupMBean mrg) {
String s = mrg.getLoad15();
// lets use dot as separator
s = s.replace(',', '.');
return s;
}
private String getThroughput(ManagedRouteGroupMBean mrg) {
String s = mrg.getThroughput();
// lets use dot as separator
s = s.replace(',', '.');
return s;
}
private String calculateRouteCoverage(ManagedRouteGroupMBean mrg, boolean percent) {
ManagedCamelContext mcc = getCamelContext().getCamelContextExtension().getContextPlugin(ManagedCamelContext.class);
Collection<String> ids = new ArrayList<>();
for (String id : mrg.getGroupIds()) {
ManagedRouteMBean mrb = mcc.getManagedRoute(id);
try {
ids.addAll(mrb.processorIds());
} catch (Exception e) {
return null;
}
}
int total = ids.size();
int covered = 0;
for (String id : ids) {
ManagedProcessorMBean mp = mcc.getManagedProcessor(id);
if (mp != null) {
if (mp.getExchangesTotal() > 0) {
covered++;
}
}
}
if (percent) {
double p;
if (total > 0) {
p = ((double) covered / total) * 100;
} else {
p = 0;
}
String f = String.format("%.0f", p);
return covered + "/" + total + " (" + f + "%)";
} else {
return covered + "/" + total;
}
}
protected void doAction(CamelContext camelContext, String command, String filter) {
if (filter == null) {
filter = "*";
}
String[] patterns = filter.split(",");
// find matching IDs
List<String> ids = camelContext.getRoutes()
.stream()
.map(Route::getGroup)
.filter(group -> {
for (String p : patterns) {
if (PatternHelper.matchPattern(group, p)) {
return true;
}
}
return false;
})
.distinct()
.toList();
for (String id : ids) {
try {
if ("start".equals(command)) {
if ("*".equals(id)) {
camelContext.getRouteController().startAllRoutes();
} else {
camelContext.getRouteController().startRouteGroup(id);
}
} else if ("stop".equals(command)) {
if ("*".equals(id)) {
camelContext.getRouteController().stopAllRoutes();
} else {
camelContext.getRouteController().stopRouteGroup(id);
}
}
} catch (Exception e) {
LOG.warn("Error {} route: {} due to: {}. This exception is ignored.", command, id, e.getMessage(), e);
}
}
}
}
|
RouteGroupDevConsole
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.