language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1600/Issue1635.java
|
{
"start": 365,
"end": 1863
}
|
class ____ {
public String name;
public Integer BarCount;
public Boolean flag;
public List list;
public Foo(String name, Integer barCount) {
this.name = name;
BarCount = barCount;
}
}
public void test_issue() throws Exception {
SerializeConfig config = new SerializeConfig();
config.setAsmEnable(false);
Foo foo = new Foo(null, null);
String json = JSON.toJSONString(foo
, config, new PascalNameFilter()
, SerializerFeature.WriteNullBooleanAsFalse
, SerializerFeature.WriteNullNumberAsZero
, SerializerFeature.WriteNullStringAsEmpty
, SerializerFeature.WriteNullListAsEmpty
);
assertEquals("{\"BarCount\":0,\"Flag\":false,\"List\":[],\"Name\":\"\"}", json);
}
public void test_issue_1() throws Exception {
SerializeConfig config = new SerializeConfig();
config.setAsmEnable(false);
Foo foo = new Foo(null, null);
String json = JSON.toJSONString(foo
, config, new PascalNameFilter()
, SerializerFeature.WriteNullBooleanAsFalse
, SerializerFeature.WriteNullNumberAsZero
, SerializerFeature.WriteNullStringAsEmpty
, SerializerFeature.WriteNullListAsEmpty
, SerializerFeature.BeanToArray
);
assertEquals("[0,false,[],\"\"]", json);
}
}
|
Foo
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
|
{
"start": 2073,
"end": 7401
}
|
class ____ extends ESIntegTestCase {
private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss";
private static final DateFormatter FORMATTER = DateFormatter.forPattern(DATE_FORMAT);
private ZonedDateTime date(String date) {
return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date));
}
@Before
public void beforeEachTest() throws IOException {
prepareCreate("idx2").setMapping("date", "type=date").get();
}
@After
public void afterEachTest() throws IOException {
internalCluster().wipeIndices("idx2");
}
private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, int idxIdStart) throws IOException,
InterruptedException {
IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours];
for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) {
reqs[i - idxIdStart] = prepareIndex("idx2").setId("" + i)
.setSource(jsonBuilder().startObject().timestampField("date", date).endObject());
date = date.plusHours(stepSizeHours);
}
indexRandom(true, reqs);
}
public void testSingleValueWithPositiveOffset() throws Exception {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, 1, 0);
assertResponse(
prepareSearch("idx2").setQuery(matchAllQuery())
.addAggregation(
dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY)
),
response -> {
assertThat(response.getHits().getTotalHits().value(), equalTo(5L));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 2, 0, 0, 0, ZoneOffset.UTC), 2L);
checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 2, 0, 0, 0, ZoneOffset.UTC), 3L);
}
);
}
public void testSingleValueWithNegativeOffset() throws Exception {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, -1, 0);
assertResponse(
prepareSearch("idx2").setQuery(matchAllQuery())
.addAggregation(
dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY)
),
response -> {
assertThat(response.getHits().getTotalHits().value(), equalTo(5L));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 9, 22, 0, 0, 0, ZoneOffset.UTC), 2L);
checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 10, 22, 0, 0, 0, ZoneOffset.UTC), 3L);
}
);
}
/**
* Set offset so day buckets start at 6am. Index first 12 hours for two days, with one day gap.
*/
public void testSingleValueWithOffsetMinDocCount() throws Exception {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 12, 1, 0);
prepareIndex(date("2014-03-14T00:00:00+00:00"), 12, 1, 13);
assertResponse(
prepareSearch("idx2").setQuery(matchAllQuery())
.addAggregation(
dateHistogram("date_histo").field("date")
.offset("6h")
.minDocCount(0)
.format(DATE_FORMAT)
.fixedInterval(DateHistogramInterval.DAY)
),
response -> {
assertThat(response.getHits().getTotalHits().value(), equalTo(24L));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(5));
checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
checkBucketFor(buckets.get(2), ZonedDateTime.of(2014, 3, 12, 6, 0, 0, 0, ZoneOffset.UTC), 0L);
checkBucketFor(buckets.get(3), ZonedDateTime.of(2014, 3, 13, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
checkBucketFor(buckets.get(4), ZonedDateTime.of(2014, 3, 14, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
}
);
}
/**
* @param bucket the bucket to check assertions for
* @param key the expected key
* @param expectedSize the expected size of the bucket
*/
private static void checkBucketFor(Histogram.Bucket bucket, ZonedDateTime key, long expectedSize) {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(FORMATTER.format(key)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(expectedSize));
}
}
|
DateHistogramOffsetIT
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/GenericDenseEmbeddingBitResultsTests.java
|
{
"start": 701,
"end": 5542
}
|
class ____ extends AbstractWireSerializingTestCase<GenericDenseEmbeddingBitResults> {
public static GenericDenseEmbeddingBitResults createRandomResults() {
int embeddings = randomIntBetween(1, 10);
List<GenericDenseEmbeddingByteResults.Embedding> embeddingResults = new ArrayList<>(embeddings);
for (int i = 0; i < embeddings; i++) {
embeddingResults.add(createRandomEmbedding());
}
return new GenericDenseEmbeddingBitResults(embeddingResults);
}
private static GenericDenseEmbeddingByteResults.Embedding createRandomEmbedding() {
int columns = randomIntBetween(1, 10);
byte[] bytes = new byte[columns];
for (int i = 0; i < columns; i++) {
bytes[i] = randomByte();
}
return new GenericDenseEmbeddingByteResults.Embedding(bytes);
}
public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOException {
var entity = new GenericDenseEmbeddingBitResults(List.of(new GenericDenseEmbeddingByteResults.Embedding(new byte[] { (byte) 23 })));
String xContentResult = Strings.toString(entity, true, true);
assertThat(xContentResult, is("""
{
"embeddings_bits" : [
{
"embedding" : [
23
]
}
]
}"""));
}
public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws IOException {
var entity = new GenericDenseEmbeddingBitResults(
List.of(
new GenericDenseEmbeddingByteResults.Embedding(new byte[] { (byte) 23 }),
new GenericDenseEmbeddingByteResults.Embedding(new byte[] { (byte) 24 })
)
);
String xContentResult = Strings.toString(entity, true, true);
assertThat(xContentResult, is("""
{
"embeddings_bits" : [
{
"embedding" : [
23
]
},
{
"embedding" : [
24
]
}
]
}"""));
}
public void testTransformToCoordinationFormat() {
var results = new GenericDenseEmbeddingBitResults(
List.of(
new GenericDenseEmbeddingByteResults.Embedding(new byte[] { (byte) 23, (byte) 24 }),
new GenericDenseEmbeddingByteResults.Embedding(new byte[] { (byte) 25, (byte) 26 })
)
).transformToCoordinationFormat();
assertThat(
results,
is(
List.of(
new MlDenseEmbeddingResults(GenericDenseEmbeddingBitResults.EMBEDDINGS_BITS, new double[] { 23F, 24F }, false),
new MlDenseEmbeddingResults(GenericDenseEmbeddingBitResults.EMBEDDINGS_BITS, new double[] { 25F, 26F }, false)
)
)
);
}
public void testGetFirstEmbeddingSize() {
var firstEmbeddingSize = new GenericDenseEmbeddingBitResults(
List.of(
new GenericDenseEmbeddingByteResults.Embedding(new byte[] { (byte) 23, (byte) 24 }),
new GenericDenseEmbeddingByteResults.Embedding(new byte[] { (byte) 25, (byte) 26 })
)
).getFirstEmbeddingSize();
assertThat(firstEmbeddingSize, is(16));
}
@Override
protected Writeable.Reader<GenericDenseEmbeddingBitResults> instanceReader() {
return GenericDenseEmbeddingBitResults::new;
}
@Override
protected GenericDenseEmbeddingBitResults createTestInstance() {
return createRandomResults();
}
@Override
protected GenericDenseEmbeddingBitResults mutateInstance(GenericDenseEmbeddingBitResults instance) throws IOException {
// if true we reduce the embeddings list by a random amount, if false we add an embedding to the list
if (randomBoolean()) {
// -1 to remove at least one item from the list
int end = randomInt(instance.embeddings().size() - 1);
return new GenericDenseEmbeddingBitResults(instance.embeddings().subList(0, end));
} else {
List<GenericDenseEmbeddingByteResults.Embedding> embeddings = new ArrayList<>(instance.embeddings());
embeddings.add(createRandomEmbedding());
return new GenericDenseEmbeddingBitResults(embeddings);
}
}
public static Map<String, Object> buildExpectationByte(List<List<Byte>> embeddings) {
return Map.of(
GenericDenseEmbeddingBitResults.EMBEDDINGS_BITS,
embeddings.stream().map(embedding -> Map.of(EmbeddingResults.EMBEDDING, embedding)).toList()
);
}
}
|
GenericDenseEmbeddingBitResultsTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/ConverterExceptionsTest.java
|
{
"start": 527,
"end": 2447
}
|
class ____ {
@Test
public void testPersisting(EntityManagerFactoryScope scope) {
final EntityManager entityManager = scope.getEntityManagerFactory().createEntityManager();
try {
entityManager.getTransaction().begin();
Person b = new Person(
1,
"drea",
new Address( "S. Egidtio", "Gradoli" )
);
entityManager.persist( b );
entityManager.flush();
entityManager.getTransaction().commit();
fail( "Expected PersistenceException" );
}
catch (PersistenceException pe) {
if ( !entityManager.getTransaction().getRollbackOnly() ) {
fail( "Transaction was not marked for rollback" );
}
}
catch (Exception ex) {
fail( "Expected PersistenceException but thrown:", ex );
}
finally {
try {
if ( entityManager.getTransaction().isActive() ) {
entityManager.getTransaction().rollback();
}
}
finally {
entityManager.close();
}
}
}
@Test
public void testLoading(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
Farm person = new Farm(
1, "drea",
new Address( "S. Egidtio", "Gradoli" )
);
entityManager.persist( person );
entityManager.flush();
}
);
final EntityManager entityManager = scope.getEntityManagerFactory().createEntityManager();
try {
entityManager.getTransaction().begin();
try {
entityManager.find( Farm.class, 1 );
fail( "PersistenceException expected" );
}
catch (PersistenceException pe) {
if ( !entityManager.getTransaction().getRollbackOnly() ) {
fail( "Transaction was not marked for rollback" );
}
}
catch (Exception ex) {
fail( "Expected PersistenceException but thrown:", ex );
}
}
finally {
try {
if ( entityManager.getTransaction().isActive() ) {
entityManager.getTransaction().rollback();
}
}
finally {
entityManager.close();
}
}
}
}
|
ConverterExceptionsTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/language/RefTest.java
|
{
"start": 1173,
"end": 2704
}
|
class ____ extends LanguageTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myExp", new MyExpression());
return jndi;
}
@Test
public void testRefExpressions() {
assertExpression("myExp", "Hello World");
}
@Test
public void testRefExpressionsNotFound() {
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> assertExpression("foo", "Hello World"),
"Should have thrown an exception");
assertEquals("Cannot find expression or predicate in registry with ref: foo", e.getMessage());
}
@Test
public void testRefDynamicExpressions() {
exchange.getMessage().setHeader("foo", "myExp");
assertExpression("${header.foo}", "Hello World");
}
@Test
public void testRefDynamicExpressionsNotFound() {
exchange.getMessage().setHeader("foo", "myExp2");
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> assertExpression("${header.foo}", "Hello World"),
"Should have thrown an exception");
assertEquals("Cannot find expression or predicate in registry with ref: myExp2", e.getMessage());
}
@Test
public void testPredicates() {
assertPredicate("myExp");
}
@Override
protected String getLanguageName() {
return "ref";
}
private static
|
RefTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/ast/tree/from/TableReferenceJoin.java
|
{
"start": 569,
"end": 1787
}
|
class ____ implements TableJoin, PredicateContainer {
private final boolean innerJoin;
private final NamedTableReference joinedTableBinding;
private Predicate predicate;
public TableReferenceJoin(boolean innerJoin, NamedTableReference joinedTableBinding, Predicate predicate) {
this.innerJoin = innerJoin;
this.joinedTableBinding = joinedTableBinding;
this.predicate = predicate;
}
@Override
public SqlAstJoinType getJoinType() {
return innerJoin ? SqlAstJoinType.INNER : SqlAstJoinType.LEFT;
}
public NamedTableReference getJoinedTableReference() {
return joinedTableBinding;
}
@Override
public SqlAstNode getJoinedNode() {
return joinedTableBinding;
}
@Override
public Predicate getPredicate() {
return predicate;
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
sqlTreeWalker.visitTableReferenceJoin( this );
}
@Override
public String toString() {
return getJoinType().getText() + "join " + getJoinedTableReference().toString();
}
@Override
public boolean isInitialized() {
return true;
}
@Override
public void applyPredicate(Predicate newPredicate) {
predicate = SqlAstTreeHelper.combinePredicates( predicate, newPredicate);
}
}
|
TableReferenceJoin
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapOutputCollector.java
|
{
"start": 1668,
"end": 2174
}
|
class ____ {
private final MapTask mapTask;
private final JobConf jobConf;
private final TaskReporter reporter;
public Context(MapTask mapTask, JobConf jobConf, TaskReporter reporter) {
this.mapTask = mapTask;
this.jobConf = jobConf;
this.reporter = reporter;
}
public MapTask getMapTask() {
return mapTask;
}
public JobConf getJobConf() {
return jobConf;
}
public TaskReporter getReporter() {
return reporter;
}
}
}
|
Context
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableAmb.java
|
{
"start": 961,
"end": 2843
}
|
class ____<T> extends Observable<T> {
final ObservableSource<? extends T>[] sources;
final Iterable<? extends ObservableSource<? extends T>> sourcesIterable;
public ObservableAmb(ObservableSource<? extends T>[] sources, Iterable<? extends ObservableSource<? extends T>> sourcesIterable) {
this.sources = sources;
this.sourcesIterable = sourcesIterable;
}
@Override
@SuppressWarnings("unchecked")
public void subscribeActual(Observer<? super T> observer) {
ObservableSource<? extends T>[] sources = this.sources;
int count = 0;
if (sources == null) {
sources = new ObservableSource[8];
try {
for (ObservableSource<? extends T> p : sourcesIterable) {
if (p == null) {
EmptyDisposable.error(new NullPointerException("One of the sources is null"), observer);
return;
}
if (count == sources.length) {
ObservableSource<? extends T>[] b = new ObservableSource[count + (count >> 2)];
System.arraycopy(sources, 0, b, 0, count);
sources = b;
}
sources[count++] = p;
}
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
EmptyDisposable.error(e, observer);
return;
}
} else {
count = sources.length;
}
if (count == 0) {
EmptyDisposable.complete(observer);
return;
} else
if (count == 1) {
sources[0].subscribe(observer);
return;
}
AmbCoordinator<T> ac = new AmbCoordinator<>(observer, count);
ac.subscribe(sources);
}
static final
|
ObservableAmb
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/mapping/VendorDatabaseIdProviderTest.java
|
{
"start": 1179,
"end": 3767
}
|
class ____ {
private static final String PRODUCT_NAME = "Chewbacca DB";
@Test
void shouldNpeBeThrownIfDataSourceIsNull() {
VendorDatabaseIdProvider provider = new VendorDatabaseIdProvider();
try {
provider.getDatabaseId(null);
fail("Should NullPointerException be thrown.");
} catch (NullPointerException e) {
// pass
}
}
@Test
void shouldProductNameBeReturnedIfPropertiesIsNull() throws Exception {
VendorDatabaseIdProvider provider = new VendorDatabaseIdProvider();
assertEquals(PRODUCT_NAME, provider.getDatabaseId(mockDataSource()));
}
@Test
void shouldProductNameBeReturnedIfPropertiesIsEmpty() throws Exception {
VendorDatabaseIdProvider provider = new VendorDatabaseIdProvider();
provider.setProperties(new Properties());
assertEquals(PRODUCT_NAME, provider.getDatabaseId(mockDataSource()));
}
@Test
void shouldProductNameBeTranslated() throws Exception {
VendorDatabaseIdProvider provider = new VendorDatabaseIdProvider();
Properties properties = new Properties();
String partialProductName = "Chewbacca";
String id = "chewie";
properties.put(partialProductName, id);
provider.setProperties(properties);
assertEquals(id, provider.getDatabaseId(mockDataSource()));
}
@Test
void shouldNullBeReturnedIfNoMatch() throws Exception {
VendorDatabaseIdProvider provider = new VendorDatabaseIdProvider();
Properties properties = new Properties();
properties.put("Ewok DB", "ewok");
provider.setProperties(properties);
assertNull(provider.getDatabaseId(mockDataSource()));
}
@Test
void shouldNullBeReturnedOnDbError() throws Exception {
DataSource dataSource = mock(DataSource.class);
when(dataSource.getConnection()).thenThrow(SQLException.class);
VendorDatabaseIdProvider provider = new VendorDatabaseIdProvider();
Properties properties = new Properties();
properties.put("Ewok DB", "ewok");
try {
provider.getDatabaseId(dataSource);
fail("Should BuilderException be thrown.");
} catch (BuilderException e) {
// pass
}
}
private DataSource mockDataSource() throws SQLException {
DatabaseMetaData metaData = mock(DatabaseMetaData.class);
when(metaData.getDatabaseProductName()).thenReturn(PRODUCT_NAME);
Connection connection = mock(Connection.class);
when(connection.getMetaData()).thenReturn(metaData);
DataSource dataSource = mock(DataSource.class);
when(dataSource.getConnection()).thenReturn(connection);
return dataSource;
}
}
|
VendorDatabaseIdProviderTest
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java
|
{
"start": 87262,
"end": 87610
}
|
class ____ extends ScalarFunction {
@FunctionHint(input = @DataTypeHint("INT"), output = @DataTypeHint("BIGINT"))
public Number eval(Integer i) {
return null;
}
}
@FunctionHint(input = @DataTypeHint("INT"), argumentNames = "a", output = @DataTypeHint("INT"))
private static
|
InvalidFullOutputFunctionHint
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/core/ClusterOperator.java
|
{
"start": 839,
"end": 1885
}
|
interface ____ {
/**
* Update cluster metadata.
*
* @param namespaceId namespace id
* @param serviceName grouped service name of cluster
* @param clusterName cluster name
* @param clusterMetadata cluster metadata
* @throws NacosException exception during update metadata
*/
void updateClusterMetadata(String namespaceId, String serviceName, String clusterName,
ClusterMetadata clusterMetadata) throws NacosException;
/**
* Update cluster metadata.
*
* @param namespaceId namespace id
* @param groupName group name of cluster
* @param serviceName service name of cluster
* @param clusterName cluster name
* @param clusterMetadata cluster metadata
* @throws NacosException exception during update metadata
*/
void updateClusterMetadata(String namespaceId, String groupName, String serviceName, String clusterName,
ClusterMetadata clusterMetadata) throws NacosException;
}
|
ClusterOperator
|
java
|
apache__rocketmq
|
proxy/src/main/java/org/apache/rocketmq/proxy/processor/ReceiptHandleProcessor.java
|
{
"start": 1479,
"end": 3617
}
|
class ____ extends AbstractProcessor {
protected final static Logger log = LoggerFactory.getLogger(LoggerName.PROXY_LOGGER_NAME);
protected DefaultReceiptHandleManager receiptHandleManager;
public ReceiptHandleProcessor(MessagingProcessor messagingProcessor, ServiceManager serviceManager) {
super(messagingProcessor, serviceManager);
StateEventListener<RenewEvent> eventListener = event -> {
ProxyContext context = createContext(event.getEventType().name())
.setChannel(event.getKey().getChannel());
MessageReceiptHandle messageReceiptHandle = event.getMessageReceiptHandle();
ReceiptHandle handle = ReceiptHandle.decode(messageReceiptHandle.getReceiptHandleStr());
messagingProcessor.changeInvisibleTime(context, handle, messageReceiptHandle.getMessageId(),
messageReceiptHandle.getGroup(), messageReceiptHandle.getTopic(), event.getRenewTime())
.whenComplete((v, t) -> {
if (t != null) {
event.getFuture().completeExceptionally(t);
return;
}
event.getFuture().complete(v);
});
};
this.receiptHandleManager = new DefaultReceiptHandleManager(serviceManager.getMetadataService(), serviceManager.getConsumerManager(), eventListener);
this.appendStartAndShutdown(receiptHandleManager);
}
protected ProxyContext createContext(String actionName) {
return ProxyContext.createForInner(this.getClass().getSimpleName() + actionName);
}
public void addReceiptHandle(ProxyContext ctx, Channel channel, String group, String msgID, MessageReceiptHandle messageReceiptHandle) {
receiptHandleManager.addReceiptHandle(ctx, channel, group, msgID, messageReceiptHandle);
}
public MessageReceiptHandle removeReceiptHandle(ProxyContext ctx, Channel channel, String group, String msgID, String receiptHandle) {
return receiptHandleManager.removeReceiptHandle(ctx, channel, group, msgID, receiptHandle);
}
}
|
ReceiptHandleProcessor
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/jackson/JacksonIssue429Test.java
|
{
"start": 1570,
"end": 2125
}
|
class ____ extends StdDeserializer<StackTraceElement> {
private static final long serialVersionUID = 1L;
public Jackson429StackTraceElementDeserializer() {
super(StackTraceElement.class);
}
@Override
public StackTraceElement deserialize(final JsonParser jp, final DeserializationContext ctxt)
throws IOException {
jp.skipChildren();
return new StackTraceElement("a", "b", "b", StackTraceBean.NUM);
}
}
static
|
Jackson429StackTraceElementDeserializer
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLCaseExpr.java
|
{
"start": 3168,
"end": 8542
}
|
class ____ extends SQLObjectImpl implements SQLReplaceable, Serializable {
private static final long serialVersionUID = 1L;
private SQLExpr conditionExpr;
private SQLExpr valueExpr;
public Item() {
}
public Item(SQLExpr conditionExpr, SQLExpr valueExpr) {
setConditionExpr(conditionExpr);
setValueExpr(valueExpr);
}
public SQLExpr getConditionExpr() {
return this.conditionExpr;
}
public void setConditionExpr(SQLExpr conditionExpr) {
if (conditionExpr != null) {
conditionExpr.setParent(this);
}
this.conditionExpr = conditionExpr;
}
public SQLExpr getValueExpr() {
return this.valueExpr;
}
public void setValueExpr(SQLExpr valueExpr) {
if (valueExpr != null) {
valueExpr.setParent(this);
}
this.valueExpr = valueExpr;
}
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
if (this.conditionExpr != null) {
this.conditionExpr.accept(visitor);
}
if (valueExpr != null) {
valueExpr.accept(visitor);
}
}
visitor.endVisit(this);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((conditionExpr == null) ? 0 : conditionExpr.hashCode());
result = prime * result + ((valueExpr == null) ? 0 : valueExpr.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Item other = (Item) obj;
if (conditionExpr == null) {
if (other.conditionExpr != null) {
return false;
}
} else if (!conditionExpr.equals(other.conditionExpr)) {
return false;
}
if (valueExpr == null) {
if (other.valueExpr != null) {
return false;
}
} else if (!valueExpr.equals(other.valueExpr)) {
return false;
}
return true;
}
public Item clone() {
Item x = new Item();
if (conditionExpr != null) {
x.setConditionExpr(conditionExpr.clone());
}
if (valueExpr != null) {
x.setValueExpr(valueExpr.clone());
}
return x;
}
@Override
public boolean replace(SQLExpr expr, SQLExpr target) {
if (valueExpr == expr) {
setValueExpr(target);
return true;
}
if (conditionExpr == expr) {
setConditionExpr(target);
return true;
}
return false;
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((elseExpr == null) ? 0 : elseExpr.hashCode());
result = prime * result + items.hashCode();
result = prime * result + ((valueExpr == null) ? 0 : valueExpr.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
SQLCaseExpr other = (SQLCaseExpr) obj;
if (elseExpr == null) {
if (other.elseExpr != null) {
return false;
}
} else if (!elseExpr.equals(other.elseExpr)) {
return false;
}
if (!items.equals(other.items)) {
return false;
}
if (valueExpr == null) {
if (other.valueExpr != null) {
return false;
}
} else if (!valueExpr.equals(other.valueExpr)) {
return false;
}
return true;
}
public SQLCaseExpr clone() {
SQLCaseExpr x = new SQLCaseExpr();
for (Item item : items) {
x.addItem(item.clone());
}
if (valueExpr != null) {
x.setValueExpr(valueExpr.clone());
}
if (elseExpr != null) {
x.setElseExpr(elseExpr.clone());
}
return x;
}
public SQLDataType computeDataType() {
for (Item item : items) {
SQLExpr expr = item.getValueExpr();
if (expr != null) {
SQLDataType dataType = expr.computeDataType();
if (dataType != null) {
return dataType;
}
}
}
if (elseExpr != null) {
return elseExpr.computeDataType();
}
return null;
}
public String toString() {
return SQLUtils.toSQLString(this, (DbType) null);
}
}
|
Item
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/LongRange.java
|
{
"start": 1060,
"end": 4424
}
|
class ____ extends NumberRange<Long> {
private static final long serialVersionUID = 1L;
/**
* Creates a closed range with the specified minimum and maximum values (both inclusive).
*
* <p>
* The range uses the natural ordering of the elements to determine where values lie in the range.
* </p>
*
* <p>
* The arguments may be passed in the order (min,max) or (max,min). The getMinimum and getMaximum methods will return the correct values.
* </p>
*
* @param fromInclusive the first value that defines the edge of the range, inclusive.
* @param toInclusive the second value that defines the edge of the range, inclusive.
* @return the range object, not null.
*/
public static LongRange of(final long fromInclusive, final long toInclusive) {
return of(Long.valueOf(fromInclusive), Long.valueOf(toInclusive));
}
/**
* Creates a closed range with the specified minimum and maximum values (both inclusive).
*
* <p>
* The range uses the natural ordering of the elements to determine where values lie in the range.
* </p>
*
* <p>
* The arguments may be passed in the order (min,max) or (max,min). The getMinimum and getMaximum methods will return the correct values.
* </p>
*
* @param fromInclusive the first value that defines the edge of the range, inclusive.
* @param toInclusive the second value that defines the edge of the range, inclusive.
* @return the range object, not null.
* @throws IllegalArgumentException if either element is null.
*/
public static LongRange of(final Long fromInclusive, final Long toInclusive) {
return new LongRange(fromInclusive, toInclusive);
}
/**
* Creates a new instance.
*
* @param number1 the first element, not null
* @param number2 the second element, not null
* @throws NullPointerException when element1 is null.
* @throws NullPointerException when element2 is null.
*/
private LongRange(final Long number1, final Long number2) {
super(number1, number2, null);
}
/**
* Fits the given value into this range by returning the given value or, if out of bounds, the range minimum if
* below, or the range maximum if above.
*
* <pre>{@code
* LongRange range = LongRange.of(16, 64);
* range.fit(-9) --> 16
* range.fit(0) --> 16
* range.fit(15) --> 16
* range.fit(16) --> 16
* range.fit(17) --> 17
* ...
* range.fit(63) --> 63
* range.fit(64) --> 64
* range.fit(99) --> 64
* }</pre>
*
* @param element the element to test.
* @return the minimum, the element, or the maximum depending on the element's location relative to the range.
* @since 3.19.0
*/
public long fit(final long element) {
return super.fit(element).longValue();
}
/**
* Returns a sequential ordered {@code LongStream} from {@link #getMinimum()} (inclusive) to {@link #getMaximum()} (inclusive) by an incremental step of
* {@code 1}.
*
* @return a sequential {@code LongStream} for the range of {@code long} elements
* @since 3.18.0
*/
public LongStream toLongStream() {
return LongStream.rangeClosed(getMinimum(), getMaximum());
}
}
|
LongRange
|
java
|
dropwizard__dropwizard
|
dropwizard-validation/src/test/java/io/dropwizard/validation/SelfValidationTest.java
|
{
"start": 2917,
"end": 3192
}
|
class ____ extends FailingExample {
@SuppressWarnings("unused")
@SelfValidation
public void subValidateFail(ViolationCollector col) {
col.addViolation(FAILED + "subclass");
}
}
@SelfValidating
public static
|
SubclassExample
|
java
|
apache__camel
|
components/camel-json-validator/src/main/java/org/apache/camel/component/jsonvalidator/DefaultJsonUriSchemaLoader.java
|
{
"start": 1364,
"end": 2671
}
|
class ____ implements JsonUriSchemaLoader {
protected ObjectMapper mapper = new ObjectMapper();
protected SchemaValidatorsConfig config = new SchemaValidatorsConfig();
protected SpecVersion.VersionFlag defaultVersion = SpecVersion.VersionFlag.V201909;
@Override
public JsonSchema createSchema(CamelContext camelContext, String schemaUri) throws Exception {
// determine schema version
InputStream stream = ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext, schemaUri);
JsonNode node = mapper.readTree(stream);
SpecVersion.VersionFlag version;
try {
version = SpecVersionDetector.detect(node);
} catch (JsonSchemaException e) {
// default if no schema version was specified
version = defaultVersion;
}
JsonSchemaFactory factory = JsonSchemaFactory.getInstance(version);
// the URI based method will correctly resolve relative schema references to other schema in the same directory
URI uri;
if (ResourceHelper.hasScheme(schemaUri)) {
uri = URI.create(schemaUri);
} else {
uri = URI.create("classpath:" + schemaUri);
}
return factory.getSchema(uri, node, config);
}
}
|
DefaultJsonUriSchemaLoader
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
|
{
"start": 40386,
"end": 41332
}
|
class ____ implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
private List<AclEntry> aclEntries;
/**
* Creates a remove acl entry executor.
*
* @param path path to set the acl.
* @param aclSpec acl parts to remove.
*/
public FSRemoveAclEntries(String path, String aclSpec) {
this.path = new Path(path);
this.aclEntries = AclEntry.parseAclSpec(aclSpec, false);
}
/**
* Executes the filesystem operation.
*
* @param fs filesystem instance to use.
*
* @return void.
*
* @throws IOException thrown if an IO error occurred.
*/
@Override
public Void execute(FileSystem fs) throws IOException {
fs.removeAclEntries(path, aclEntries);
return null;
}
}
/**
* Executor that removes the default acl from a directory in a FileSystem
*/
@InterfaceAudience.Private
public static
|
FSRemoveAclEntries
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/invoker/basic/ExcessArgumentsInvokerTest.java
|
{
"start": 2044,
"end": 2266
}
|
class ____ {
public String hello(String param) {
return "foobar_" + param;
}
public static String helloStatic(String param) {
return "quux_" + param;
}
}
}
|
MyService
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/config/conditional/DisableBuiltInGlobalFiltersTests.java
|
{
"start": 1558,
"end": 2065
}
|
class ____ {
@Autowired
private List<GlobalFilter> globalFilters;
@Test
public void shouldInjectBuiltInFilters() {
assertThat(globalFilters).hasSizeGreaterThanOrEqualTo(12);
}
}
@Nested
@SpringBootTest(classes = Config.class,
properties = { "spring.cloud.gateway.server.webflux.global-filter.remove-cached-body.enabled=false",
"spring.cloud.gateway.server.webflux.global-filter.route-to-request-url.enabled=false" })
@ActiveProfiles("disable-components")
public
|
GlobalFilterDefault
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/internal/threadchecker/BlockedThreadEvent.java
|
{
"start": 698,
"end": 1920
}
|
class ____ {
private final Thread thread;
private final long maxExecTime;
private final long duration;
private final long warningExceptionTime;
/**
* Create an instance of BlockedThreadEvent
*
* @param thread The thread being checked
* @param duration The duration the thread has been blocked, in nanoseconds
* @param maxExecTime The max execution time the thread is allowed, in nanoseconds
* @param warningExceptionTime The max time a thread can be blocked before stack traces get logged, in nanoseconds
*/
public BlockedThreadEvent(Thread thread, long duration, long maxExecTime, long warningExceptionTime) {
this.thread = thread;
this.duration = duration;
this.maxExecTime = maxExecTime;
this.warningExceptionTime = warningExceptionTime;
}
public Thread thread() {
return thread;
}
public long maxExecTime() {
return maxExecTime;
}
public long duration() {
return duration;
}
public long warningExceptionTime() {
return warningExceptionTime;
}
@Override
public String toString() {
return "BlockedThreadEvent(thread=" + thread.getName() + ",duration=" + duration + ",maxExecTime=" + maxExecTime + ")";
}
}
|
BlockedThreadEvent
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/sse/DefaultEvent.java
|
{
"start": 977,
"end": 2068
}
|
class ____<T> implements Event<T> {
private final T data;
private String id;
private String name;
private String comment;
private Duration retry;
/**
* @param data The event
*/
DefaultEvent(T data) {
this.data = data;
}
@Override
public T getData() {
return data;
}
@Override
public String getId() {
return id;
}
@Override
public String getName() {
return name;
}
@Override
public String getComment() {
return comment;
}
@Override
public Duration getRetry() {
return retry;
}
@Override
public Event<T> retry(Duration duration) {
this.retry = duration;
return this;
}
@Override
public Event<T> id(String id) {
this.id = id;
return this;
}
@Override
public Event<T> name(String name) {
this.name = name;
return this;
}
@Override
public Event<T> comment(String comment) {
this.comment = comment;
return this;
}
}
|
DefaultEvent
|
java
|
bumptech__glide
|
annotation/compiler/src/main/java/com/bumptech/glide/annotation/compiler/GlideGenerator.java
|
{
"start": 1144,
"end": 2385
}
|
class ____ {
* private GiphyGlide() {
* }
*
* public static File getPhotoCacheDir(Context context) {
* return Glide.getPhotoCacheDir(context);
* }
*
* public static File getPhotoCacheDir(Context context, String cacheName) {
* return Glide.getPhotoCacheDir(context, cacheName);
* }
*
* public static Glide get(Context context) {
* return Glide.get(context);
* }
*
* public static void tearDown() {
* Glide.tearDown();
* }
*
* public static GeneratedRequestManager with(Context context) {
* return (GeneratedRequestManager) Glide.with(context);
* }
*
* public static GeneratedRequestManager with(Activity activity) {
* return (GeneratedRequestManager) Glide.with(activity);
* }
*
* public static GeneratedRequestManager with(FragmentActivity activity) {
* return (GeneratedRequestManager) Glide.with(activity);
* }
*
* public static GeneratedRequestManager with(Fragment fragment) {
* return (GeneratedRequestManager) Glide.with(fragment);
* }
*
* public static GeneratedRequestManager with(androidx.fragment.app.Fragment fragment) {
* return (GeneratedRequestManager) Glide.with(fragment);
* }
* </code>
* </pre>
*/
final
|
GlideApp
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceDoubleFinalizationIT.java
|
{
"start": 2663,
"end": 9485
}
|
class ____ extends AbstractSnapshotIntegTestCase {
public void testNoDoubleFinalization() throws Exception {
// 0 - Basic setup
final String masterNodeName = internalCluster().startNode();
final String dataNodeName = internalCluster().startDataOnlyNode();
createIndex(
"index-1",
Settings.builder().put("index.number_of_replicas", 0).put("index.routing.allocation.require._name", masterNodeName).build()
);
indexRandomDocs("index-1", 50);
createIndex(
"index-2",
Settings.builder().put("index.number_of_replicas", 0).put("index.routing.allocation.require._name", dataNodeName).build()
);
indexRandomDocs("index-2", 50);
createIndex(
"index-3",
Settings.builder().put("index.number_of_replicas", 0).put("index.routing.allocation.require._name", dataNodeName).build()
);
indexRandomDocs("index-3", 50);
// 1 - create repository and take a snapshot
final String repoName = "repo";
createRepository(repoName, TestRepositoryPlugin.REPO_TYPE);
final TestRepository testRepository = getRepositoryOnMaster(repoName);
logger.info("--> create snapshot snap-1");
createSnapshot(repoName, "snap-1", List.of("index-1"));
// 2 - Start deleting the snap-1 and block it at listing root blobs
PlainActionFuture<Void> future = setWaitForClusterState(state -> {
final SnapshotDeletionsInProgress snapshotDeletionsInProgress = SnapshotDeletionsInProgress.get(state);
return snapshotDeletionsInProgress.getEntries()
.stream()
.flatMap(entry -> entry.snapshots().stream())
.anyMatch(snapshotId -> snapshotId.getName().equals("snap-1"));
});
final CyclicBarrier barrier = testRepository.blockOnceForListBlobs();
new Thread(() -> {
logger.info("--> start deleting snapshot snap-1 ");
startDeleteSnapshot(repoName, "snap-1");
}).start();
assertBusy(() -> assertThat(barrier.getNumberWaiting(), equalTo(1)));
future.actionGet();
logger.info("--> repository blocked at listing root blobs");
// 3 - Stop data node so that index-2, index-3 become unassigned
internalCluster().stopNode(dataNodeName);
internalCluster().validateClusterFormed();
// 4 - Create new snapshot for the unassigned index and its shards should have both QUEUED and MISSING
future = setWaitForClusterState(state -> {
final SnapshotsInProgress snapshotsInProgress = SnapshotsInProgress.get(state);
return snapshotsInProgress.asStream()
.anyMatch(
entry -> entry.snapshot().getSnapshotId().getName().equals("snap-2")
&& entry.state() == SnapshotsInProgress.State.STARTED
&& entry.shards()
.values()
.stream()
.map(SnapshotsInProgress.ShardSnapshotStatus::state)
.collect(Collectors.toSet())
.equals(Set.of(SnapshotsInProgress.ShardState.QUEUED, SnapshotsInProgress.ShardState.MISSING))
);
});
clusterAdmin().prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, "snap-2")
.setIndices("index-2", "index-3")
.setPartial(true)
.setWaitForCompletion(false)
.get();
// Delete index-3 so that it becomes MISSING for snapshot
indicesAdmin().prepareDelete("index-3").get();
future.actionGet();
// 5 - Start deleting snap-2, itself should be WAITING. But changes InProgress snap-2 to SUCCESS
future = setWaitForClusterState(state -> {
final SnapshotsInProgress snapshotsInProgress = SnapshotsInProgress.get(state);
final boolean foundSnapshot = snapshotsInProgress.asStream()
.anyMatch(
entry -> entry.snapshot().getSnapshotId().getName().equals("snap-2")
&& entry.state() == SnapshotsInProgress.State.SUCCESS
&& entry.shards()
.values()
.stream()
.map(SnapshotsInProgress.ShardSnapshotStatus::state)
.collect(Collectors.toSet())
.equals(Set.of(SnapshotsInProgress.ShardState.FAILED, SnapshotsInProgress.ShardState.MISSING))
);
if (false == foundSnapshot) {
return false;
}
final SnapshotDeletionsInProgress snapshotDeletionsInProgress = SnapshotDeletionsInProgress.get(state);
return snapshotDeletionsInProgress.getEntries()
.stream()
.anyMatch(
entry -> entry.state() == SnapshotDeletionsInProgress.State.WAITING
&& entry.snapshots().stream().anyMatch(snapshotId -> snapshotId.getName().equals("snap-2"))
);
});
new Thread(() -> {
logger.info("--> start deleting snapshot snap-2 ");
startDeleteSnapshot(repoName, "snap-2");
}).start();
future.actionGet();
// 6 - Let the deletion of snap-1 to complete. It should *not* lead to double finalization
barrier.await();
awaitNoMoreRunningOperations();
}
private PlainActionFuture<Void> setWaitForClusterState(Predicate<ClusterState> predicate) {
final var clusterStateObserver = new ClusterStateObserver(
internalCluster().getCurrentMasterNodeInstance(ClusterService.class),
TimeValue.timeValueMillis(60000),
logger,
new ThreadContext(Settings.EMPTY)
);
final PlainActionFuture<Void> future = new PlainActionFuture<>();
clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() {
@Override
public void onNewClusterState(ClusterState state) {
future.onResponse(null);
}
@Override
public void onClusterServiceClose() {
future.onFailure(new IllegalStateException("cluster service closed"));
}
@Override
public void onTimeout(TimeValue timeout) {
future.onFailure(new IllegalStateException("timeout"));
}
}, predicate, TimeValue.timeValueSeconds(30));
return future;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(TestRepositoryPlugin.class);
}
public static
|
SnapshotsServiceDoubleFinalizationIT
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersister.java
|
{
"start": 2933,
"end": 4901
}
|
class ____ {
private final String jobId;
private BulkRequest bulkRequest = new BulkRequest(AnnotationIndex.WRITE_ALIAS_NAME);
private final Supplier<Boolean> shouldRetry;
private Builder(String jobId, Supplier<Boolean> shouldRetry) {
this.jobId = Objects.requireNonNull(jobId);
this.shouldRetry = Objects.requireNonNull(shouldRetry);
}
public Builder persistAnnotation(Annotation annotation) {
return persistAnnotation(null, annotation);
}
public Builder persistAnnotation(@Nullable String annotationId, Annotation annotation) {
Objects.requireNonNull(annotation);
try (XContentBuilder xContentBuilder = annotation.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) {
bulkRequest.add(new IndexRequest().id(annotationId).source(xContentBuilder).setRequireAlias(true));
} catch (IOException e) {
logger.error(() -> "[" + jobId + "] Error serialising annotation", e);
}
if (bulkRequest.numberOfActions() >= bulkLimit) {
executeRequest();
}
return this;
}
/**
* Execute the bulk action
*/
public BulkResponse executeRequest() {
if (bulkRequest.numberOfActions() == 0) {
return null;
}
logger.trace("[{}] ES API CALL: bulk request with {} actions", () -> jobId, () -> bulkRequest.numberOfActions());
BulkResponse bulkResponse = resultsPersisterService.bulkIndexWithRetry(
bulkRequest,
jobId,
shouldRetry,
retryMessage -> logger.debug("[{}] Bulk indexing of annotations failed {}", jobId, retryMessage)
);
bulkRequest = new BulkRequest(AnnotationIndex.WRITE_ALIAS_NAME);
return bulkResponse;
}
}
}
|
Builder
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/TypeVariablesTest.java
|
{
"start": 1328,
"end": 1742
}
|
class ____ {
@ClassRule public static final CompilationRule compilationRule = new CompilationRule();
@Rule public final Expect expect = Expect.create();
private static Elements elementUtils;
private static Types typeUtils;
@BeforeClass
public static void setUpClass() {
elementUtils = compilationRule.getElements();
typeUtils = compilationRule.getTypes();
}
abstract static
|
TypeVariablesTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java
|
{
"start": 1175,
"end": 1254
}
|
class ____.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public
|
name
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ReturnAtTheEndOfVoidFunctionTest.java
|
{
"start": 3764,
"end": 4063
}
|
class ____ {
public Builder() {}
}
""")
.doTest();
}
@Test
public void abstractDoesntCrash() {
helper
.addInputLines(
"Builder.java",
"""
package com.google.gporeba;
public abstract
|
Builder
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/RedissonMapCacheNative.java
|
{
"start": 1547,
"end": 23438
}
|
class ____<K, V> extends RedissonMap<K, V> implements RMapCacheNative<K, V> {
public RedissonMapCacheNative(CommandAsyncExecutor commandExecutor, String name, RedissonClient redisson, MapOptions<K, V> options, WriteBehindService writeBehindService) {
super(commandExecutor, name, redisson, options, writeBehindService);
}
public RedissonMapCacheNative(Codec codec, CommandAsyncExecutor commandExecutor, String name) {
super(codec, commandExecutor, name);
}
public RedissonMapCacheNative(Codec codec, CommandAsyncExecutor commandExecutor, String name, RedissonClient redisson, MapOptions<K, V> options, WriteBehindService writeBehindService) {
super(codec, commandExecutor, name, redisson, options, writeBehindService);
}
@Override
public V put(K key, V value, Duration ttl) {
return get(putAsync(key, value, ttl));
}
@Override
public RFuture<V> putAsync(K key, V value, Duration ttl) {
checkKey(key);
checkValue(value);
if (ttl.toMillis() < 0) {
throw new IllegalArgumentException("ttl can't be negative");
}
if (ttl.toMillis() == 0) {
return putAsync(key, value);
}
RFuture<V> future = putOperationAsync(key, value, ttl);
future = new CompletableFutureWrapper<>(future);
if (hasNoWriter()) {
return future;
}
MapWriterTask.Add listener = new MapWriterTask.Add(key, value);
return mapWriterFuture(future, listener);
}
protected RFuture<V> putOperationAsync(K key, V value, Duration ttl) {
String name = getRawName(key);
return commandExecutor.evalWriteAsync(name, codec, RedisCommands.EVAL_OBJECT,
"local currValue = redis.call('hget', KEYS[1], ARGV[2]); "
+ "redis.call('hset', KEYS[1], ARGV[2], ARGV[3]); "
+ "redis.call('hpexpire', KEYS[1], ARGV[1], 'fields', 1, ARGV[2]); "
+ "return currValue; ",
Collections.singletonList(name),
ttl.toMillis(), encodeMapKey(key), encodeMapValue(value));
}
@Override
public boolean fastPut(K key, V value, Duration ttl) {
return get(fastPutAsync(key, value, ttl));
}
@Override
public RFuture<Boolean> fastPutAsync(K key, V value, Duration ttl) {
checkKey(key);
checkValue(value);
if (ttl.toMillis() < 0) {
throw new IllegalArgumentException("ttl can't be negative");
}
if (ttl.toMillis() == 0) {
return fastPutAsync(key, value);
}
RFuture<Boolean> future = fastPutOperationAsync(key, value, ttl);
future = new CompletableFutureWrapper<>(future);
if (hasNoWriter()) {
return future;
}
return mapWriterFuture(future, new MapWriterTask.Add(key, value));
}
protected RFuture<Boolean> fastPutOperationAsync(K key, V value, Duration ttl) {
String name = getRawName(key);
return commandExecutor.evalWriteAsync(name, StringCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local added = redis.call('hset', KEYS[1], ARGV[2], ARGV[3]); " +
"redis.call('hpexpire', KEYS[1], ARGV[1], 'fields', 1, ARGV[2]); " +
"return added;",
Collections.singletonList(name),
ttl.toMillis(), encodeMapKey(key), encodeMapValue(value));
}
@Override
public V putIfAbsent(K key, V value, Duration ttl) {
return get(putIfAbsentAsync(key, value, ttl));
}
@Override
public RFuture<V> putIfAbsentAsync(K key, V value, Duration ttl) {
checkKey(key);
checkValue(value);
if (ttl.toMillis() < 0) {
throw new IllegalArgumentException("ttl can't be negative");
}
if (ttl.toMillis() == 0) {
return putIfAbsentAsync(key, value);
}
RFuture<V> future = putIfAbsentOperationAsync(key, value, ttl);
future = new CompletableFutureWrapper<>(future);
if (hasNoWriter()) {
return future;
}
MapWriterTask.Add task = new MapWriterTask.Add(key, value);
return mapWriterFuture(future, task, r -> r == null);
}
protected RFuture<V> putIfAbsentOperationAsync(K key, V value, Duration ttl) {
String name = getRawName(key);
if (value == null) {
return commandExecutor.evalWriteAsync(name, codec, RedisCommands.EVAL_MAP_VALUE,
"local currValue = redis.call('hget', KEYS[1], ARGV[1]); " +
"if currValue ~= false then " +
"return currValue;" +
"end;" +
"redis.call('hdel', KEYS[1], ARGV[1]); " +
"return nil; ",
Collections.singletonList(name), encodeMapKey(key));
}
return commandExecutor.evalWriteAsync(name, codec, RedisCommands.EVAL_MAP_VALUE,
"local currValue = redis.call('hget', KEYS[1], ARGV[2]); " +
"if currValue ~= false then " +
"return currValue;" +
"end;" +
"redis.call('hset', KEYS[1], ARGV[2], ARGV[3]); " +
"redis.call('hpexpire', KEYS[1], ARGV[1], 'fields', 1, ARGV[2]); " +
"return nil; ",
Collections.singletonList(name),
ttl.toMillis(), encodeMapKey(key), encodeMapValue(value));
}
@Override
public boolean fastPutIfAbsent(K key, V value, Duration ttl) {
return get(fastPutIfAbsentAsync(key, value, ttl));
}
@Override
public RFuture<Boolean> fastPutIfAbsentAsync(K key, V value, Duration ttl) {
checkKey(key);
checkValue(value);
if (ttl.toMillis() < 0) {
throw new IllegalArgumentException("ttl can't be negative");
}
if (ttl.toMillis() == 0) {
return fastPutIfAbsentAsync(key, value);
}
RFuture<Boolean> future = fastPutIfAbsentOperationAsync(key, value, ttl);
future = new CompletableFutureWrapper<>(future);
if (hasNoWriter()) {
return future;
}
MapWriterTask.Add task = new MapWriterTask.Add(key, value);
return mapWriterFuture(future, task, Function.identity());
}
protected RFuture<Boolean> fastPutIfAbsentOperationAsync(K key, V value, Duration ttl) {
String name = getRawName(key);
if (value == null) {
return commandExecutor.evalWriteAsync(name, StringCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local currValue = redis.call('hget', KEYS[1], ARGV[1]); " +
"if currValue ~= false then " +
"return 0;" +
"end;" +
"redis.call('hdel', KEYS[1], ARGV[1]); " +
"return 1; ",
Collections.singletonList(name), encodeMapKey(key));
}
return commandExecutor.evalWriteAsync(name, StringCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local currValue = redis.call('hget', KEYS[1], ARGV[2]); " +
"if currValue ~= false then " +
"return 0;" +
"end;" +
"redis.call('hset', KEYS[1], ARGV[2], ARGV[3]); " +
"redis.call('hpexpire', KEYS[1], ARGV[1], 'fields', 1, ARGV[2]); " +
"return 1; ",
Collections.singletonList(name),
ttl.toMillis(), encodeMapKey(key), encodeMapValue(value));
}
@Override
public long remainTimeToLive(K key) {
return get(remainTimeToLiveAsync(key));
}
@Override
public RFuture<Long> remainTimeToLiveAsync(K key) {
checkKey(key);
String name = getRawName(key);
return commandExecutor.readAsync(name, StringCodec.INSTANCE, RedisCommands.HPTTL, name, "FIELDS", 1, encodeMapKey(key));
}
@Override
public Map<K, Long> remainTimeToLive(Set<K> keys) {
return get(remainTimeToLiveAsync(keys));
}
@Override
public RFuture<Map<K, Long>> remainTimeToLiveAsync(Set<K> keys) {
List<Object> plainKeys = new ArrayList<>(keys);
List<Object> params = new ArrayList<>(keys.size() + 1);
params.add(getRawName());
params.add("FIELDS");
params.add(plainKeys.size());
encodeMapKeys(params, plainKeys);
RedisCommand<Map<Object, Object>> command = new RedisCommand<>("HPTTL",
new MapNativeAllDecoder(plainKeys, Long.class));
return commandExecutor.readAsync(getRawName(), StringCodec.INSTANCE, command, params.toArray());
}
@Override
public void putAll(Map<? extends K, ? extends V> map, Duration ttl) {
get(putAllAsync(map, ttl));
}
@Override
public RFuture<Void> putAllAsync(Map<? extends K, ? extends V> map, Duration ttl) {
if (map.isEmpty()) {
return new CompletableFutureWrapper<>((Void) null);
}
RFuture<Void> future = putAllOperationAsync(map, ttl);
if (hasNoWriter()) {
return future;
}
MapWriterTask listener = new MapWriterTask.Add(map);
return mapWriterFuture(future, listener);
}
protected RFuture<Void> putAllOperationAsync(Map<? extends K, ? extends V> map, Duration ttl) {
List<Object> args = new ArrayList<>();
args.add(ttl.toMillis());
encodeMapKeys(args, map);
return commandExecutor.evalWriteAsync(name, StringCodec.INSTANCE, RedisCommands.EVAL_VOID,
"for i = 2, #ARGV, 2 do " +
"redis.call('hset', KEYS[1], ARGV[i], ARGV[i + 1]); " +
"redis.call('hpexpire', KEYS[1], ARGV[1], 'fields', 1, ARGV[i]); " +
"end; ",
Collections.singletonList(name), args.toArray());
}
@Override
public boolean expireEntry(K key, Duration ttl) {
return get(expireEntryAsync(key, ttl));
}
@Override
public RFuture<Boolean> expireEntryAsync(K key, Duration ttl) {
String name = getRawName(key);
return commandExecutor.evalWriteAsync(name, LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local expireSet = redis.call('hpexpire', KEYS[1], ARGV[1], 'fields', 1, ARGV[2]); "
+ "if #expireSet > 0 and expireSet[1] >= 1 then "
+ "return 1;"
+ "end; "
+ "return 0; ",
Arrays.asList(name),
ttl.toMillis(), encodeMapKey(key));
}
@Override
public boolean expireEntryIfNotSet(K key, Duration ttl) {
return get(expireEntryIfNotSetAsync(key, ttl));
}
@Override
public RFuture<Boolean> expireEntryIfNotSetAsync(K key, Duration ttl) {
return expireEntryAsync("NX", key, ttl);
}
private RFuture<Boolean> expireEntryAsync(String param, K key, Duration ttl) {
String name = getRawName(key);
return commandExecutor.evalWriteAsync(name, LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local expireSet = redis.call('hpexpire', KEYS[1], ARGV[1], ARGV[3], 'fields', 1, ARGV[2]); "
+ "if #expireSet > 0 and expireSet[1] >= 1 then "
+ "return 1;"
+ "end; "
+ "return 0; ",
Arrays.asList(name),
ttl.toMillis(), encodeMapKey(key), param);
}
@Override
public int expireEntries(Set<K> keys, Duration ttl) {
return get(expireEntriesAsync(keys, ttl));
}
@Override
public RFuture<Integer> expireEntriesAsync(Set<K> keys, Duration ttl) {
List<Object> args = new ArrayList<>();
args.add(ttl.toMillis());
encodeMapKeys(args, keys);
return commandExecutor.evalWriteAsync(name, LongCodec.INSTANCE, RedisCommands.EVAL_INTEGER,
"local result = 0;"
+ "for j = 2, #ARGV, 1 do "
+ "local expireSet = redis.call('hpexpire', KEYS[1], ARGV[1], 'fields', 1, ARGV[j]); "
+ "if #expireSet > 0 and expireSet[1] >= 1 then "
+ "result = result + 1;"
+ "end; "
+ "end; "
+ "return result; ",
Arrays.asList(name),
args.toArray());
}
@Override
public int expireEntriesIfNotSet(Set<K> keys, Duration ttl) {
return get(expireEntriesIfNotSetAsync(keys, ttl));
}
@Override
public RFuture<Integer> expireEntriesIfNotSetAsync(Set<K> keys, Duration ttl) {
return expireEntriesAsync("NX", keys, ttl);
}
private RFuture<Integer> expireEntriesAsync(String param, Set<K> keys, Duration ttl) {
List<Object> args = new ArrayList<>();
args.add(param);
args.add(ttl.toMillis());
encodeMapKeys(args, keys);
return commandExecutor.evalWriteAsync(name, LongCodec.INSTANCE, RedisCommands.EVAL_INTEGER,
"local result = 0;"
+ "for j = 3, #ARGV, 1 do "
+ "local expireSet = redis.call('hpexpire', KEYS[1], ARGV[2], ARGV[1], 'fields', 1, ARGV[j]); "
+ "if #expireSet > 0 and expireSet[1] >= 1 then "
+ "result = result + 1;"
+ "end; "
+ "end; "
+ "return result; ",
Arrays.asList(name),
args.toArray());
}
@Override
public boolean expireEntryIfGreater(K key, Duration ttl) {
return get(expireEntryIfGreaterAsync(key, ttl));
}
@Override
public boolean expireEntryIfLess(K key, Duration ttl) {
return get(expireEntryIfLessAsync(key, ttl));
}
@Override
public int expireEntriesIfGreater(Set<K> keys, Duration ttl) {
return get(expireEntriesIfGreaterAsync(keys, ttl));
}
@Override
public int expireEntriesIfLess(Set<K> keys, Duration ttl) {
return get(expireEntriesIfLessAsync(keys, ttl));
}
@Override
public RFuture<Boolean> expireEntryIfGreaterAsync(K key, Duration ttl) {
return expireEntryAsync("GT", key, ttl);
}
@Override
public RFuture<Boolean> expireEntryIfLessAsync(K key, Duration ttl) {
return expireEntryAsync("LT", key, ttl);
}
@Override
public RFuture<Integer> expireEntriesIfGreaterAsync(Set<K> keys, Duration ttl) {
return expireEntriesAsync("GT", keys, ttl);
}
@Override
public RFuture<Integer> expireEntriesIfLessAsync(Set<K> keys, Duration ttl) {
return expireEntriesAsync("LT", keys, ttl);
}
@Override
public Boolean clearExpire(K key) {
return get(clearExpireAsync(key));
}
@Override
public RFuture<Boolean> clearExpireAsync(K key) {
String name = getRawName(key);
return commandExecutor.writeAsync(name, LongCodec.INSTANCE, RedisCommands.HPERSIST, name, "FIELDS", 1, encodeMapKey(key));
}
@Override
public Map<K, Boolean> clearExpire(Set<K> keys) {
return get(clearExpireAsync(keys));
}
@Override
public RFuture<Map<K, Boolean>> clearExpireAsync(Set<K> keys) {
List<Object> plainKeys = new ArrayList<>(keys);
List<Object> params = new ArrayList<>(keys.size() + 1);
params.add(getRawName());
params.add("FIELDS");
params.add(plainKeys.size());
encodeMapKeys(params, plainKeys);
RedisCommand<Map<Object, Object>> command = new RedisCommand<>("HPERSIST",
new MapNativeAllDecoder(plainKeys, Boolean.class));
return commandExecutor.readAsync(getRawName(), StringCodec.INSTANCE, command, params.toArray());
}
@Override
public int addListener(ObjectListener listener) {
if (listener instanceof MapExpiredListener) {
return addListener("__keyevent@*:hexpired", (MapExpiredListener) listener, MapExpiredListener::onExpired);
}
return super.addListener(listener);
}
@Override
public RFuture<Integer> addListenerAsync(ObjectListener listener) {
if (listener instanceof MapExpiredListener) {
return addListenerAsync("__keyevent@*:hexpired", (MapExpiredListener) listener, MapExpiredListener::onExpired);
}
return super.addListenerAsync(listener);
}
@Override
public void removeListener(int listenerId) {
removeListener(listenerId, "__keyevent@*:hexpired");
super.removeListener(listenerId);
}
@Override
public RFuture<Void> removeListenerAsync(int listenerId) {
return removeListenerAsync(super.removeListenerAsync(listenerId), listenerId, "__keyevent@*:hexpired");
}
@Override
public V compute(K key, Duration ttl, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
checkNotBatch();
checkKey(key);
Objects.requireNonNull(remappingFunction);
RLock lock = getLock(key);
lock.lock();
try {
V oldValue = get(key);
V newValue = remappingFunction.apply(key, oldValue);
if (newValue == null) {
if (oldValue != null) {
fastRemove(key);
}
} else {
fastPut(key, newValue, ttl);
}
return newValue;
} finally {
lock.unlock();
}
}
@Override
public RFuture<V> computeAsync(K key, Duration ttl, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
checkNotBatch();
checkKey(key);
Objects.requireNonNull(remappingFunction);
RLock lock = getLock(key);
long threadId = Thread.currentThread().getId();
CompletionStage<V> f = (CompletionStage<V>) lock.lockAsync(threadId)
.thenCompose(r -> {
RFuture<V> oldValueFuture = getAsync(key, threadId);
return oldValueFuture.thenCompose(oldValue -> {
return CompletableFuture.supplyAsync(() -> remappingFunction.apply(key, oldValue), getServiceManager().getExecutor())
.thenCompose(newValue -> {
if (newValue == null) {
if (oldValue != null) {
return fastRemoveAsync(key)
.thenApply(rr -> newValue);
}
return CompletableFuture.completedFuture(newValue);
}
return fastPutAsync(key, newValue, ttl)
.thenApply(rr -> newValue);
});
}).whenComplete((c, e) -> {
lock.unlockAsync(threadId);
});
});
return new CompletableFutureWrapper<>(f);
}
@Override
public V computeIfAbsent(K key, Duration ttl, Function<? super K, ? extends V> mappingFunction) {
checkNotBatch();
checkKey(key);
Objects.requireNonNull(mappingFunction);
V value = get(key);
if (value != null) {
return value;
}
RLock lock = getLock(key);
lock.lock();
try {
value = get(key);
if (value == null) {
V newValue = mappingFunction.apply(key);
if (newValue != null) {
V r = putIfAbsent(key, newValue, ttl);
if (r != null) {
return r;
}
return newValue;
}
return null;
}
return value;
} finally {
lock.unlock();
}
}
@Override
public RFuture<V> computeIfAbsentAsync(K key, Duration ttl, Function<? super K, ? extends V> mappingFunction) {
checkNotBatch();
checkKey(key);
Objects.requireNonNull(mappingFunction);
RLock lock = getLock(key);
long threadId = Thread.currentThread().getId();
CompletionStage<V> f = lock.lockAsync(threadId)
.thenCompose(r -> {
RFuture<V> oldValueFuture = getAsync(key, threadId);
return oldValueFuture.thenCompose(oldValue -> {
if (oldValue != null) {
return CompletableFuture.completedFuture(oldValue);
}
return CompletableFuture.supplyAsync(() -> mappingFunction.apply(key), getServiceManager().getExecutor())
.thenCompose(newValue -> {
if (newValue != null) {
return putIfAbsentAsync(key, newValue, ttl).thenApply(rr -> {
if (rr != null) {
return rr;
}
return newValue;
});
}
return CompletableFuture.completedFuture(null);
});
}).whenComplete((c, e) -> {
lock.unlockAsync(threadId);
});
});
return new CompletableFutureWrapper<>(f);
}
}
|
RedissonMapCacheNative
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/enrollment/KibanaEnrollmentResponse.java
|
{
"start": 706,
"end": 2553
}
|
class ____ extends ActionResponse implements ToXContentObject {
private final String tokenName;
private final SecureString tokenValue;
private final String httpCa;
public KibanaEnrollmentResponse(StreamInput in) throws IOException {
tokenName = in.readString();
tokenValue = in.readSecureString();
httpCa = in.readString();
}
public KibanaEnrollmentResponse(String tokenName, SecureString tokenValue, String httpCa) {
this.tokenName = tokenName;
this.tokenValue = tokenValue;
this.httpCa = httpCa;
}
public String getTokenName() {
return tokenName;
}
public SecureString getTokenValue() {
return tokenValue;
}
public String getHttpCa() {
return httpCa;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(tokenName);
out.writeSecureString(tokenValue);
out.writeString(httpCa);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
KibanaEnrollmentResponse that = (KibanaEnrollmentResponse) o;
return tokenName.equals(that.tokenName) && tokenValue.equals(that.tokenValue) && httpCa.equals(that.httpCa);
}
@Override
public int hashCode() {
return Objects.hash(tokenName, tokenValue, httpCa);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject()
.startObject("token")
.field("name", tokenName)
.field("value", tokenValue.toString())
.endObject()
.field("http_ca", httpCa)
.endObject();
return builder;
}
}
|
KibanaEnrollmentResponse
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/AbstractFuture.java
|
{
"start": 46540,
"end": 46787
}
|
enum ____ implements Executor {
INSTANCE;
@Override
public void execute(Runnable command) {
command.run();
}
@Override
public String toString() {
return "MoreExecutors.directExecutor()";
}
}
}
|
DirectExecutor
|
java
|
alibaba__nacos
|
client-basic/src/main/java/com/alibaba/nacos/client/auth/ram/identify/StsCredentialHolder.java
|
{
"start": 1236,
"end": 4036
}
|
class ____ {
private static final Logger LOGGER = LoggerFactory.getLogger(StsCredentialHolder.class);
private static final StsCredentialHolder INSTANCE = new StsCredentialHolder();
private StsCredential stsCredential;
private StsCredentialHolder() {
}
public static StsCredentialHolder getInstance() {
return INSTANCE;
}
/**
* Get Sts Credential.
*
* @return StsCredential
*/
public StsCredential getStsCredential() {
boolean cacheSecurityCredentials = StsConfig.getInstance().isCacheSecurityCredentials();
if (cacheSecurityCredentials && stsCredential != null) {
long currentTime = System.currentTimeMillis();
long expirationTime = stsCredential.getExpiration().getTime();
int timeToRefreshInMillisecond = StsConfig.getInstance().getTimeToRefreshInMillisecond();
if (expirationTime - currentTime > timeToRefreshInMillisecond) {
return stsCredential;
}
}
String stsResponse = getStsResponse();
stsCredential = JacksonUtils.toObj(stsResponse, new TypeReference<StsCredential>() {
});
LOGGER.info("[getSTSCredential] code:{}, accessKeyId:{}, lastUpdated:{}, expiration:{}",
stsCredential.getCode(), stsCredential.getAccessKeyId(), stsCredential.getLastUpdated(),
stsCredential.getExpiration());
return stsCredential;
}
private static String getStsResponse() {
String securityCredentials = StsConfig.getInstance().getSecurityCredentials();
if (securityCredentials != null) {
return securityCredentials;
}
String securityCredentialsUrl = StsConfig.getInstance().getSecurityCredentialsUrl();
try {
HttpRestResult<String> result = HttpClientManager.getInstance().getNacosRestTemplate()
.get(securityCredentialsUrl, Header.EMPTY, Query.EMPTY, String.class);
if (!result.ok()) {
LOGGER.error(
"can not get security credentials, securityCredentialsUrl: {}, responseCode: {}, response: {}",
securityCredentialsUrl, result.getCode(), result.getMessage());
throw new NacosRuntimeException(NacosException.SERVER_ERROR,
"can not get security credentials, responseCode: " + result.getCode() + ", response: " + result
.getMessage());
}
return result.getData();
} catch (Exception e) {
LOGGER.error("can not get security credentials", e);
throw new NacosRuntimeException(NacosException.SERVER_ERROR, e);
}
}
}
|
StsCredentialHolder
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/DoNotMockCheckerTest.java
|
{
"start": 10081,
"end": 10777
}
|
class ____ {}
public static void f() {
Mockito.mock(MockableObject.class);
// BUG: Diagnostic contains:
Mockito.mock(DoNotMockMyAutoValue.class);
Mockito.mock(MyAutoValue.class);
MyAutoValue myAutoValue = MyAutoValue.create(1);
DoNotMockMyAutoValue doNotMockMyAutoValue = DoNotMockMyAutoValue.create(1);
}
}
""")
.addSourceLines(
"lib/MyAutoValue.java",
"package lib;",
"import com.google.auto.value.AutoValue;",
"import com.google.errorprone.annotations.DoNotMock;",
"
|
MockableObject
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/records/AddReservationHomeSubClusterRequest.java
|
{
"start": 1465,
"end": 2576
}
|
class ____ {
@Private
@Unstable
public static AddReservationHomeSubClusterRequest newInstance(
ReservationHomeSubCluster reservationHomeSubCluster) {
AddReservationHomeSubClusterRequest mapRequest =
Records.newRecord(AddReservationHomeSubClusterRequest.class);
mapRequest.setReservationHomeSubCluster(reservationHomeSubCluster);
return mapRequest;
}
/**
* Get the {@link ReservationHomeSubCluster} representing the mapping of the
* Reservation to it's home sub-cluster.
*
* @return the mapping of the Reservation to it's home sub-cluster.
*/
@Public
@Unstable
public abstract ReservationHomeSubCluster getReservationHomeSubCluster();
/**
* Set the {@link ReservationHomeSubCluster} representing the mapping of the
* Reservation to it's home sub-cluster.
*
* @param reservationHomeSubCluster the mapping of the Reservation to it's
* home sub-cluster.
*/
@Private
@Unstable
public abstract void setReservationHomeSubCluster(
ReservationHomeSubCluster reservationHomeSubCluster);
}
|
AddReservationHomeSubClusterRequest
|
java
|
square__retrofit
|
retrofit-mock/src/main/java/retrofit2/mock/BehaviorDelegate.java
|
{
"start": 5353,
"end": 5811
}
|
class ____ implements ParameterizedType {
private final Type bodyType;
CallParameterizedTypeImpl(Type bodyType) {
this.bodyType = bodyType;
}
@Override
public Type[] getActualTypeArguments() {
return new Type[] {bodyType};
}
@Override
public Type getRawType() {
return Call.class;
}
@Override
public @Nullable Type getOwnerType() {
return null;
}
}
static
|
CallParameterizedTypeImpl
|
java
|
junit-team__junit5
|
junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/descriptor/MethodBasedTestDescriptor.java
|
{
"start": 7272,
"end": 7849
}
|
class ____ {
private final List<DiscoveryIssue> discoveryIssues = new ArrayList<>();
private final Class<?> testClass;
private final Method testMethod;
/**
* Set of method-level tags; does not contain tags from parent.
*/
private final Set<TestTag> tags;
MethodInfo(Class<?> testClass, Method testMethod) {
this.testClass = Preconditions.notNull(testClass, "Class must not be null");
this.testMethod = testMethod;
this.tags = getTags(testMethod, //
() -> "method '%s'".formatted(testMethod.toGenericString()), //
// Use _declaring_
|
MethodInfo
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/annotation/web/configurers/HeadersConfigurer.java
|
{
"start": 22082,
"end": 22748
}
|
class ____ {
private CacheControlHeadersWriter writer;
private CacheControlConfig() {
enable();
}
/**
* Disables Cache Control
* @return the {@link HeadersConfigurer} for additional configuration
*/
public HeadersConfigurer<H> disable() {
this.writer = null;
return HeadersConfigurer.this;
}
/**
* Ensures the Cache Control headers are enabled if they are not already.
* @return the {@link CacheControlConfig} for additional customization
*/
private CacheControlConfig enable() {
if (this.writer == null) {
this.writer = new CacheControlHeadersWriter();
}
return this;
}
}
public final
|
CacheControlConfig
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FlatpackEndpointBuilderFactory.java
|
{
"start": 53612,
"end": 53939
}
|
class ____ extends AbstractEndpointBuilder implements FlatpackEndpointBuilder, AdvancedFlatpackEndpointBuilder {
public FlatpackEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new FlatpackEndpointBuilderImpl(path);
}
}
|
FlatpackEndpointBuilderImpl
|
java
|
google__guava
|
android/guava/src/com/google/common/io/TempFileCreator.java
|
{
"start": 2335,
"end": 3618
}
|
class ____ {
static final TempFileCreator INSTANCE = pickSecureCreator();
/**
* @throws IllegalStateException if the directory could not be created (to implement the contract
* of {@link Files#createTempDir()}, such as if the system does not support creating temporary
* directories securely
*/
abstract File createTempDir();
abstract File createTempFile(String prefix) throws IOException;
private static TempFileCreator pickSecureCreator() {
try {
Class.forName("java.nio.file.Path");
return new JavaNioCreator();
} catch (ClassNotFoundException runningUnderAndroid) {
// Try another way.
}
try {
int version = (int) Class.forName("android.os.Build$VERSION").getField("SDK_INT").get(null);
int jellyBean =
(int) Class.forName("android.os.Build$VERSION_CODES").getField("JELLY_BEAN").get(null);
/*
* I assume that this check can't fail because JELLY_BEAN will be present only if we're
* running under Jelly Bean or higher. But it seems safest to check.
*/
if (version < jellyBean) {
return new ThrowingCreator();
}
} catch (ReflectiveOperationException e) {
// Should be impossible, but we want to return *something* so that
|
TempFileCreator
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/HazelcastListEndpointBuilderFactory.java
|
{
"start": 18399,
"end": 22544
}
|
interface ____
extends
HazelcastListEndpointConsumerBuilder,
HazelcastListEndpointProducerBuilder {
default AdvancedHazelcastListEndpointBuilder advanced() {
return (AdvancedHazelcastListEndpointBuilder) this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option is a:
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: common
*
* @param defaultOperation the value to set
* @return the dsl builder
*/
default HazelcastListEndpointBuilder defaultOperation(org.apache.camel.component.hazelcast.HazelcastOperation defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option will be converted to a
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: common
*
* @param defaultOperation the value to set
* @return the dsl builder
*/
default HazelcastListEndpointBuilder defaultOperation(String defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* Hazelcast configuration file.
*
* This option can also be loaded from an existing file, by prefixing
* with file: or classpath: followed by the location of the file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param hazelcastConfigUri the value to set
* @return the dsl builder
*/
default HazelcastListEndpointBuilder hazelcastConfigUri(String hazelcastConfigUri) {
doSetProperty("hazelcastConfigUri", hazelcastConfigUri);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option is a: <code>com.hazelcast.core.HazelcastInstance</code>
* type.
*
* Group: common
*
* @param hazelcastInstance the value to set
* @return the dsl builder
*/
default HazelcastListEndpointBuilder hazelcastInstance(com.hazelcast.core.HazelcastInstance hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option will be converted to a
* <code>com.hazelcast.core.HazelcastInstance</code> type.
*
* Group: common
*
* @param hazelcastInstance the value to set
* @return the dsl builder
*/
default HazelcastListEndpointBuilder hazelcastInstance(String hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference name which can be used for hazelcast
* endpoint. If you don't specify the instance reference, camel use the
* default hazelcast instance from the camel-hazelcast instance.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param hazelcastInstanceName the value to set
* @return the dsl builder
*/
default HazelcastListEndpointBuilder hazelcastInstanceName(String hazelcastInstanceName) {
doSetProperty("hazelcastInstanceName", hazelcastInstanceName);
return this;
}
}
/**
* Advanced builder for endpoint for the Hazelcast List component.
*/
public
|
HazelcastListEndpointBuilder
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/doublearrays/DoubleArrays_assertContains_Test.java
|
{
"start": 1290,
"end": 1766
}
|
class ____ extends DoubleArraysBaseTest {
private Arrays internalArrays;
@BeforeEach
@Override
public void setUp() {
super.setUp();
internalArrays = mock(Arrays.class);
setArrays(internalArrays);
}
@Test
void should_delegate_to_internal_Arrays() {
arrays.assertContains(someInfo(), actual, arrayOf(6d, 8d, 10d));
verify(internalArrays).assertContains(someInfo(), failures, actual, arrayOf(6d, 8d, 10d));
}
}
|
DoubleArrays_assertContains_Test
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ser/BeanSerializerFactory.java
|
{
"start": 28778,
"end": 40592
}
|
class ____ have.
*/
protected List<BeanPropertyWriter> filterBeanProperties(SerializationConfig config,
BeanDescription.Supplier beanDescRef, List<BeanPropertyWriter> props)
{
final Class<?> beanClass = beanDescRef.getBeanClass();
final AnnotatedClass classInfo = beanDescRef.getClassInfo();
// 01-May-2016, tatu: Which base type to use here gets tricky, since
// it may often make most sense to use general type for overrides,
// but what we have here may be more specific impl type. But for now
// just use it as is.
JsonIgnoreProperties.Value ignorals = config.getDefaultPropertyIgnorals(beanClass, classInfo);
Set<String> ignored = null;
if (ignorals != null) {
ignored = ignorals.findIgnoredForSerialization();
}
JsonIncludeProperties.Value inclusions = config.getDefaultPropertyInclusions(beanClass, classInfo);
Set<String> included = null;
if (inclusions != null) {
included = inclusions.getIncluded();
}
if (included != null || (ignored != null && !ignored.isEmpty())) {
Iterator<BeanPropertyWriter> it = props.iterator();
while (it.hasNext()) {
if (IgnorePropertiesUtil.shouldIgnore(it.next().getName(), ignored, included)) {
it.remove();
}
}
}
return props;
}
/**
* Overridable method used to filter out specifically problematic JDK provided
* properties.
*<p>
* See issue <a href="https://github.com/FasterXML/jackson-databind/issues/3305">
* databind-3305</a> for details.
*/
protected List<BeanPropertyWriter> filterUnwantedJDKProperties(SerializationConfig config,
BeanDescription.Supplier beanDescRef, List<BeanPropertyWriter> props)
{
// First, only consider something that implements `CharSequence`
if (beanDescRef.getType().isTypeOrSubTypeOf(CharSequence.class)) {
// And only has a single property from "isEmpty()" default method
if (props.size() == 1) {
BeanPropertyWriter prop = props.get(0);
// And only remove property induced by `isEmpty()` method declared
// in `CharSequence` (default implementation)
// (could in theory relax this limit, probably but... should be fine)
AnnotatedMember m = prop.getMember();
if ((m instanceof AnnotatedMethod)
&& "isEmpty".equals(m.getName())
&& m.getDeclaringClass() == CharSequence.class) {
props.remove(0);
}
}
}
return props;
}
/**
* Method called to handle view information for constructed serializer,
* based on bean property writers.
*<p>
* Note that this method is designed to be overridden by sub-classes
* if they want to provide custom view handling. As such it is not
* considered an internal implementation detail, and will be supported
* as part of API going forward.
*/
protected void processViews(SerializationConfig config, BeanSerializerBuilder builder)
{
// whether non-annotated fields are included by default or not is configurable
List<BeanPropertyWriter> props = builder.getProperties();
boolean includeByDefault = config.isEnabled(MapperFeature.DEFAULT_VIEW_INCLUSION);
final int propCount = props.size();
int viewsFound = 0;
BeanPropertyWriter[] filtered = new BeanPropertyWriter[propCount];
// Simple: view information is stored within individual writers, need to combine:
for (int i = 0; i < propCount; ++i) {
BeanPropertyWriter bpw = props.get(i);
Class<?>[] views = bpw.getViews();
if (views == null
// [databind#2311]: sometimes we add empty array
|| views.length == 0) { // no view info? include or exclude by default?
if (includeByDefault) {
filtered[i] = bpw;
}
} else {
++viewsFound;
filtered[i] = constructFilteredBeanWriter(bpw, views);
}
}
// minor optimization: if no view info, include-by-default, can leave out filtering info altogether:
if (includeByDefault && viewsFound == 0) {
return;
}
builder.setFilteredProperties(filtered);
}
/**
* Method that will apply by-type limitations (as per [JACKSON-429]);
* by default this is based on {@link com.fasterxml.jackson.annotation.JsonIgnoreType}
* annotation but can be supplied by module-provided introspectors too.
* Starting with 2.8 there are also "Config overrides" to consider.
*/
protected void removeIgnorableTypes(SerializationContext ctxt,
BeanDescription.Supplier beanDescRef,
List<BeanPropertyDefinition> properties)
{
AnnotationIntrospector intr = ctxt.getAnnotationIntrospector();
HashMap<Class<?>,Boolean> ignores = new HashMap<Class<?>,Boolean>();
Iterator<BeanPropertyDefinition> it = properties.iterator();
while (it.hasNext()) {
BeanPropertyDefinition property = it.next();
AnnotatedMember accessor = property.getAccessor();
// 22-Oct-2016, tatu: Looks like this removal is an important part of
// processing, as taking it out will result in a few test failures...
// But should probably be done somewhere else, not here?
if (accessor == null) {
it.remove();
continue;
}
Class<?> type = property.getRawPrimaryType();
Boolean result = ignores.get(type);
if (result == null) {
final SerializationConfig config = ctxt.getConfig();
result = config.getConfigOverride(type).getIsIgnoredType();
if (result == null) {
AnnotatedClass ac = ctxt.introspectClassAnnotations(type);
result = intr.isIgnorableType(config, ac);
// default to false, non-ignorable
if (result == null) {
result = Boolean.FALSE;
}
}
ignores.put(type, result);
}
// lotsa work, and yes, it is ignorable type, so:
if (result) {
it.remove();
}
}
}
/**
* Helper method that will remove all properties that do not have a mutator.
*/
protected void removeSetterlessGetters(SerializationConfig config,
BeanDescription.Supplier beanDescRef,
List<BeanPropertyDefinition> properties)
{
// one caveat: only remove implicit properties;
// explicitly annotated ones should remain
properties.removeIf(property -> !property.couldDeserialize() && !property.isExplicitlyIncluded());
}
/**
* Helper method called to ensure that we do not have "duplicate" type ids.
* Added to resolve [databind#222]
*/
protected List<BeanPropertyWriter> removeOverlappingTypeIds(SerializationContext ctxt,
BeanDescription.Supplier beanDescRef, BeanSerializerBuilder builder,
List<BeanPropertyWriter> props)
{
for (int i = 0, end = props.size(); i < end; ++i) {
BeanPropertyWriter bpw = props.get(i);
TypeSerializer td = bpw.getTypeSerializer();
if ((td == null) || (td.getTypeInclusion() != As.EXTERNAL_PROPERTY)) {
continue;
}
String n = td.getPropertyName();
PropertyName typePropName = PropertyName.construct(n);
for (BeanPropertyWriter w2 : props) {
if ((w2 != bpw) && w2.wouldConflictWithName(typePropName)) {
bpw.assignTypeSerializer(null);
break;
}
}
}
return props;
}
/*
/**********************************************************************
/* Internal helper methods
/**********************************************************************
*/
/**
* Secondary helper method for constructing {@link BeanPropertyWriter} for
* given member (field or method).
*/
protected BeanPropertyWriter _constructWriter(SerializationContext ctxt,
BeanPropertyDefinition propDef,
PropertyBuilder pb, boolean staticTyping, AnnotatedMember accessor)
{
final PropertyName name = propDef.getFullName();
JavaType type = accessor.getType();
BeanProperty.Std property = new BeanProperty.Std(name, type, propDef.getWrapperName(),
accessor, propDef.getMetadata());
// Does member specify a serializer? If so, let's use it.
ValueSerializer<?> annotatedSerializer = findSerializerFromAnnotation(ctxt,
accessor);
// Unlike most other code paths, serializer produced
// here will NOT be resolved or contextualized, unless done here, so:
if (annotatedSerializer != null) {
annotatedSerializer.resolve(ctxt);
// 05-Sep-2013, tatu: should be primary property serializer so:
annotatedSerializer = ctxt.handlePrimaryContextualization(annotatedSerializer, property);
}
// And how about polymorphic typing? First special to cover JAXB per-field settings:
TypeSerializer contentTypeSer = null;
// 16-Feb-2014, cgc: contentType serializers for collection-like and map-like types
if (type.isContainerType() || type.isReferenceType()) {
contentTypeSer = findPropertyContentTypeSerializer(ctxt, type, accessor);
}
// and if not JAXB collection/array with annotations, maybe regular type info?
TypeSerializer typeSer = ctxt.findPropertyTypeSerializer(type, accessor);
return pb.buildWriter(ctxt, propDef, type, annotatedSerializer,
typeSer, contentTypeSer, accessor, staticTyping);
}
protected ValueSerializer<?> _findUnsupportedTypeSerializer(SerializationContext ctxt,
JavaType type, BeanDescription.Supplier beanDescRef)
{
// 05-May-2020, tatu: Should we check for possible Shape override to "POJO"?
// (to let users force 'serialize-as-POJO'?
final String errorMsg = BeanUtil.checkUnsupportedType(ctxt.getConfig(), type);
if (errorMsg != null) {
// 30-Sep-2020, tatu: [databind#2867] Avoid checks if there is a mix-in
// which likely providers a handler...
if (ctxt.getConfig().findMixInClassFor(type.getRawClass()) == null) {
return new UnsupportedTypeSerializer(type, errorMsg);
}
}
return null;
}
/* Helper method used for preventing attempts to serialize various Jackson
* processor things which are not generally serializable.
*/
protected boolean _isUnserializableJacksonType(SerializationContext ctxt,
JavaType type)
{
final Class<?> raw = type.getRawClass();
return ObjectMapper.class.isAssignableFrom(raw)
|| ObjectReader.class.isAssignableFrom(raw)
|| ObjectWriter.class.isAssignableFrom(raw)
|| DatabindContext.class.isAssignableFrom(raw)
|| TokenStreamFactory.class.isAssignableFrom(raw)
|| JsonParser.class.isAssignableFrom(raw)
|| JsonGenerator.class.isAssignableFrom(raw)
;
}
}
|
may
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_3360/Vehicle.java
|
{
"start": 660,
"end": 990
}
|
class ____ extends Vehicle {
private final int numOfDoors;
public Car(String name, String modelName, int numOfDoors) {
super( name, modelName );
this.numOfDoors = numOfDoors;
}
public int getNumOfDoors() {
return numOfDoors;
}
}
public static
|
Car
|
java
|
quarkusio__quarkus
|
extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/utils/SignatureMethodCreator.java
|
{
"start": 3051,
"end": 3902
}
|
class ____ {
private Class<?> classType;
private Type type;
}
public static ReturnType responseType(Object entityTypeStr) {
return getReturnType(RestResponse.class, entityTypeStr);
}
public static ReturnType uniType(Object entityTypeStr) {
return getReturnType(Uni.class, entityTypeStr);
}
private static ReturnType getReturnType(Class<?> entityType, Object... arguments) {
ReturnType returnType = new ReturnType();
Type[] typeArguments = new Type[arguments.length];
for (int index = 0; index < arguments.length; index++) {
typeArguments[index] = toGizmoType(arguments[index]);
}
returnType.classType = entityType;
returnType.type = parameterizedType(classType(entityType), typeArguments);
return returnType;
}
}
|
ReturnType
|
java
|
quarkusio__quarkus
|
integration-tests/openshift-client/src/test/java/io/quarkus/it/openshift/client/KubernetesTestServerTest.java
|
{
"start": 665,
"end": 1716
}
|
class ____ {
@KubernetesTestServer
KubernetesServer mockServer;
@Inject
KubernetesClient kubernetesClient;
@Inject
OpenShiftClient openShiftClient;
@Test
public void clientsInjectedWithValidConfiguration() {
assertThat(kubernetesClient)
.isSameAs(openShiftClient)
.extracting(c -> c.getConfiguration().getMasterUrl())
.isEqualTo(mockServer.getKubernetesMockServer().url("/"));
}
@Test
public void openShiftClientInjectionWorks() throws InterruptedException {
openShiftClient.routes().resource(
new RouteBuilder()
.withNewMetadata().withName("the-route").endMetadata()
.withNewSpec().withHost("example.com").endSpec()
.build())
.createOr(NonDeletingOperation::update);
assertThat(mockServer.getLastRequest().getPath())
.isEqualTo("/apis/route.openshift.io/v1/namespaces/test/routes");
}
}
|
KubernetesTestServerTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/NullArgumentForNonNullParameterTest.java
|
{
"start": 3552,
"end": 3955
}
|
class ____ {
void foo() {
// BUG: Diagnostic contains:
ImmutableSet.of(null);
}
}
""")
.doTest();
}
@Test
public void positiveGuavaImmutableSetBuilderAdd() {
conservativeHelper
.addSourceLines(
"Foo.java",
"import com.google.common.collect.ImmutableSet;",
"
|
Foo
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeChecker.java
|
{
"start": 3592,
"end": 4750
}
|
class ____ {
private final long debugThresholdMs;
private final long infoThresholdMs;
private final long warnThresholdMs;
Thresholds(TimeValue debugThreshold, TimeValue infoThreshold, TimeValue warnThreshold) {
this.debugThresholdMs = debugThreshold.millis();
this.infoThresholdMs = infoThreshold.millis();
this.warnThresholdMs = warnThreshold.millis();
}
public Level getLogLevel(long millis) {
if (millis > warnThresholdMs) {
return Level.WARN;
}
if (millis > infoThresholdMs) {
return Level.INFO;
}
if (millis > debugThresholdMs) {
return Level.DEBUG;
}
return Level.TRACE;
}
long getDebugThresholdMs() {
return debugThresholdMs;
}
long getInfoThresholdMs() {
return infoThresholdMs;
}
long getWarnThresholdMs() {
return warnThresholdMs;
}
}
static final Consumer<Collection<String>> NO_OP_CONSUMER = ignore -> {};
static
|
Thresholds
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/TestDefaultNameNodePort.java
|
{
"start": 1189,
"end": 2739
}
|
class ____ {
@Test
public void testGetAddressFromString() throws Exception {
assertEquals(HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT,
DFSUtilClient.getNNAddress("foo").getPort());
assertEquals(HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT,
DFSUtilClient.getNNAddress("hdfs://foo/").getPort());
assertEquals(555,
DFSUtilClient.getNNAddress("hdfs://foo:555").getPort());
assertEquals(555,
DFSUtilClient.getNNAddress("foo:555").getPort());
}
@Test
public void testGetAddressFromConf() throws Exception {
Configuration conf = new HdfsConfiguration();
FileSystem.setDefaultUri(conf, "hdfs://foo/");
assertEquals(HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT,
DFSUtilClient.getNNAddress(conf).getPort());
FileSystem.setDefaultUri(conf, "hdfs://foo:555/");
assertEquals(555, DFSUtilClient.getNNAddress(conf).getPort());
FileSystem.setDefaultUri(conf, "foo");
assertEquals(HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT,
DFSUtilClient.getNNAddress(conf).getPort());
FileSystem.setDefaultUri(conf, "foo:555");
assertEquals(555, DFSUtilClient.getNNAddress(conf).getPort());
}
@Test
public void testGetUri() {
assertEquals(URI.create("hdfs://foo:555"),
DFSUtilClient.getNNUri(new InetSocketAddress("foo", 555)));
assertEquals(URI.create("hdfs://foo"),
DFSUtilClient.getNNUri(new InetSocketAddress("foo",
HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT)));
}
}
|
TestDefaultNameNodePort
|
java
|
apache__camel
|
components/camel-twitter/src/main/java/org/apache/camel/component/twitter/timeline/AbstractStatusConsumerHandler.java
|
{
"start": 1245,
"end": 2044
}
|
class ____ extends AbstractTwitterConsumerHandler {
AbstractStatusConsumerHandler(TwitterEndpoint endpoint) {
super(endpoint);
}
@Override
public List<Exchange> pollConsume() throws TwitterException {
List<Status> statusList = doPoll();
for (Status status : doPoll()) {
setLastIdIfGreater(status.getId());
}
return TwitterEventType.STATUS.createExchangeList(endpoint, statusList);
}
@Override
public List<Exchange> directConsume() throws TwitterException {
return TwitterEventType.STATUS.createExchangeList(endpoint, doDirect());
}
protected abstract List<Status> doPoll() throws TwitterException;
protected abstract List<Status> doDirect() throws TwitterException;
}
|
AbstractStatusConsumerHandler
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/BeanModelHelper.java
|
{
"start": 6886,
"end": 14884
}
|
class ____ optional for supplier
if (def.getBeanClass() != null) {
routeTemplateContext.bind(def.getName(), def.getBeanClass(), def.getBeanSupplier());
} else {
routeTemplateContext.bind(def.getName(), def.getBeanSupplier());
}
}
} else if (def.getScript() != null && def.getScriptLanguage() != null) {
final CamelContext camelContext = routeTemplateContext.getCamelContext();
final Language lan = camelContext.resolveLanguage(def.getScriptLanguage());
final Class<?> clazz;
if (def.getBeanClass() != null) {
clazz = def.getBeanClass();
} else if (def.getType() != null) {
String fqn = def.getType();
if (fqn.contains(":")) {
fqn = StringHelper.after(fqn, ":");
}
clazz = camelContext.getClassResolver().resolveMandatoryClass(fqn);
} else {
clazz = Object.class;
}
final String script = resolveScript(camelContext, def);
final ScriptingLanguage slan = lan instanceof ScriptingLanguage ? (ScriptingLanguage) lan : null;
if (slan != null) {
// scripting language should be evaluated with route template context as binding
// and memorize so the script is only evaluated once and the local bean is the same
// if a route template refers to the local bean multiple times
routeTemplateContext.bind(def.getName(), clazz, Suppliers.memorize(() -> {
Object local;
Map<String, Object> bindings = new HashMap<>();
// use rtx as the short-hand name, as context would imply its CamelContext
bindings.put("rtc", routeTemplateContext);
try {
local = slan.evaluate(script, bindings, Object.class);
if (!props.isEmpty()) {
PropertyBindingSupport.setPropertiesOnTarget(camelContext, local, props);
}
if (def.getInitMethod() != null) {
org.apache.camel.support.ObjectHelper.invokeMethodSafe(def.getInitMethod(), local);
}
if (def.getDestroyMethod() != null) {
routeTemplateContext.registerDestroyMethod(def.getName(), def.getDestroyMethod());
}
} catch (Exception e) {
throw new IllegalStateException(
"Cannot create bean: " + def.getType(), e);
}
return local;
}));
} else {
// exchange based languages needs a dummy exchange to be evaluated
// and memorize so the script is only evaluated once and the local bean is the same
// if a route template refers to the local bean multiple times
routeTemplateContext.bind(def.getName(), clazz, Suppliers.memorize(() -> {
try {
Exchange dummy = ExchangeHelper.getDummy(camelContext);
String text = ScriptHelper.resolveOptionalExternalScript(camelContext, dummy, script);
if (text != null) {
Expression exp = lan.createExpression(text);
Object local = exp.evaluate(dummy, clazz);
if (!props.isEmpty()) {
PropertyBindingSupport.setPropertiesOnTarget(camelContext, local, props);
}
if (def.getInitMethod() != null) {
try {
org.apache.camel.support.ObjectHelper.invokeMethodSafe(def.getInitMethod(), local);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeException(e);
}
}
if (def.getDestroyMethod() != null) {
routeTemplateContext.registerDestroyMethod(def.getName(), def.getDestroyMethod());
}
return local;
} else {
return null;
}
} catch (Exception e) {
throw new IllegalStateException(
"Cannot create bean: " + def.getType(), e);
}
}));
}
} else if (def.getBeanClass() != null || def.getType() != null) {
String type = def.getType();
if (type == null) {
type = def.getBeanClass().getName();
}
if (!type.startsWith("#")) {
type = "#class:" + type;
}
// factory bean/method
if (def.getFactoryBean() != null && def.getFactoryMethod() != null) {
type = type + "#" + def.getFactoryBean() + ":" + def.getFactoryMethod();
} else if (def.getFactoryMethod() != null) {
type = type + "#" + def.getFactoryMethod();
}
// property binding support has constructor arguments as part of the type
StringJoiner ctr = new StringJoiner(", ");
if (def.getConstructors() != null && !def.getConstructors().isEmpty()) {
// need to sort constructor args based on index position
Map<Integer, Object> sorted = new TreeMap<>(def.getConstructors());
for (Object val : sorted.values()) {
String text = val.toString();
if (!StringHelper.isQuoted(text)) {
text = "\"" + text + "\"";
}
ctr.add(text);
}
type = type + "(" + ctr + ")";
}
final String classType = type;
final CamelContext camelContext = routeTemplateContext.getCamelContext();
routeTemplateContext.bind(def.getName(), Object.class, Suppliers.memorize(() -> {
try {
Object local = PropertyBindingSupport.resolveBean(camelContext, classType);
// do not set properties when using #type as it uses an existing shared bean
boolean setProps = !classType.startsWith("#type");
if (setProps) {
// set optional properties on created bean
if (def.getProperties() != null && !def.getProperties().isEmpty()) {
PropertyBindingSupport.setPropertiesOnTarget(camelContext, local, def.getProperties());
}
}
if (def.getInitMethod() != null) {
org.apache.camel.support.ObjectHelper.invokeMethodSafe(def.getInitMethod(), local);
}
if (def.getDestroyMethod() != null) {
routeTemplateContext.registerDestroyMethod(def.getName(), def.getDestroyMethod());
}
return local;
} catch (Exception e) {
throw new IllegalStateException(
"Cannot create bean: " + def.getType(), e);
}
}));
} else {
// invalid syntax for the local bean, so lets report an exception
throw new IllegalArgumentException(
"Route template local bean: " + def.getName() + " has invalid type syntax: " + def.getType()
+ ". To refer to a
|
is
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractUrlHandlerMapping.java
|
{
"start": 20464,
"end": 20959
}
|
class ____ implements HandlerInterceptor {
private final Map<String, String> uriTemplateVariables;
public UriTemplateVariablesHandlerInterceptor(Map<String, String> uriTemplateVariables) {
this.uriTemplateVariables = uriTemplateVariables;
}
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
exposeUriTemplateVariables(this.uriTemplateVariables, request);
return true;
}
}
}
|
UriTemplateVariablesHandlerInterceptor
|
java
|
apache__maven
|
compat/maven-resolver-provider/src/main/java/org/apache/maven/repository/internal/DefaultVersionResolver.java
|
{
"start": 16758,
"end": 17278
}
|
class ____ {
String timestamp;
String version;
ArtifactRepository repository;
VersionInfo(String timestamp, String version, ArtifactRepository repository) {
this.timestamp = (timestamp != null) ? timestamp : "";
this.version = version;
this.repository = repository;
}
boolean isOutdated(String timestamp) {
return timestamp != null && timestamp.compareTo(this.timestamp) > 0;
}
}
private static
|
VersionInfo
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/main/java/org/hibernate/processor/validation/MockSessionFactory.java
|
{
"start": 27149,
"end": 36324
}
|
class ____<X> extends EntityTypeImpl<X> {
public MockEntityDomainType(JavaType<X> javaType) {
this(javaType, getJpaEntityName(javaType.getTypeName()));
}
public MockEntityDomainType(JavaType<X> javaType, String jpaEntityName) {
super(javaType.getTypeName(), jpaEntityName,
false, true, false, javaType, null,
metamodel.getJpaMetamodel());
}
@Override
public @Nullable SqmSingularPersistentAttribute<? super X, ?> findVersionAttribute() {
final BasicType<?> type = getVersionType(getHibernateEntityName());
if (type == null) {
return null;
}
else {
return new SingularAttributeImpl<>(
MockEntityDomainType.this,
EntityVersionMapping.VERSION_ROLE_NAME,
AttributeClassification.BASIC,
type,
type.getRelationalJavaType(),
null,
false,
true,
false,
false
);
}
}
@Override
public boolean hasVersionAttribute() {
return getVersionType(getHibernateEntityName()) != null;
}
@Override
public @Nullable SqmPathSource<?> getIdentifierDescriptor() {
final Type type = getIdentifierType(getHibernateEntityName());
if (type instanceof BasicDomainType<?> basicDomainType) {
return new BasicSqmPathSource<>(
EntityIdentifierMapping.ID_ROLE_NAME,
null,
basicDomainType,
MockEntityDomainType.this.getExpressibleJavaType(),
Bindable.BindableType.SINGULAR_ATTRIBUTE,
false
);
}
else if (type instanceof SqmEmbeddableDomainType<?> embeddableDomainType) {
return new EmbeddedSqmPathSource<>(
EntityIdentifierMapping.ID_ROLE_NAME,
null,
embeddableDomainType,
Bindable.BindableType.SINGULAR_ATTRIBUTE,
false
);
}
else {
return null;
}
}
@Override
public @Nullable SqmPathSource<?> findSubPathSource(String name, boolean includeSubtypes) {
switch (name) {
case EntityIdentifierMapping.ID_ROLE_NAME:
return getIdentifierDescriptor();
case EntityVersionMapping.VERSION_ROLE_NAME:
return findVersionAttribute();
}
final SqmPathSource<?> source = super.findSubPathSource(name, includeSubtypes);
if ( source != null ) {
return source;
}
final String supertype = MockSessionFactory.this.getSupertype(getHibernateEntityName());
final PersistentAttribute<? super Object, ?> superattribute
= new MockMappedDomainType<>(supertype).findAttribute(name);
if (superattribute != null) {
return (SqmPathSource<?>) superattribute;
}
for (Map.Entry<String, MockEntityPersister> entry : entityPersistersByName.entrySet()) {
final MockEntityPersister entityPersister = entry.getValue();
if (!entityPersister.getEntityName().equals(getHibernateEntityName())
&& isSubtype(entityPersister.getEntityName(), getHibernateEntityName())) {
final MockEntityDomainType<Object> entityDomainType =
new MockEntityDomainType<>(new MockJavaType<>(entityPersister.getEntityName()),
entityPersister.getJpaEntityName());
final PersistentAttribute<? super Object, ?> subattribute =
entityDomainType.findAttribute(name);
if (subattribute != null) {
return (SqmPathSource<?>) subattribute;
}
}
}
return null;
}
@Override
public @Nullable SqmPersistentAttribute<? super X, ?> findAttribute(String name) {
final var attribute = super.findAttribute(name);
if (attribute != null) {
return attribute;
}
else {
final String supertype = MockSessionFactory.this.getSupertype( getHibernateEntityName() );
return new MockMappedDomainType<>( supertype ).findAttribute( name );
}
}
@Override
public @Nullable SqmPersistentAttribute<X,?> findDeclaredAttribute(String name) {
final String entityName = getHibernateEntityName();
return isAttributeDefined(entityName, name)
? createAttribute(name, entityName, getReferencedPropertyType(entityName, name), this)
: null;
}
}
protected abstract String getJpaEntityName(String typeName);
private <T> SqmPersistentAttribute<T,?> createAttribute(String name, String entityName, Type type, ManagedDomainType<T> owner) {
if (type==null) {
throw new UnsupportedOperationException(entityName + "." + name);
}
else if ( type.isCollectionType() ) {
final CollectionType collectionType = (CollectionType) type;
return createPluralAttribute(collectionType, entityName, name, owner);
}
else if ( type.isEntityType() ) {
return new SingularAttributeImpl<>(
owner,
name,
AttributeClassification.MANY_TO_ONE,
new MockEntityDomainType<>(new MockJavaType<>(type.getName())),
null,
null,
false,
false,
true,
false
);
}
else if ( type.isComponentType() ) {
final CompositeType compositeType = (CompositeType) type;
return new SingularAttributeImpl<>(
owner,
name,
AttributeClassification.EMBEDDED,
createEmbeddableDomainType(entityName, compositeType, owner),
null,
null,
false,
false,
true,
false
);
}
else {
return new SingularAttributeImpl<>(
owner,
name,
AttributeClassification.BASIC,
(SqmDomainType<?>) type,
type instanceof JdbcMapping jdbcMapping
? jdbcMapping.getJavaTypeDescriptor()
: null,
null,
false,
false,
true,
false
);
}
}
private SqmDomainType<?> getElementDomainType(String entityName, CollectionType collectionType, ManagedDomainType<?> owner) {
final Type elementType = collectionType.getElementType(MockSessionFactory.this);
return getDomainType(entityName, collectionType, owner, elementType);
}
private DomainType<?> getMapKeyDomainType(String entityName, CollectionType collectionType, ManagedDomainType<?> owner) {
final Type keyType = getMappingMetamodel().getCollectionDescriptor( collectionType.getRole() ).getIndexType();
return getDomainType(entityName, collectionType, owner, keyType);
}
private SqmDomainType<?> getDomainType(
String entityName, CollectionType collectionType, ManagedDomainType<?> owner, Type elementType) {
if ( elementType.isEntityType() ) {
final String associatedEntityName = collectionType.getAssociatedEntityName(this);
return new MockEntityDomainType<>(new MockJavaType<>(associatedEntityName));
}
else if ( elementType.isComponentType() ) {
final CompositeType compositeType = (CompositeType) elementType;
return createEmbeddableDomainType(entityName, compositeType, owner);
}
else if ( elementType instanceof SqmDomainType<?> domainType ) {
return domainType;
}
else {
return OBJECT_BASIC_TYPE;
}
}
private <T> AbstractPluralAttribute<T,?,?> createPluralAttribute(
CollectionType collectionType,
String entityName,
String name,
ManagedDomainType<T> owner) {
final Property property = new Property();
property.setName(name);
final JavaType<?> collectionJavaType =
typeConfiguration.getJavaTypeRegistry()
.resolveDescriptor(collectionType.getReturnedClass());
final SqmDomainType<?> elementDomainType = getElementDomainType(entityName, collectionType, owner);
final CollectionClassification classification = collectionType.getCollectionClassification();
return switch ( classification ) {
case LIST -> new ListAttributeImpl(
new PluralAttributeBuilder<>(
collectionJavaType,
true,
AttributeClassification.MANY_TO_MANY,
classification,
elementDomainType,
typeConfiguration.getBasicTypeRegistry()
.getRegisteredType( Integer.class ),
owner,
property,
null
)
);
case BAG, ID_BAG -> new BagAttributeImpl(
new PluralAttributeBuilder<>(
collectionJavaType,
true,
AttributeClassification.MANY_TO_MANY,
classification,
elementDomainType,
null,
owner,
property,
null
)
);
case SET, SORTED_SET, ORDERED_SET -> new SetAttributeImpl(
new PluralAttributeBuilder<>(
collectionJavaType,
true,
AttributeClassification.MANY_TO_MANY,
classification,
elementDomainType,
null,
owner,
property,
null
)
);
case MAP, SORTED_MAP, ORDERED_MAP -> new MapAttributeImpl(
new PluralAttributeBuilder<>(
collectionJavaType,
true,
AttributeClassification.MANY_TO_MANY,
classification,
elementDomainType,
getMapKeyDomainType( entityName, collectionType, owner ),
owner,
property,
null
)
);
default -> null;
};
}
private <T> EmbeddableTypeImpl<T> createEmbeddableDomainType(String entityName, CompositeType compositeType, ManagedDomainType<T> owner) {
final JavaType<T> javaType = new UnknownBasicJavaType<>(null, compositeType.getReturnedClassName());
return new EmbeddableTypeImpl<>( javaType, null, null, true, metamodel.getJpaMetamodel() ) {
@Override
public @Nullable SqmPersistentAttribute<T, ?> findAttribute(String name) {
return createAttribute(
name,
entityName,
compositeType.getSubtypes()[compositeType.getPropertyIndex(name)],
owner
);
}
};
}
}
|
MockEntityDomainType
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringInterceptSimpleRouteStopTest.java
|
{
"start": 1059,
"end": 1437
}
|
class ____ extends InterceptSimpleRouteStopTest {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/SpringInterceptSimpleRouteStopTest.xml");
}
}
|
SpringInterceptSimpleRouteStopTest
|
java
|
bumptech__glide
|
library/test/src/test/java/com/bumptech/glide/load/resource/gif/GifDrawableTest.java
|
{
"start": 1957,
"end": 18822
}
|
class ____ {
@Rule public final TearDownGlide tearDownGlide = new TearDownGlide();
private GifDrawable drawable;
private int frameHeight;
private int frameWidth;
private Bitmap firstFrame;
private int initialSdkVersion;
@Mock private Drawable.Callback cb;
@Mock private GifFrameLoader frameLoader;
@Mock private Paint paint;
@Mock private Transformation<Bitmap> transformation;
private Application context;
private static Paint isAPaint() {
return isA(Paint.class);
}
private static Rect isARect() {
return isA(Rect.class);
}
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
context = ApplicationProvider.getApplicationContext();
frameWidth = 120;
frameHeight = 450;
firstFrame = Bitmap.createBitmap(frameWidth, frameHeight, Bitmap.Config.RGB_565);
drawable = new GifDrawable(frameLoader, paint);
when(frameLoader.getWidth()).thenReturn(frameWidth);
when(frameLoader.getHeight()).thenReturn(frameHeight);
when(frameLoader.getCurrentFrame()).thenReturn(firstFrame);
when(frameLoader.getCurrentIndex()).thenReturn(0);
drawable.setCallback(cb);
initialSdkVersion = Build.VERSION.SDK_INT;
}
@After
public void tearDown() {
Util.setSdkVersionInt(initialSdkVersion);
}
@Test
public void testShouldDrawFirstFrameBeforeAnyFrameRead() {
Canvas canvas = new Canvas();
drawable.draw(canvas);
ShadowCanvas shadowCanvas = Shadow.extract(canvas);
assertThat(shadowCanvas.getDescription())
.isEqualTo(
"Bitmap ("
+ firstFrame.getWidth()
+ " x "
+ firstFrame.getHeight()
+ ") at (0,0) with height=0 and width=0");
}
@Test
public void testDoesDrawCurrentFrameIfOneIsAvailable() {
Canvas canvas = mock(Canvas.class);
Bitmap currentFrame = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_4444);
when(frameLoader.getCurrentFrame()).thenReturn(currentFrame);
drawable.draw(canvas);
verify(canvas).drawBitmap(eq(currentFrame), (Rect) isNull(), isARect(), isAPaint());
verify(canvas, never()).drawBitmap(eq(firstFrame), (Rect) isNull(), isARect(), isAPaint());
}
@Test
public void testRequestsNextFrameOnStart() {
drawable.setVisible(true, true);
drawable.start();
verify(frameLoader).subscribe(eq(drawable));
}
@Test
public void testRequestsNextFrameOnStartWithoutCallToSetVisible() {
drawable.start();
verify(frameLoader).subscribe(eq(drawable));
}
@Test
public void testDoesNotRequestNextFrameOnStartIfGotCallToSetVisibleWithVisibleFalse() {
drawable.setVisible(false, false);
drawable.start();
verify(frameLoader, never()).subscribe(eq(drawable));
}
@Test
public void testDoesNotRequestNextFrameOnStartIfHasSingleFrame() {
when(frameLoader.getFrameCount()).thenReturn(1);
drawable.setVisible(true, false);
drawable.start();
verify(frameLoader, never()).subscribe(eq(drawable));
}
@Test
public void testInvalidatesSelfOnStartIfHasSingleFrame() {
when(frameLoader.getFrameCount()).thenReturn(1);
drawable.setVisible(true, false);
drawable.start();
verify(cb).invalidateDrawable(eq(drawable));
}
@Test
public void testShouldInvalidateSelfOnRun() {
drawable.setVisible(true, true);
drawable.start();
verify(cb).invalidateDrawable(eq(drawable));
}
@Test
public void testShouldNotScheduleItselfIfAlreadyRunning() {
drawable.setVisible(true, true);
drawable.start();
drawable.start();
verify(frameLoader, times(1)).subscribe(eq(drawable));
}
@Test
public void testReturnsFalseFromIsRunningWhenNotRunning() {
assertFalse(drawable.isRunning());
}
@Test
public void testReturnsTrueFromIsRunningWhenRunning() {
drawable.setVisible(true, true);
drawable.start();
assertTrue(drawable.isRunning());
}
@Test
public void testInvalidatesSelfWhenFrameReady() {
drawable.setIsRunning(true);
drawable.onFrameReady();
verify(cb).invalidateDrawable(eq(drawable));
}
@Test
public void testDoesNotStartLoadingNextFrameWhenCurrentFinishesIfHasNoCallback() {
drawable.setIsRunning(true);
drawable.setCallback(null);
drawable.onFrameReady();
verify(frameLoader).unsubscribe(eq(drawable));
}
@Test
public void testStopsWhenCurrentFrameFinishesIfHasNoCallback() {
drawable.setIsRunning(true);
drawable.setCallback(null);
drawable.onFrameReady();
assertFalse(drawable.isRunning());
}
@Test
public void testUnsubscribesWhenCurrentFinishesIfHasNoCallback() {
drawable.setIsRunning(true);
drawable.setCallback(null);
drawable.onFrameReady();
verify(frameLoader).unsubscribe(eq(drawable));
}
@Test
public void testSetsIsRunningFalseOnStop() {
drawable.start();
drawable.stop();
assertFalse(drawable.isRunning());
}
@Test
public void testStopsOnSetVisibleFalse() {
drawable.start();
drawable.setVisible(false, true);
assertFalse(drawable.isRunning());
}
@Test
public void testStartsOnSetVisibleTrueIfRunning() {
drawable.start();
drawable.setVisible(false, false);
drawable.setVisible(true, true);
assertTrue(drawable.isRunning());
}
@Test
public void testDoesNotStartOnVisibleTrueIfNotRunning() {
drawable.setVisible(true, true);
assertFalse(drawable.isRunning());
}
@Test
public void testDoesNotStartOnSetVisibleIfStartedAndStopped() {
drawable.start();
drawable.stop();
drawable.setVisible(true, true);
assertFalse(drawable.isRunning());
}
@Test
public void testDoesNotImmediatelyRunIfStartedWhileNotVisible() {
drawable.setVisible(false, false);
drawable.start();
assertFalse(drawable.isRunning());
}
@Test
public void testGetOpacityReturnsTransparent() {
assertEquals(PixelFormat.TRANSPARENT, drawable.getOpacity());
}
@Test
public void testReturnsFrameCountFromDecoder() {
int expected = 4;
when(frameLoader.getFrameCount()).thenReturn(expected);
assertEquals(expected, drawable.getFrameCount());
}
@Test
public void testReturnsDefaultFrameIndex() {
final int expected = -1;
when(frameLoader.getCurrentIndex()).thenReturn(expected);
assertEquals(expected, drawable.getFrameIndex());
}
@Test
public void testReturnsNonDefaultFrameIndex() {
final int expected = 100;
when(frameLoader.getCurrentIndex()).thenReturn(expected);
assertEquals(expected, drawable.getFrameIndex());
}
@Test
public void testRecycleCallsClearOnFrameManager() {
drawable.recycle();
verify(frameLoader).clear();
}
@Test
public void testIsNotRecycledIfNotRecycled() {
assertFalse(drawable.isRecycled());
}
@Test
public void testIsRecycledAfterRecycled() {
drawable.recycle();
assertTrue(drawable.isRecycled());
}
@Test
public void testReturnsNonNullConstantState() {
assertNotNull(drawable.getConstantState());
}
@Test
public void testReturnsSizeFromFrameLoader() {
int size = 1243;
when(frameLoader.getSize()).thenReturn(size);
assertThat(drawable.getSize()).isEqualTo(size);
}
@Test
public void testReturnsNewDrawableFromConstantState() {
Bitmap firstFrame = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888);
drawable =
new GifDrawable(
ApplicationProvider.getApplicationContext(),
mock(GifDecoder.class),
transformation,
100,
100,
firstFrame);
assertNotNull(Preconditions.checkNotNull(drawable.getConstantState()).newDrawable());
assertNotNull(
drawable
.getConstantState()
.newDrawable(ApplicationProvider.getApplicationContext().getResources()));
}
@Test
public void testReturnsFrameWidthAndHeightForIntrinsicDimensions() {
assertEquals(frameWidth, drawable.getIntrinsicWidth());
assertEquals(frameHeight, drawable.getIntrinsicHeight());
}
@Test
public void testLoopsASingleTimeIfLoopCountIsSetToOne() {
final int loopCount = 1;
final int frameCount = 2;
when(frameLoader.getFrameCount()).thenReturn(frameCount);
drawable.setLoopCount(loopCount);
drawable.setVisible(true, true);
drawable.start();
runLoops(loopCount, frameCount);
verifyRanLoops(loopCount, frameCount);
assertFalse("drawable should be stopped after loop is completed", drawable.isRunning());
}
@Test
public void testLoopsForeverIfLoopCountIsSetToLoopForever() {
final int loopCount = 40;
final int frameCount = 2;
when(frameLoader.getFrameCount()).thenReturn(frameCount);
drawable.setLoopCount(GifDrawable.LOOP_FOREVER);
drawable.setVisible(true, true);
drawable.start();
runLoops(loopCount, frameCount);
verifyRanLoops(loopCount, frameCount);
assertTrue("drawable should be still running", drawable.isRunning());
}
@Test
public void testLoopsOnceIfLoopCountIsSetToOneWithThreeFrames() {
final int loopCount = 1;
final int frameCount = 3;
when(frameLoader.getFrameCount()).thenReturn(frameCount);
drawable.setLoopCount(loopCount);
drawable.setVisible(true, true);
drawable.start();
runLoops(loopCount, frameCount);
verifyRanLoops(loopCount, frameCount);
assertFalse("drawable should be stopped after loop is completed", drawable.isRunning());
}
@Test
public void testLoopsThreeTimesIfLoopCountIsSetToThree() {
final int loopCount = 3;
final int frameCount = 2;
when(frameLoader.getFrameCount()).thenReturn(frameCount);
drawable.setLoopCount(loopCount);
drawable.setVisible(true, true);
drawable.start();
runLoops(loopCount, frameCount);
verifyRanLoops(loopCount, frameCount);
assertFalse("drawable should be stopped after loop is completed", drawable.isRunning());
}
@Test
public void testCallingStartResetsLoopCounter() {
when(frameLoader.getFrameCount()).thenReturn(2);
drawable.setLoopCount(1);
drawable.setVisible(true, true);
drawable.start();
drawable.onFrameReady();
when(frameLoader.getCurrentIndex()).thenReturn(1);
drawable.onFrameReady();
assertFalse("drawable should be stopped after loop is completed", drawable.isRunning());
drawable.start();
when(frameLoader.getCurrentIndex()).thenReturn(0);
drawable.onFrameReady();
when(frameLoader.getCurrentIndex()).thenReturn(1);
drawable.onFrameReady();
// 4 onFrameReady(), 2 start()
verify(cb, times(4 + 2)).invalidateDrawable(eq(drawable));
assertFalse("drawable should be stopped after loop is completed", drawable.isRunning());
}
@Test
public void testChangingTheLoopCountAfterHittingTheMaxLoopCount() {
final int initialLoopCount = 1;
final int frameCount = 2;
when(frameLoader.getFrameCount()).thenReturn(frameCount);
drawable.setLoopCount(initialLoopCount);
drawable.setVisible(true, true);
drawable.start();
runLoops(initialLoopCount, frameCount);
assertFalse("drawable should be stopped after loop is completed", drawable.isRunning());
final int newLoopCount = 2;
drawable.setLoopCount(newLoopCount);
drawable.start();
runLoops(newLoopCount, frameCount);
int numStarts = 2;
int expectedFrames = (initialLoopCount + newLoopCount) * frameCount + numStarts;
verify(cb, times(expectedFrames)).invalidateDrawable(eq(drawable));
assertFalse("drawable should be stopped after loop is completed", drawable.isRunning());
}
@Test(expected = IllegalArgumentException.class)
public void testThrowsIfGivenLoopCountLessThanZeroAndNotInfinite() {
drawable.setLoopCount(-2);
}
@Test
public void testUsesDecoderTotalLoopCountIfLoopCountIsLoopIntrinsic() {
final int frameCount = 3;
final int loopCount = 2;
when(frameLoader.getLoopCount()).thenReturn(loopCount);
when(frameLoader.getFrameCount()).thenReturn(frameCount);
drawable.setLoopCount(GifDrawable.LOOP_INTRINSIC);
drawable.setVisible(true, true);
drawable.start();
runLoops(loopCount, frameCount);
verifyRanLoops(loopCount, frameCount);
assertFalse("drawable should be stopped after loop is completed", drawable.isRunning());
}
@Test
public void testLoopsForeverIfLoopCountIsLoopIntrinsicAndTotalIterationCountIsForever() {
final int frameCount = 3;
final int loopCount = 40;
when(frameLoader.getLoopCount()).thenReturn(GifDecoder.TOTAL_ITERATION_COUNT_FOREVER);
when(frameLoader.getFrameCount()).thenReturn(frameCount);
drawable.setLoopCount(GifDrawable.LOOP_INTRINSIC);
drawable.setVisible(true, true);
drawable.start();
runLoops(loopCount, frameCount);
verifyRanLoops(loopCount, frameCount);
assertTrue("drawable should be still running", drawable.isRunning());
}
@Test
public void testDoesNotDrawFrameAfterRecycle() {
Bitmap bitmap = Bitmap.createBitmap(100, 112341, Bitmap.Config.RGB_565);
drawable.setVisible(true, true);
drawable.start();
when(frameLoader.getCurrentFrame()).thenReturn(bitmap);
drawable.onFrameReady();
drawable.recycle();
Canvas canvas = mock(Canvas.class);
drawable.draw(canvas);
verify(canvas, never()).drawBitmap(eq(bitmap), isARect(), isARect(), isAPaint());
}
@Test
public void testSetsFrameTransformationOnFrameManager() {
Bitmap bitmap = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888);
drawable.setFrameTransformation(transformation, bitmap);
verify(frameLoader).setFrameTransformation(eq(transformation), eq(bitmap));
}
@Test(expected = NullPointerException.class)
public void testThrowsIfConstructedWithNullFirstFrame() {
new GifDrawable(
ApplicationProvider.getApplicationContext(),
mock(GifDecoder.class),
transformation,
100,
100,
null);
}
@Test
public void testAppliesGravityOnDrawAfterBoundsChange() {
Rect bounds = new Rect(0, 0, frameWidth * 2, frameHeight * 2);
drawable.setBounds(bounds);
Canvas canvas = mock(Canvas.class);
drawable.draw(canvas);
verify(canvas).drawBitmap(isA(Bitmap.class), (Rect) isNull(), eq(bounds), eq(paint));
}
@Test
public void testSetAlphaSetsAlphaOnPaint() {
int alpha = 100;
drawable.setAlpha(alpha);
verify(paint).setAlpha(eq(alpha));
}
@Test
public void testSetColorFilterSetsColorFilterOnPaint() {
ColorFilter colorFilter = new PorterDuffColorFilter(Color.RED, Mode.ADD);
drawable.setColorFilter(colorFilter);
// Use ArgumentCaptor instead of eq() due to b/73121412 where ShadowPorterDuffColorFilter.equals
// uses a method that can't be found (PorterDuffColorFilter.getColor).
ArgumentCaptor<ColorFilter> captor = ArgumentCaptor.forClass(ColorFilter.class);
verify(paint).setColorFilter(captor.capture());
assertThat(captor.getValue()).isSameInstanceAs(colorFilter);
}
@Test
public void testReturnsCurrentTransformationInGetFrameTransformation() {
@SuppressWarnings("unchecked")
Transformation<Bitmap> newTransformation = mock(Transformation.class);
Bitmap bitmap = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888);
drawable.setFrameTransformation(newTransformation, bitmap);
verify(frameLoader).setFrameTransformation(eq(newTransformation), eq(bitmap));
}
@Test(expected = NullPointerException.class)
public void testThrowsIfCreatedWithNullState() {
new GifDrawable(null);
}
@Test
public void onFrameReady_whenAttachedToDrawableCallbackButNotViewCallback_stops() {
TransitionDrawable topLevel = new TransitionDrawable(new Drawable[] {drawable});
drawable.setCallback(topLevel);
topLevel.setCallback(null);
drawable.start();
drawable.onFrameReady();
assertThat(drawable.isRunning()).isFalse();
}
@Test
public void onFrameReady_whenAttachedtoDrawableCallbackWithViewCallbackParent_doesNotStop() {
TransitionDrawable topLevel = new TransitionDrawable(new Drawable[] {drawable});
drawable.setCallback(topLevel);
topLevel.setCallback(new View(context));
drawable.start();
drawable.onFrameReady();
assertThat(drawable.isRunning()).isTrue();
}
private void verifyRanLoops(int loopCount, int frameCount) {
// 1 for invalidate in start().
verify(cb, times(1 + loopCount * frameCount)).invalidateDrawable(eq(drawable));
}
private void runLoops(int loopCount, int frameCount) {
for (int loop = 0; loop < loopCount; loop++) {
for (int frame = 0; frame < frameCount; frame++) {
when(frameLoader.getCurrentIndex()).thenReturn(frame);
assertTrue(
"drawable should be started before calling drawable.onFrameReady()",
drawable.isRunning());
drawable.onFrameReady();
}
}
}
}
|
GifDrawableTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java
|
{
"start": 14196,
"end": 20040
}
|
enum ____ {
MAP(new ParseField("map")) {
@Override
Aggregator create(
String name,
AggregatorFactories factories,
ValuesSourceConfig valuesSourceConfig,
DocValueFormat format,
TermsAggregator.BucketCountThresholds bucketCountThresholds,
IncludeExclude includeExclude,
AggregationContext context,
Aggregator parent,
SignificanceHeuristic significanceHeuristic,
SignificanceLookup lookup,
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException {
final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
return new MapStringTermsAggregator(
name,
factories,
new MapStringTermsAggregator.ValuesSourceCollectorSource(valuesSourceConfig),
a -> a.new SignificantTermsResults(lookup, significanceHeuristic, cardinality),
null,
format,
bucketCountThresholds,
filter,
context,
parent,
SubAggCollectionMode.BREADTH_FIRST,
false,
cardinality,
metadata,
false
);
}
},
GLOBAL_ORDINALS(new ParseField("global_ordinals")) {
@Override
Aggregator create(
String name,
AggregatorFactories factories,
ValuesSourceConfig valuesSourceConfig,
DocValueFormat format,
TermsAggregator.BucketCountThresholds bucketCountThresholds,
IncludeExclude includeExclude,
AggregationContext context,
Aggregator parent,
SignificanceHeuristic significanceHeuristic,
SignificanceLookup lookup,
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException {
boolean remapGlobalOrd = true;
if (cardinality == CardinalityUpperBound.ONE && factories == AggregatorFactories.EMPTY && includeExclude == null) {
/*
* We don't need to remap global ords iff this aggregator:
* - collects from a single bucket AND
* - has no include/exclude rules AND
* - has no sub-aggregator
*/
remapGlobalOrd = false;
}
ValuesSource.Bytes.WithOrdinals.FieldData ordinalsValuesSource =
(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSourceConfig.getValuesSource();
SortedSetDocValues values = TermsAggregatorFactory.globalOrdsValues(context, ordinalsValuesSource);
return new GlobalOrdinalsStringTermsAggregator(
name,
factories,
a -> a.new SignificantTermsResults(lookup, significanceHeuristic, cardinality),
ordinalsValuesSource,
() -> TermsAggregatorFactory.globalOrdsValues(context, ordinalsValuesSource),
null,
format,
bucketCountThresholds,
TermsAggregatorFactory.gloabalOrdsFilter(includeExclude, format, values),
context,
parent,
remapGlobalOrd,
SubAggCollectionMode.BREADTH_FIRST,
false,
cardinality,
metadata,
false
);
}
};
public static ExecutionMode fromString(String value, final DeprecationLogger deprecationLogger) {
if ("global_ordinals".equals(value)) {
return GLOBAL_ORDINALS;
} else if ("global_ordinals_hash".equals(value)) {
/*
* We have no plans to remove this so we don't break anyone, no matter
* how few people still use this or how long it's been deprecated.
*/
deprecationLogger.warn(
DeprecationCategory.AGGREGATIONS,
"global_ordinals_hash",
"global_ordinals_hash is deprecated. Please use [global_ordinals] instead."
);
return GLOBAL_ORDINALS;
} else if ("map".equals(value)) {
return MAP;
}
throw new IllegalArgumentException("Unknown `execution_hint`: [" + value + "], expected any of [map, global_ordinals]");
}
private final ParseField parseField;
ExecutionMode(ParseField parseField) {
this.parseField = parseField;
}
abstract Aggregator create(
String name,
AggregatorFactories factories,
ValuesSourceConfig valuesSourceConfig,
DocValueFormat format,
TermsAggregator.BucketCountThresholds bucketCountThresholds,
IncludeExclude includeExclude,
AggregationContext context,
Aggregator parent,
SignificanceHeuristic significanceHeuristic,
SignificanceLookup lookup,
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException;
@Override
public String toString() {
return parseField.getPreferredName();
}
}
}
|
ExecutionMode
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-config/src/main/java/smoketest/config/SampleConfigApplication.java
|
{
"start": 955,
"end": 1098
}
|
class ____ {
public static void main(String[] args) {
SpringApplication.run(SampleConfigApplication.class, args);
}
}
|
SampleConfigApplication
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java
|
{
"start": 2262,
"end": 2880
}
|
class ____ extends TextSimilarityRankBuilder {
public TopNConfigurationAcceptingTextSimilarityRankBuilder(
String field,
String inferenceId,
String inferenceText,
int rankWindowSize,
Float minScore,
int topN
) {
super(field, inferenceId + "-task-settings-top-" + topN, inferenceText, rankWindowSize, minScore, false, null);
}
}
/**
* {@code TextSimilarityRankBuilder} that simulates an inference call returning N results.
*/
public static
|
TopNConfigurationAcceptingTextSimilarityRankBuilder
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/embeddable/MyEntity.java
|
{
"start": 232,
"end": 306
}
|
class ____ extends Base {
@Embedded
private EmbeddableEntity emb;
}
|
MyEntity
|
java
|
apache__camel
|
components/camel-lumberjack/src/test/java/org/apache/camel/component/lumberjack/LumberjackMultiThreadIT.java
|
{
"start": 4170,
"end": 4620
}
|
class ____ extends Thread {
private List<Integer> responses;
@Override
public void run() {
try {
this.responses = LumberjackUtil.sendMessages(PORT, null, Arrays.asList(15, 10));
latch.countDown();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
interrupted = true;
}
}
}
}
|
LumberjackThreadTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStorageInfo.java
|
{
"start": 11797,
"end": 11860
}
|
enum ____ {
ADDED, REPLACED, ALREADY_EXIST
}
}
|
AddBlockResult
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/bytecode/enhancement/extension/engine/BytecodeEnhancedTestEngine.java
|
{
"start": 16071,
"end": 18836
}
|
class ____ extends TestMethodTestDescriptor {
private final boolean enhanced;
private final String[] classes;
public EnhancementWorkedCheckMethodTestDescriptor(UniqueId uniqueId, Class<?> testClass, Supplier<List<Class<?>>> enclosingInstanceTypes, JupiterConfiguration configuration, boolean enhanced, String[] classes) {
super(
prepareId( uniqueId, testMethod( enhanced ) ),
testClass, testMethod( enhanced ),
enclosingInstanceTypes,
configuration
);
this.enhanced = enhanced;
this.classes = classes;
}
private static Method testMethod(boolean enhanced) {
return enhanced ? METHOD_ENHANCED : METHOD_NOT_ENHANCED;
}
@Override
public JupiterEngineExecutionContext execute(JupiterEngineExecutionContext context,
DynamicTestExecutor dynamicTestExecutor) {
ExtensionContext extensionContext = context.getExtensionContext();
ThrowableCollector throwableCollector = context.getThrowableCollector();
throwableCollector.execute( () -> {
Object instance = extensionContext.getRequiredTestInstance();
for ( String className : classes ) {
assertEnhancementWorked( className, enhanced, instance );
}
} );
return context;
}
private static final Method METHOD_ENHANCED;
private static final Method METHOD_NOT_ENHANCED;
static {
try {
METHOD_ENHANCED = EnhancementWorkedCheckMethodTestDescriptor.class.getDeclaredMethod(
"assertEntityClassesWereEnhanced" );
METHOD_NOT_ENHANCED = EnhancementWorkedCheckMethodTestDescriptor.class.getDeclaredMethod(
"assertEntityClassesWereNotEnhanced" );
}
catch (NoSuchMethodException e) {
throw new RuntimeException( e );
}
}
private static void assertEntityClassesWereEnhanced() {
// just for JUint to display the name
}
private static void assertEntityClassesWereNotEnhanced() {
// just for JUint to display the name
}
private static void assertEnhancementWorked(String className, boolean enhanced, Object testClassInstance) {
try {
Class<?> loaded = testClassInstance.getClass().getClassLoader().loadClass( className );
if ( enhanced ) {
assertThat( loaded.getDeclaredMethods() )
.extracting( Method::getName )
.anyMatch( name -> name.startsWith( "$$_hibernate_" ) );
}
else {
assertThat( loaded.getDeclaredMethods() )
.extracting( Method::getName )
.noneMatch( name -> name.startsWith( "$$_hibernate_" ) );
}
}
catch (ClassNotFoundException e) {
Assertions.fail( e.getMessage() );
}
}
private static UniqueId prepareId(UniqueId uniqueId, Method method) {
return uniqueId.append(
TestMethodTestDescriptor.SEGMENT_TYPE,
method.getName()
);
}
}
}
|
EnhancementWorkedCheckMethodTestDescriptor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/boot/models/hbm/join/Data.java
|
{
"start": 241,
"end": 303
}
|
class ____ {
private String first;
private String second;
}
|
Data
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/time/FastDateParserTest.java
|
{
"start": 2896,
"end": 32080
}
|
enum ____ {
// @formatter:off
India(INDIA, "+05", "+0530", "+05:30", true),
Greenwich(TimeZones.GMT, "Z", "Z", "Z", false),
NewYork(NEW_YORK, "-05", "-0500", "-05:00", false);
// @formatter:on
final TimeZone zone;
final String one;
final String two;
final String three;
final long offset;
Expected1806(final TimeZone zone, final String one, final String two, final String three,
final boolean hasHalfHourOffset) {
this.zone = zone;
this.one = one;
this.two = two;
this.three = three;
this.offset = hasHalfHourOffset ? 30 * 60 * 1000 : 0;
}
}
static final String DATE_PARSER_PARAMETERS = "dateParserParameters";
static final String SHORT_FORMAT_NOERA = "y/M/d/h/a/m/s/E";
static final String LONG_FORMAT_NOERA = "yyyy/MMMM/dddd/hhhh/mmmm/ss/aaaa/EEEE";
static final String SHORT_FORMAT = "G/" + SHORT_FORMAT_NOERA;
static final String LONG_FORMAT = "GGGG/" + LONG_FORMAT_NOERA;
private static final String yMdHmsSZ = "yyyy-MM-dd'T'HH:mm:ss.SSS Z";
private static final String DMY_DOT = "dd.MM.yyyy";
private static final String YMD_SLASH = "yyyy/MM/dd";
private static final String MDY_DASH = "MM-DD-yyyy";
private static final String MDY_SLASH = "MM/DD/yyyy";
private static final TimeZone REYKJAVIK = TimeZones.getTimeZone("Atlantic/Reykjavik");
private static final TimeZone NEW_YORK = TimeZones.getTimeZone("America/New_York");
private static final TimeZone INDIA = TimeZones.getTimeZone("Asia/Calcutta");
private static final Locale SWEDEN = new Locale("sv", "SE");
static void checkParse(final Locale locale, final Calendar cal, final SimpleDateFormat simpleDateFormat,
final DateParser dateParser) {
final String formattedDate = simpleDateFormat.format(cal.getTime());
checkParse(locale, simpleDateFormat, dateParser, formattedDate, formattedDate);
checkParse(locale, simpleDateFormat, dateParser, formattedDate.toLowerCase(locale), formattedDate);
checkParse(locale, simpleDateFormat, dateParser, formattedDate.toUpperCase(locale), formattedDate);
}
static void checkParse(final Locale locale, final SimpleDateFormat simpleDateFormat, final DateParser dateParser,
final String formattedDate, final String originalFormattedDate) {
try {
final Date expectedTime = simpleDateFormat.parse(formattedDate);
final Date actualTime = dateParser.parse(formattedDate);
assertEquals(expectedTime, actualTime,
"locale: " + locale + ", formattedDate: '" + formattedDate + "', originalFormattedDate: '"
+ originalFormattedDate + ", simpleDateFormat.pattern: '" + simpleDateFormat + "', Java: "
+ SystemUtils.JAVA_RUNTIME_VERSION + "\n");
} catch (final Exception e) {
fail("locale: " + locale + ", formattedDate: '" + formattedDate + "', error : " + e + "\n", e);
}
}
static Stream<Arguments> dateParserParameters() {
return Stream.of(
// @formatter:off
Arguments.of((TriFunction<String, TimeZone, Locale, DateParser>) (format, timeZone, locale)
-> new FastDateParser(format, timeZone, locale, null)),
Arguments.of((TriFunction<String, TimeZone, Locale, DateParser>) FastDateFormat::getInstance)
// @formatter:on
);
}
private static Calendar initializeCalendar(final TimeZone timeZone) {
final Calendar cal = Calendar.getInstance(timeZone);
cal.set(Calendar.YEAR, 2001);
cal.set(Calendar.MONTH, 1); // not daylight savings
cal.set(Calendar.DAY_OF_MONTH, 4);
cal.set(Calendar.HOUR_OF_DAY, 12);
cal.set(Calendar.MINUTE, 8);
cal.set(Calendar.SECOND, 56);
cal.set(Calendar.MILLISECOND, 235);
return cal;
}
static ArgumentSets testParsesFactory() {
// @formatter:off
return ArgumentSets
.argumentsForFirstParameter(LONG_FORMAT, SHORT_FORMAT)
.argumentsForNextParameter(LocaleUtils.availableLocaleList())
.argumentsForNextParameter(NEW_YORK, REYKJAVIK, TimeZones.GMT)
.argumentsForNextParameter(2003, 1940, 1868, 1867, 1, -1, -1940);
// @formatter:on
}
private final TriFunction<String, TimeZone, Locale, DateParser> dateParserProvider = (format, timeZone, locale) -> new FastDateParser(format, timeZone,
locale, null);
@BeforeEach
@AfterEach
void clear() {
AbstractFormatCache.clear();
FastDateFormat.clear();
FastDateParser.clear();
FastDatePrinter.clear();
}
private DateParser getDateInstance(final int dateStyle, final Locale locale) {
return getInstance(null, AbstractFormatCache.getPatternForStyle(Integer.valueOf(dateStyle), null, locale), TimeZone.getDefault(), Locale.getDefault());
}
private Calendar getEraStart(int year, final TimeZone zone, final Locale locale) {
final Calendar cal = Calendar.getInstance(zone, locale);
cal.clear();
// https://docs.oracle.com/javase/8/docs/technotes/guides/intl/calendar.doc.html
if (locale.equals(FastDateParser.JAPANESE_IMPERIAL)) {
if (year < 1868) {
cal.set(Calendar.ERA, 0);
cal.set(Calendar.YEAR, 1868 - year);
}
} else {
if (year < 0) {
cal.set(Calendar.ERA, GregorianCalendar.BC);
year = -year;
}
cal.set(Calendar.YEAR, year / 100 * 100);
}
return cal;
}
DateParser getInstance(final String format) {
return getInstance(null, format, TimeZone.getDefault(), Locale.getDefault());
}
DateParser getInstance(final String format, final Locale locale) {
return getInstance(null, format, TimeZone.getDefault(), locale);
}
private DateParser getInstance(final String format, final TimeZone timeZone) {
return getInstance(null, format, timeZone, Locale.getDefault());
}
/**
* Override this method in derived tests to change the construction of instances
*
* @param dpProvider TODO
* @param format the format string to use
* @param timeZone the time zone to use
* @param locale the locale to use
* @return the DateParser instance to use for testing
*/
protected DateParser getInstance(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider,
final String format, final TimeZone timeZone, final Locale locale) {
return (dpProvider == null ? this.dateParserProvider : dpProvider).apply(format, timeZone, locale);
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void test_Equality_Hash(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) {
// @formatter:off
final DateParser[] parsers = {
getInstance(dpProvider, yMdHmsSZ, NEW_YORK, Locale.US),
getInstance(dpProvider, DMY_DOT, NEW_YORK, Locale.US),
getInstance(dpProvider, YMD_SLASH, NEW_YORK, Locale.US),
getInstance(dpProvider, MDY_DASH, NEW_YORK, Locale.US),
getInstance(dpProvider, MDY_SLASH, NEW_YORK, Locale.US),
getInstance(dpProvider, MDY_SLASH, REYKJAVIK, Locale.US),
getInstance(dpProvider, MDY_SLASH, REYKJAVIK, SWEDEN)
};
// @formatter:on
final Map<DateParser, Integer> map = new HashMap<>();
int i = 0;
for (final DateParser parser : parsers) {
map.put(parser, Integer.valueOf(i++));
}
i = 0;
for (final DateParser parser : parsers) {
assertEquals(i++, map.get(parser).intValue());
}
}
@Test
void test1806() throws ParseException {
final String formatStub = "yyyy-MM-dd'T'HH:mm:ss.SSS";
final String dateStub = "2001-02-04T12:08:56.235";
for (final Expected1806 trial : Expected1806.values()) {
final Calendar cal = initializeCalendar(trial.zone);
final String message = trial.zone.getDisplayName() + ";";
DateParser parser = getInstance(formatStub + "X", trial.zone);
assertEquals(cal.getTime().getTime(), parser.parse(dateStub + trial.one).getTime() - trial.offset,
message + trial.one);
parser = getInstance(formatStub + "XX", trial.zone);
assertEquals(cal.getTime(), parser.parse(dateStub + trial.two), message + trial.two);
parser = getInstance(formatStub + "XXX", trial.zone);
assertEquals(cal.getTime(), parser.parse(dateStub + trial.three), message + trial.three);
}
}
@Test
void test1806Argument() {
assertIllegalArgumentException(() -> getInstance("XXXX"));
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testAmPm(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) throws ParseException {
final Calendar cal = Calendar.getInstance(NEW_YORK, Locale.US);
cal.clear();
final DateParser h = getInstance(dpProvider, "yyyy-MM-dd hh a mm:ss", NEW_YORK, Locale.US);
final DateParser K = getInstance(dpProvider, "yyyy-MM-dd KK a mm:ss", NEW_YORK, Locale.US);
final DateParser k = getInstance(dpProvider, "yyyy-MM-dd kk:mm:ss", NEW_YORK, Locale.US);
final DateParser H = getInstance(dpProvider, "yyyy-MM-dd HH:mm:ss", NEW_YORK, Locale.US);
cal.set(2010, Calendar.AUGUST, 1, 0, 33, 20);
assertEquals(cal.getTime(), h.parse("2010-08-01 12 AM 33:20"));
assertEquals(cal.getTime(), K.parse("2010-08-01 0 AM 33:20"));
assertEquals(cal.getTime(), k.parse("2010-08-01 00:33:20"));
assertEquals(cal.getTime(), H.parse("2010-08-01 00:33:20"));
cal.set(2010, Calendar.AUGUST, 1, 3, 33, 20);
assertEquals(cal.getTime(), h.parse("2010-08-01 3 AM 33:20"));
assertEquals(cal.getTime(), K.parse("2010-08-01 3 AM 33:20"));
assertEquals(cal.getTime(), k.parse("2010-08-01 03:33:20"));
assertEquals(cal.getTime(), H.parse("2010-08-01 03:33:20"));
cal.set(2010, Calendar.AUGUST, 1, 15, 33, 20);
assertEquals(cal.getTime(), h.parse("2010-08-01 3 PM 33:20"));
assertEquals(cal.getTime(), K.parse("2010-08-01 3 PM 33:20"));
assertEquals(cal.getTime(), k.parse("2010-08-01 15:33:20"));
assertEquals(cal.getTime(), H.parse("2010-08-01 15:33:20"));
cal.set(2010, Calendar.AUGUST, 1, 12, 33, 20);
assertEquals(cal.getTime(), h.parse("2010-08-01 12 PM 33:20"));
assertEquals(cal.getTime(), K.parse("2010-08-01 0 PM 33:20"));
assertEquals(cal.getTime(), k.parse("2010-08-01 12:33:20"));
assertEquals(cal.getTime(), H.parse("2010-08-01 12:33:20"));
}
@Test
void testDayNumberOfWeek() throws ParseException {
final DateParser parser = getInstance("u");
final Calendar calendar = Calendar.getInstance();
calendar.setTime(parser.parse("1"));
assertEquals(Calendar.MONDAY, calendar.get(Calendar.DAY_OF_WEEK));
calendar.setTime(parser.parse("6"));
assertEquals(Calendar.SATURDAY, calendar.get(Calendar.DAY_OF_WEEK));
calendar.setTime(parser.parse("7"));
assertEquals(Calendar.SUNDAY, calendar.get(Calendar.DAY_OF_WEEK));
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testDayOf(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) throws ParseException {
final Calendar cal = Calendar.getInstance(NEW_YORK, Locale.US);
cal.clear();
cal.set(2003, Calendar.FEBRUARY, 10);
final DateParser fdf = getInstance(dpProvider, "W w F D y", NEW_YORK, Locale.US);
assertEquals(cal.getTime(), fdf.parse("3 7 2 41 03"));
}
@Test
void testEquals() {
final DateParser parser1 = getInstance(YMD_SLASH);
final DateParser parser2 = getInstance(YMD_SLASH);
assertEquals(parser1, parser2);
assertEquals(parser1.hashCode(), parser2.hashCode());
assertNotEquals(parser1, new Object());
}
@Test
void testJpLocales() {
final Calendar cal = Calendar.getInstance(TimeZones.GMT);
cal.clear();
cal.set(2003, Calendar.FEBRUARY, 10);
cal.set(Calendar.ERA, GregorianCalendar.BC);
final Locale locale = LocaleUtils.toLocale("zh");
// ja_JP_JP cannot handle dates before 1868 properly
final SimpleDateFormat sdf = new SimpleDateFormat(LONG_FORMAT, locale);
final DateParser fdf = getInstance(LONG_FORMAT, locale);
// If parsing fails, a ParseException will be thrown and the test will fail
checkParse(locale, cal, sdf, fdf);
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testLANG_831(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) throws Exception {
testSdfAndFdp(dpProvider, "M E", "3 Tue", true);
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testLANG_832(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) throws Exception {
testSdfAndFdp(dpProvider, "'d'd", "d3", false); // OK
testSdfAndFdp(dpProvider, "'d'd'", "d3", true); // should fail (unterminated quote)
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testLang1121(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) throws ParseException {
final TimeZone kst = TimeZones.getTimeZone("KST");
final DateParser fdp = getInstance(dpProvider, "yyyyMMdd", kst, Locale.KOREA);
assertThrows(ParseException.class, () -> fdp.parse("2015"));
// Wed Apr 29 00:00:00 KST 2015
Date actual = fdp.parse("20150429");
final Calendar cal = Calendar.getInstance(kst, Locale.KOREA);
cal.clear();
cal.set(2015, Calendar.APRIL, 29);
Date expected = cal.getTime();
assertEquals(expected, actual);
final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd", Locale.KOREA);
sdf.setTimeZone(kst);
expected = sdf.parse("20150429113100");
// Thu Mar 16 00:00:00 KST 81724
actual = fdp.parse("20150429113100");
assertEquals(expected, actual);
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testLang1380(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) throws ParseException {
final Calendar expected = Calendar.getInstance(TimeZones.GMT, Locale.FRANCE);
expected.clear();
expected.set(2014, Calendar.APRIL, 14);
final DateParser fdp = getInstance(dpProvider, "dd MMM yyyy", TimeZones.GMT, Locale.FRANCE);
assertEquals(expected.getTime(), fdp.parse("14 avril 2014"));
assertEquals(expected.getTime(), fdp.parse("14 avr. 2014"));
assertEquals(expected.getTime(), fdp.parse("14 avr 2014"));
}
@Test
void testLang303() throws ParseException {
DateParser parser = getInstance(YMD_SLASH);
final Calendar cal = Calendar.getInstance();
cal.set(2004, Calendar.DECEMBER, 31);
final Date date = parser.parse("2004/11/31");
parser = SerializationUtils.deserialize(SerializationUtils.serialize((Serializable) parser));
assertEquals(date, parser.parse("2004/11/31"));
}
@Test
void testLang538() throws ParseException {
final DateParser parser = getInstance("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", TimeZones.GMT);
final Calendar cal = Calendar.getInstance(TimeZones.getTimeZone("GMT-8"));
cal.clear();
cal.set(2009, Calendar.OCTOBER, 16, 8, 42, 16);
assertEquals(cal.getTime(), parser.parse("2009-10-16T16:42:16.000Z"));
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testLang996(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) throws ParseException {
final Calendar expected = Calendar.getInstance(NEW_YORK, Locale.US);
expected.clear();
expected.set(2014, Calendar.MAY, 14);
final DateParser fdp = getInstance(dpProvider, "ddMMMyyyy", NEW_YORK, Locale.US);
assertEquals(expected.getTime(), fdp.parse("14may2014"));
assertEquals(expected.getTime(), fdp.parse("14MAY2014"));
assertEquals(expected.getTime(), fdp.parse("14May2014"));
}
@Test
void testLocaleMatches() {
final DateParser parser = getInstance(yMdHmsSZ, SWEDEN);
assertEquals(SWEDEN, parser.getLocale());
}
/**
* Tests that pre-1000AD years get padded with yyyy
*
* @throws ParseException so we don't have to catch it
*/
@Test
void testLowYearPadding() throws ParseException {
final DateParser parser = getInstance(YMD_SLASH);
final Calendar cal = Calendar.getInstance();
cal.clear();
cal.set(1, Calendar.JANUARY, 1);
assertEquals(cal.getTime(), parser.parse("0001/01/01"));
cal.set(10, Calendar.JANUARY, 1);
assertEquals(cal.getTime(), parser.parse("0010/01/01"));
cal.set(100, Calendar.JANUARY, 1);
assertEquals(cal.getTime(), parser.parse("0100/01/01"));
cal.set(999, Calendar.JANUARY, 1);
assertEquals(cal.getTime(), parser.parse("0999/01/01"));
}
@Test
void testMilleniumBug() throws ParseException {
final DateParser parser = getInstance(DMY_DOT);
final Calendar cal = Calendar.getInstance();
cal.clear();
cal.set(1000, Calendar.JANUARY, 1);
assertEquals(cal.getTime(), parser.parse("01.01.1000"));
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testParseLongShort(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider)
throws ParseException {
final Calendar cal = Calendar.getInstance(NEW_YORK, Locale.US);
cal.clear();
cal.set(2003, Calendar.FEBRUARY, 10, 15, 33, 20);
cal.set(Calendar.MILLISECOND, 989);
cal.setTimeZone(NEW_YORK);
DateParser fdf = getInstance(dpProvider, "yyyy GGGG MMMM dddd aaaa EEEE HHHH mmmm ssss SSSS ZZZZ", NEW_YORK,
Locale.US);
assertEquals(cal.getTime(), fdf.parse("2003 AD February 0010 PM Monday 0015 0033 0020 0989 GMT-05:00"));
cal.set(Calendar.ERA, GregorianCalendar.BC);
final Date parse = fdf.parse("2003 BC February 0010 PM Saturday 0015 0033 0020 0989 GMT-05:00");
assertEquals(cal.getTime(), parse);
fdf = getInstance(null, "y G M d a E H m s S Z", NEW_YORK, Locale.US);
assertEquals(cal.getTime(), fdf.parse("03 BC 2 10 PM Sat 15 33 20 989 -0500"));
cal.set(Calendar.ERA, GregorianCalendar.AD);
assertEquals(cal.getTime(), fdf.parse("03 AD 2 10 PM Saturday 15 33 20 989 -0500"));
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testParseNumerics(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider)
throws ParseException {
final Calendar cal = Calendar.getInstance(NEW_YORK, Locale.US);
cal.clear();
cal.set(2003, Calendar.FEBRUARY, 10, 15, 33, 20);
cal.set(Calendar.MILLISECOND, 989);
final DateParser fdf = getInstance(dpProvider, "yyyyMMddHHmmssSSS", NEW_YORK, Locale.US);
assertEquals(cal.getTime(), fdf.parse("20030210153320989"));
}
@Test
void testParseOffset() {
final DateParser parser = getInstance(YMD_SLASH);
final Date date = parser.parse("Today is 2015/07/04", new ParsePosition(9));
final Calendar cal = Calendar.getInstance();
cal.clear();
cal.set(2015, Calendar.JULY, 4);
assertEquals(cal.getTime(), date);
}
@CartesianTest
@CartesianTest.MethodFactory("testParsesFactory")
// Check that all Locales can parse the formats we use
void testParses(final String format, final Locale locale, final TimeZone timeZone, final int year) throws Exception {
final Calendar cal = getEraStart(year, timeZone, locale);
final Date centuryStart = cal.getTime();
cal.set(Calendar.MONTH, 1);
cal.set(Calendar.DAY_OF_MONTH, 10);
final Date in = cal.getTime();
final FastDateParser fastDateParser = new FastDateParser(format, timeZone, locale, centuryStart);
validateSdfFormatFdpParseEquality(format, locale, timeZone, fastDateParser, in, year, centuryStart);
}
/**
* Fails on Java 16 Early Access build 25 and above, last tested with build 36.
*/
@Test
void testParsesKnownJava16Ea25Failure() throws Exception {
final String format = LONG_FORMAT;
final int year = 2003;
final Locale locale = new Locale.Builder().setLanguage("sq").setRegion("MK").build();
assertEquals("sq_MK", locale.toString());
assertNotNull(locale);
final TimeZone timeZone = NEW_YORK;
final Calendar cal = getEraStart(year, timeZone, locale);
final Date centuryStart = cal.getTime();
cal.set(Calendar.MONTH, 1);
cal.set(Calendar.DAY_OF_MONTH, 10);
final Date in = cal.getTime();
final FastDateParser fastDateParser = new FastDateParser(format, timeZone, locale, centuryStart);
validateSdfFormatFdpParseEquality(format, locale, timeZone, fastDateParser, in, year, centuryStart);
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testParseZone(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider)
throws ParseException {
final Calendar cal = Calendar.getInstance(NEW_YORK, Locale.US);
cal.clear();
cal.set(2003, Calendar.JULY, 10, 16, 33, 20);
final DateParser fdf = getInstance(dpProvider, yMdHmsSZ, NEW_YORK, Locale.US);
assertEquals(cal.getTime(), fdf.parse("2003-07-10T15:33:20.000 -0500"));
assertEquals(cal.getTime(), fdf.parse("2003-07-10T15:33:20.000 GMT-05:00"));
assertEquals(cal.getTime(), fdf.parse("2003-07-10T16:33:20.000 Eastern Daylight Time"));
assertEquals(cal.getTime(), fdf.parse("2003-07-10T16:33:20.000 EDT"));
cal.setTimeZone(TimeZones.getTimeZone("GMT-3"));
cal.set(2003, Calendar.FEBRUARY, 10, 9, 0, 0);
assertEquals(cal.getTime(), fdf.parse("2003-02-10T09:00:00.000 -0300"));
cal.setTimeZone(TimeZones.getTimeZone("GMT+5"));
cal.set(2003, Calendar.FEBRUARY, 10, 15, 5, 6);
assertEquals(cal.getTime(), fdf.parse("2003-02-10T15:05:06.000 +0500"));
}
@Test
void testPatternMatches() {
final DateParser parser = getInstance(yMdHmsSZ);
assertEquals(yMdHmsSZ, parser.getPattern());
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testQuotes(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider) throws ParseException {
final Calendar cal = Calendar.getInstance(NEW_YORK, Locale.US);
cal.clear();
cal.set(2003, Calendar.FEBRUARY, 10, 15, 33, 20);
cal.set(Calendar.MILLISECOND, 989);
final DateParser fdf = getInstance(dpProvider, "''yyyyMMdd'A''B'HHmmssSSS''", NEW_YORK, Locale.US);
assertEquals(cal.getTime(), fdf.parse("'20030210A'B153320989'"));
}
private void testSdfAndFdp(final TriFunction<String, TimeZone, Locale, DateParser> dbProvider, final String format,
final String date, final boolean shouldFail) throws Exception {
Date dfdp = null;
Date dsdf = null;
Throwable f = null;
Throwable s = null;
try {
final SimpleDateFormat sdf = new SimpleDateFormat(format, Locale.US);
sdf.setTimeZone(NEW_YORK);
dsdf = sdf.parse(date);
assertFalse(shouldFail, "Expected SDF failure, but got " + dsdf + " for [" + format + ", " + date + "]");
} catch (final Exception e) {
s = e;
if (!shouldFail) {
throw e;
}
}
try {
final DateParser fdp = getInstance(dbProvider, format, NEW_YORK, Locale.US);
dfdp = fdp.parse(date);
assertFalse(shouldFail, "Expected FDF failure, but got " + dfdp + " for [" + format + ", " + date + "]");
} catch (final Exception e) {
f = e;
if (!shouldFail) {
throw e;
}
}
// SDF and FDF should produce equivalent results
assertEquals(f == null, s == null, "Should both or neither throw Exceptions");
assertEquals(dsdf, dfdp, "Parsed dates should be equal");
}
/**
* Test case for {@link FastDateParser#FastDateParser(String, TimeZone, Locale)}.
*
* @throws ParseException so we don't have to catch it
*/
@Test
void testShortDateStyleWithLocales() throws ParseException {
DateParser fdf = getDateInstance(FastDateFormat.SHORT, Locale.US);
final Calendar cal = Calendar.getInstance();
cal.clear();
cal.set(2004, Calendar.FEBRUARY, 3);
assertEquals(cal.getTime(), fdf.parse("2/3/04"));
fdf = getDateInstance(FastDateFormat.SHORT, SWEDEN);
assertEquals(cal.getTime(), fdf.parse("2004-02-03"));
}
@ParameterizedTest
@MethodSource(DATE_PARSER_PARAMETERS)
void testSpecialCharacters(final TriFunction<String, TimeZone, Locale, DateParser> dpProvider)
throws Exception {
testSdfAndFdp(dpProvider, "q", "", true); // bad pattern character (at present)
testSdfAndFdp(dpProvider, "Q", "", true); // bad pattern character
testSdfAndFdp(dpProvider, "$", "$", false); // OK
testSdfAndFdp(dpProvider, "?.d", "?.12", false); // OK
testSdfAndFdp(dpProvider, "''yyyyMMdd'A''B'HHmmssSSS''", "'20030210A'B153320989'", false); // OK
testSdfAndFdp(dpProvider, "''''yyyyMMdd'A''B'HHmmssSSS''", "''20030210A'B153320989'", false); // OK
testSdfAndFdp(dpProvider, "'$\\Ed'", "$\\Ed", false); // OK
// quoted characters are case-sensitive
testSdfAndFdp(dpProvider, "'QED'", "QED", false);
testSdfAndFdp(dpProvider, "'QED'", "qed", true);
// case-sensitive after insensitive Month field
testSdfAndFdp(dpProvider, "yyyy-MM-dd 'QED'", "2003-02-10 QED", false);
testSdfAndFdp(dpProvider, "yyyy-MM-dd 'QED'", "2003-02-10 qed", true);
}
@Test
void testTimeZoneMatches() {
final DateParser parser = getInstance(yMdHmsSZ, REYKJAVIK);
assertEquals(REYKJAVIK, parser.getTimeZone());
}
@Test
void testToStringContainsName() {
final DateParser parser = getInstance(YMD_SLASH);
assertTrue(parser.toString().startsWith("FastDate"));
}
// we cannot use historic dates to test time zone parsing, some time zones have second offsets
// as well as hours and minutes which makes the z formats a low fidelity round trip
@ParameterizedTest
@MethodSource("org.apache.commons.lang3.LocaleUtils#availableLocaleList()")
void testTzParses(final Locale locale) throws Exception {
// Check that all Locales can parse the time formats we use
final FastDateParser fdp = new FastDateParser("yyyy/MM/dd z", TimeZone.getDefault(), locale);
for (final TimeZone timeZone : new TimeZone[] { NEW_YORK, REYKJAVIK, TimeZones.GMT }) {
final Calendar cal = Calendar.getInstance(timeZone, locale);
cal.clear();
cal.set(Calendar.YEAR, 2000);
cal.set(Calendar.MONTH, 1);
cal.set(Calendar.DAY_OF_MONTH, 10);
final Date expected = cal.getTime();
final Date actual = fdp.parse("2000/02/10 " + timeZone.getDisplayName(locale));
assertEquals(expected, actual, "timeZone:" + timeZone.getID() + " locale:" + locale.getDisplayName());
}
}
private void validateSdfFormatFdpParseEquality(final String formatStr, final Locale locale, final TimeZone timeZone,
final FastDateParser dateParser, final Date inDate, final int year, final Date csDate) throws ParseException {
final SimpleDateFormat sdf = new SimpleDateFormat(formatStr, locale);
sdf.setTimeZone(timeZone);
if (formatStr.equals(SHORT_FORMAT)) {
sdf.set2DigitYearStart(csDate);
}
final String fmt = sdf.format(inDate);
// System.out.printf("[Java %s] Date: '%s' formatted with '%s' -> '%s'%n", SystemUtils.JAVA_RUNTIME_VERSION, inDate,
// formatStr, fmt);
try {
final Date out = dateParser.parse(fmt);
assertEquals(inDate, out, "format: '" + formatStr + "', locale: '" + locale + "', time zone: '"
+ timeZone.getID() + "', year: " + year + ", parse: '" + fmt);
} catch (final ParseException pe) {
if (year >= 1868 || !locale.getCountry().equals("JP")) {
// LANG-978
throw pe;
}
}
}
}
|
Expected1806
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DomainModel.java
|
{
"start": 2463,
"end": 2542
}
|
interface ____ {
String name();
Class<?> importedClass();
}
}
|
ExtraQueryImport
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogToolUtils.java
|
{
"start": 1658,
"end": 7695
}
|
class ____ {
private LogToolUtils() {}
public static final String CONTAINER_ON_NODE_PATTERN =
"Container: %s on %s";
/**
* Formats the header of an aggregated log file.
*/
private static byte[] formatContainerLogHeader(String containerId,
String nodeId, ContainerLogAggregationType logType, String fileName,
String lastModifiedTime, long fileLength) {
StringBuilder sb = new StringBuilder();
String containerStr = String.format(
LogToolUtils.CONTAINER_ON_NODE_PATTERN,
containerId, nodeId);
sb.append(containerStr + "\n")
.append("LogAggregationType: " + logType + "\n")
.append(StringUtils.repeat("=", containerStr.length()) + "\n")
.append("LogType:" + fileName + "\n")
.append("LogLastModifiedTime:" + lastModifiedTime + "\n")
.append("LogLength:" + fileLength + "\n")
.append("LogContents:\n");
return sb.toString().getBytes(StandardCharsets.UTF_8);
}
/**
* Output container log.
* @param containerId the containerId
* @param nodeId the nodeId
* @param fileName the log file name
* @param fileLength the log file length
* @param outputSize the output size
* @param lastModifiedTime the log file last modified time
* @param fis the log file input stream
* @param os the output stream
* @param buf the buffer
* @param logType the log type.
* @throws IOException if we can not access the log file.
*/
public static void outputContainerLog(String containerId, String nodeId,
String fileName, long fileLength, long outputSize,
String lastModifiedTime, InputStream fis, OutputStream os,
byte[] buf, ContainerLogAggregationType logType) throws IOException {
long toSkip = 0;
long totalBytesToRead = fileLength;
long skipAfterRead = 0;
if (outputSize < 0) {
long absBytes = Math.abs(outputSize);
if (absBytes < fileLength) {
toSkip = fileLength - absBytes;
totalBytesToRead = absBytes;
}
org.apache.hadoop.io.IOUtils.skipFully(fis, toSkip);
} else {
if (outputSize < fileLength) {
totalBytesToRead = outputSize;
skipAfterRead = fileLength - outputSize;
}
}
long curRead = 0;
long pendingRead = totalBytesToRead - curRead;
int toRead = pendingRead > buf.length ? buf.length
: (int) pendingRead;
int len = fis.read(buf, 0, toRead);
boolean keepGoing = (len != -1 && curRead < totalBytesToRead);
byte[] b = formatContainerLogHeader(containerId, nodeId, logType, fileName,
lastModifiedTime, fileLength);
os.write(b, 0, b.length);
while (keepGoing) {
os.write(buf, 0, len);
curRead += len;
pendingRead = totalBytesToRead - curRead;
toRead = pendingRead > buf.length ? buf.length
: (int) pendingRead;
len = fis.read(buf, 0, toRead);
keepGoing = (len != -1 && curRead < totalBytesToRead);
}
org.apache.hadoop.io.IOUtils.skipFully(fis, skipAfterRead);
os.flush();
}
public static void outputContainerLogThroughZeroCopy(String containerId,
String nodeId, String fileName, long fileLength, long outputSize,
String lastModifiedTime, FileInputStream fis, OutputStream os,
ContainerLogAggregationType logType) throws IOException {
long toSkip = 0;
long totalBytesToRead = fileLength;
if (outputSize < 0) {
long absBytes = Math.abs(outputSize);
if (absBytes < fileLength) {
toSkip = fileLength - absBytes;
totalBytesToRead = absBytes;
}
} else {
if (outputSize < fileLength) {
totalBytesToRead = outputSize;
}
}
// output log summary
byte[] b = formatContainerLogHeader(containerId, nodeId, logType, fileName,
lastModifiedTime, fileLength);
os.write(b, 0, b.length);
if (totalBytesToRead > 0) {
// output log content
FileChannel inputChannel = fis.getChannel();
WritableByteChannel outputChannel = Channels.newChannel(os);
long position = toSkip;
while (totalBytesToRead > 0) {
long transferred =
inputChannel.transferTo(position, totalBytesToRead, outputChannel);
totalBytesToRead -= transferred;
position += transferred;
}
os.flush();
}
}
/**
* Create the container log file under given (local directory/nodeId) and
* return the PrintStream object.
* @param localDir the Local Dir
* @param nodeId the NodeId
* @param containerId the ContainerId
* @return the printStream object
* @throws IOException if an I/O error occurs
*/
public static PrintStream createPrintStream(String localDir, String nodeId,
String containerId) throws IOException {
PrintStream out = System.out;
if(localDir != null && !localDir.isEmpty()) {
Path nodePath = new Path(localDir, LogAggregationUtils
.getNodeString(nodeId));
Files.createDirectories(Paths.get(nodePath.toString()));
Path containerLogPath = new Path(nodePath, containerId);
out = new PrintStream(containerLogPath.toString(), "UTF-8");
}
return out;
}
/**
* Redirect the {@link ContainerLogsRequest} to the NodeManager's
* NMWebServices.
*
* @param conf Configuration object
* @param webServiceClient client
* @param request the request for container logs
* @param logFile name of the log file
* @return response from NMWebServices
*/
public static Response getResponseFromNMWebService(Configuration conf,
Client webServiceClient, ContainerLogsRequest request, String logFile) {
WebTarget target =
webServiceClient.target(WebAppUtils.getHttpSchemePrefix(conf)
+ request.getNodeHttpAddress());
return target.path("ws").path("v1").path("node")
.path("containers").path(request.getContainerId()).path("logs")
.path(logFile)
.queryParam("size", Long.toString(request.getBytes()))
.request(MediaType.TEXT_PLAIN).get(Response.class);
}
}
|
LogToolUtils
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/bean/issues/RealSomething.java
|
{
"start": 859,
"end": 990
}
|
class ____ extends BaseSomething {
@Override
public Long doSomething(String name) {
return 123L;
}
}
|
RealSomething
|
java
|
spring-projects__spring-boot
|
module/spring-boot-mongodb/src/test/java/org/springframework/boot/mongodb/testcontainers/AbstractMongoContainerConnectionDetailsFactoryTests.java
|
{
"start": 1152,
"end": 1464
}
|
class ____ {
@Test
void shouldRegisterHints() {
RuntimeHints hints = ContainerConnectionDetailsFactoryHints.getRegisteredHints(getClass().getClassLoader());
assertThat(RuntimeHintsPredicates.reflection().onType(ConnectionString.class)).accepts(hints);
}
}
|
AbstractMongoContainerConnectionDetailsFactoryTests
|
java
|
google__auto
|
factory/src/test/resources/good/Generics.java
|
{
"start": 1621,
"end": 1682
}
|
class ____<M extends Bar> implements Foo<M> {}
}
|
FooImplWithClass
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/executor/ContainerLivenessContext.java
|
{
"start": 1394,
"end": 2257
}
|
class ____ {
private Container container;
private String user;
private String pid;
public Builder() {
}
public Builder setContainer(Container container) {
this.container = container;
return this;
}
public Builder setUser(String user) {
this.user = user;
return this;
}
public Builder setPid(String pid) {
this.pid = pid;
return this;
}
public ContainerLivenessContext build() {
return new ContainerLivenessContext(this);
}
}
private ContainerLivenessContext(Builder builder) {
this.container = builder.container;
this.user = builder.user;
this.pid = builder.pid;
}
public Container getContainer() {
return this.container;
}
public String getUser() {
return this.user;
}
public String getPid() {
return this.pid;
}
}
|
Builder
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/factory/cache/LocalResponseCacheGatewayFilterFactoryTests.java
|
{
"start": 2177,
"end": 2854
}
|
class ____ extends BaseWebClientTests {
private static final String CUSTOM_HEADER = "X-Custom-Header";
private static Long parseMaxAge(String cacheControlValue) {
if (StringUtils.hasText(cacheControlValue)) {
Pattern maxAgePattern = Pattern.compile("\\bmax-age=(\\d+)\\b");
Matcher matcher = maxAgePattern.matcher(cacheControlValue);
if (matcher.find()) {
return Long.parseLong(matcher.group(1));
}
}
return null;
}
@Nested
@SpringBootTest(properties = { "spring.cloud.gateway.server.webflux.filter.local-response-cache.enabled=true" },
webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public
|
LocalResponseCacheGatewayFilterFactoryTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/results/graph/entity/internal/EntityResultImpl.java
|
{
"start": 1153,
"end": 3671
}
|
class ____ extends AbstractEntityResultGraphNode
implements EntityResult, InitializerProducer<EntityResultImpl> {
private final TableGroup tableGroup;
private final String resultVariable;
public EntityResultImpl(
NavigablePath navigablePath,
EntityValuedModelPart entityValuedModelPart,
TableGroup tableGroup,
String resultVariable) {
super( entityValuedModelPart, navigablePath );
this.tableGroup = tableGroup;
this.resultVariable = resultVariable;
}
@Override
public NavigablePath resolveNavigablePath(Fetchable fetchable) {
if ( fetchable instanceof TableGroupProducer ) {
for ( TableGroupJoin tableGroupJoin : tableGroup.getTableGroupJoins() ) {
final NavigablePath navigablePath = tableGroupJoin.getNavigablePath();
if ( tableGroupJoin.getJoinedGroup().isFetched()
&& fetchable.getFetchableName().equals( navigablePath.getLocalName() )
&& tableGroupJoin.getJoinedGroup().getModelPart() == fetchable
&& castNonNull( navigablePath.getParent() ).equals( getNavigablePath() ) ) {
return navigablePath;
}
}
}
return super.resolveNavigablePath( fetchable );
}
@Override
public FetchableContainer getReferencedMappingType() {
return getReferencedMappingContainer();
}
@Override
public EntityValuedModelPart getReferencedModePart() {
return getEntityValuedModelPart();
}
@Override
public String getResultVariable() {
return resultVariable;
}
protected String getSourceAlias() {
return tableGroup.getSourceAlias();
}
@Override
public DomainResultAssembler createResultAssembler(
InitializerParent parent,
AssemblerCreationState creationState) {
return new EntityAssembler<>(
this.getResultJavaType(),
creationState.resolveInitializer( this, parent, this ).asEntityInitializer()
);
}
@Override
public Initializer<?> createInitializer(
EntityResultImpl resultGraphNode,
InitializerParent<?> parent,
AssemblerCreationState creationState) {
return resultGraphNode.createInitializer( parent, creationState );
}
@Override
public Initializer<?> createInitializer(InitializerParent<?> parent, AssemblerCreationState creationState) {
return new EntityInitializerImpl(
this,
getSourceAlias(),
getIdentifierFetch(),
getDiscriminatorFetch(),
null,
getRowIdResult(),
NotFoundAction.EXCEPTION,
false,
null,
true,
creationState
);
}
@Override
public String toString() {
return "EntityResultImpl {" + getNavigablePath() + "}";
}
}
|
EntityResultImpl
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/method/annotation/ExceptionHandlerMethodResolverTests.java
|
{
"start": 7119,
"end": 7476
}
|
class ____ {
public void handle() {}
@ExceptionHandler(IOException.class)
public void handleIOException() {
}
@ExceptionHandler(SocketException.class)
public void handleSocketException() {
}
@ExceptionHandler
public void handleIllegalArgumentException(IllegalArgumentException exception) {
}
}
@Controller
static
|
ExceptionController
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/BeanOverrideContextCustomizerFactoryTests.java
|
{
"start": 3980,
"end": 4079
}
|
class ____ {
@DummyBean(beanName = "counterBean")
private Integer counter;
}
}
static
|
Green
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/aot/AotEntityManagerFactoryCreator.java
|
{
"start": 1722,
"end": 5183
}
|
class ____ {
private final Supplier<EntityManagerFactory> factory;
private final Object key;
private AotEntityManagerFactoryCreator(Supplier<EntityManagerFactory> factory, Object key) {
this.factory = Lazy.of(factory);
this.key = key;
}
/**
* Create a {@code PersistenceUnitContext} from the given {@link AotRepositoryContext} using Jakarta
* Persistence-annotated classes.
* <p>
* The underlying {@link jakarta.persistence.metamodel.Metamodel} requires Hibernate to build metamodel information.
*
* @param repositoryContext repository context providing classes.
*/
public static AotEntityManagerFactoryCreator from(AotRepositoryContext repositoryContext) {
List<String> typeNames = repositoryContext.getResolvedTypes().stream()
.filter(AotEntityManagerFactoryCreator::isJakartaAnnotated).map(Class::getName).toList();
return from(PersistenceManagedTypes.of(typeNames, List.of()), typeNames);
}
/**
* Create a {@code PersistenceUnitContext} from the given {@link PersistenceUnitInfo}.
* <p>
* The underlying {@link jakarta.persistence.metamodel.Metamodel} requires Hibernate to build metamodel information.
*
* @param persistenceUnitInfo persistence unit info to use.
*/
public static AotEntityManagerFactoryCreator from(PersistenceUnitInfo persistenceUnitInfo) {
return from(() -> new AotMetamodel(persistenceUnitInfo), persistenceUnitInfo);
}
/**
* Create a {@code PersistenceUnitContext} from the given {@link PersistenceManagedTypes}.
* <p>
* The underlying {@link jakarta.persistence.metamodel.Metamodel} requires Hibernate to build metamodel information.
*
* @param managedTypes managed types to use.
*/
public static AotEntityManagerFactoryCreator from(PersistenceManagedTypes managedTypes) {
return from(managedTypes, managedTypes);
}
private static AotEntityManagerFactoryCreator from(PersistenceManagedTypes managedTypes, Object cacheKey) {
return from(() -> new AotMetamodel(managedTypes), cacheKey);
}
/**
* Create a {@code PersistenceUnitContext} from the given {@link EntityManagerFactory}.
*
* @param entityManagerFactory the entity manager factory to use.
*/
public static AotEntityManagerFactoryCreator just(EntityManagerFactory entityManagerFactory) {
return new AotEntityManagerFactoryCreator(() -> entityManagerFactory, entityManagerFactory.getMetamodel());
}
private static AotEntityManagerFactoryCreator from(Supplier<? extends AotMetamodel> metamodel, Object key) {
return new AotEntityManagerFactoryCreator(() -> metamodel.get().getEntityManagerFactory(), key);
}
private static boolean isJakartaAnnotated(Class<?> cls) {
return cls.isAnnotationPresent(Entity.class) //
|| cls.isAnnotationPresent(Embeddable.class) //
|| cls.isAnnotationPresent(MappedSuperclass.class) //
|| cls.isAnnotationPresent(Converter.class);
}
/**
* Return the {@link EntityManagerFactory}.
*
* @return the entity manager factory to use during AOT processing.
*/
public EntityManagerFactory getEntityManagerFactory() {
return factory.get();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof AotEntityManagerFactoryCreator that)) {
return false;
}
return ObjectUtils.nullSafeEquals(key, that.key);
}
@Override
public int hashCode() {
return ObjectUtils.nullSafeHashCode(key);
}
@Override
public String toString() {
return "AotEntityManagerFactory{" + key + '}';
}
}
|
AotEntityManagerFactoryCreator
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/transaction/RedissonTransactionalSet.java
|
{
"start": 1227,
"end": 10311
}
|
class ____<V> extends RedissonSet<V> {
private final TransactionalSet<V> transactionalSet;
private final AtomicBoolean executed;
public RedissonTransactionalSet(CommandAsyncExecutor commandExecutor,
String name, List<TransactionalOperation> operations, long timeout, AtomicBoolean executed, String transactionId) {
super(commandExecutor, name, null);
this.executed = executed;
RedissonSet<V> innerSet = new RedissonSet<V>(commandExecutor, name, null);
this.transactionalSet = new TransactionalSet<V>(commandExecutor, timeout, operations, innerSet, transactionId);
}
public RedissonTransactionalSet(Codec codec, CommandAsyncExecutor commandExecutor,
String name, List<TransactionalOperation> operations, long timeout, AtomicBoolean executed, String transactionId) {
super(codec, commandExecutor, name, null);
this.executed = executed;
RedissonSet<V> innerSet = new RedissonSet<V>(codec, commandExecutor, name, null);
this.transactionalSet = new TransactionalSet<V>(commandExecutor, timeout, operations, innerSet, transactionId);
}
@Override
public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
return transactionalSet.expireAsync(timeToLive, timeUnit, param, keys);
}
@Override
protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
return transactionalSet.expireAtAsync(timestamp, param, keys);
}
@Override
public RFuture<Boolean> clearExpireAsync() {
return transactionalSet.clearExpireAsync();
}
@Override
public RFuture<Boolean> moveAsync(int database) {
throw new UnsupportedOperationException("move method is not supported in transaction");
}
@Override
public RFuture<Void> migrateAsync(String host, int port, int database, long timeout) {
throw new UnsupportedOperationException("migrate method is not supported in transaction");
}
@Override
public <KOut, VOut> RCollectionMapReduce<V, KOut, VOut> mapReduce() {
throw new UnsupportedOperationException("mapReduce method is not supported in transaction");
}
@Override
public ScanResult<Object> scanIterator(String name, RedisClient client, String startPos, String pattern, int count) {
checkState();
return transactionalSet.scanIterator(name, client, startPos, pattern, count);
}
@Override
public RLock getFairLock(V value) {
throw new UnsupportedOperationException("getFairLock method is not supported in transaction");
}
@Override
public RCountDownLatch getCountDownLatch(V value) {
throw new UnsupportedOperationException("getCountDownLatch method is not supported in transaction");
}
@Override
public RPermitExpirableSemaphore getPermitExpirableSemaphore(V value) {
throw new UnsupportedOperationException("getPermitExpirableSemaphore method is not supported in transaction");
}
@Override
public RSemaphore getSemaphore(V value) {
throw new UnsupportedOperationException("getSemaphore method is not supported in transaction");
}
@Override
public RLock getLock(V value) {
throw new UnsupportedOperationException("getLock method is not supported in transaction");
}
@Override
public RReadWriteLock getReadWriteLock(V value) {
throw new UnsupportedOperationException("getReadWriteLock method is not supported in transaction");
}
@Override
public RFuture<Boolean> containsAsync(Object o) {
checkState();
return transactionalSet.containsAsync(o);
}
@Override
public RFuture<Set<V>> readAllAsync() {
checkState();
return transactionalSet.readAllAsync();
}
@Override
public RFuture<Boolean> addAsync(V e) {
checkState();
return transactionalSet.addAsync(e);
}
@Override
public RFuture<V> removeRandomAsync() {
checkState();
return transactionalSet.removeRandomAsync();
}
@Override
public RFuture<Set<V>> removeRandomAsync(int amount) {
checkState();
return transactionalSet.removeRandomAsync(amount);
}
@Override
public RFuture<Boolean> removeAsync(Object o) {
checkState();
return transactionalSet.removeAsync(o);
}
@Override
public RFuture<Boolean> moveAsync(String destination, V member) {
checkState();
return transactionalSet.moveAsync(destination, member);
}
@Override
public RFuture<Boolean> addAllAsync(Collection<? extends V> c) {
checkState();
return transactionalSet.addAllAsync(c);
}
@Override
public RFuture<Boolean> retainAllAsync(Collection<?> c) {
checkState();
return transactionalSet.retainAllAsync(c);
}
@Override
public RFuture<Boolean> removeAllAsync(Collection<?> c) {
checkState();
return transactionalSet.removeAllAsync(c);
}
@Override
public RFuture<Integer> unionAsync(String... names) {
checkState();
return transactionalSet.unionAsync(names);
}
@Override
public RFuture<Integer> diffAsync(String... names) {
checkState();
return transactionalSet.diffAsync(names);
}
@Override
public RFuture<Integer> intersectionAsync(String... names) {
checkState();
return transactionalSet.intersectionAsync(names);
}
@Override
public RFuture<Set<V>> readSortAsync(SortOrder order) {
checkState();
return transactionalSet.readSortAsync(order);
}
@Override
public RFuture<Set<V>> readSortAsync(SortOrder order, int offset, int count) {
checkState();
return transactionalSet.readSortAsync(order, offset, count);
}
@Override
public RFuture<Set<V>> readSortAsync(String byPattern, SortOrder order) {
checkState();
return transactionalSet.readSortAsync(byPattern, order);
}
@Override
public <T> RFuture<Collection<T>> readSortAsync(String byPattern, List<String> getPatterns, SortOrder order,
int offset, int count) {
checkState();
return transactionalSet.readSortAsync(byPattern, getPatterns, order, offset, count);
}
@Override
public RFuture<Set<V>> readSortAlphaAsync(SortOrder order) {
return transactionalSet.readSortAlphaAsync(order);
}
@Override
public RFuture<Set<V>> readSortAlphaAsync(SortOrder order, int offset, int count) {
return transactionalSet.readSortAlphaAsync(order, offset, count);
}
@Override
public RFuture<Set<V>> readSortAlphaAsync(String byPattern, SortOrder order) {
return transactionalSet.readSortAlphaAsync(byPattern, order);
}
@Override
public RFuture<Set<V>> readSortAlphaAsync(String byPattern, SortOrder order, int offset, int count) {
return transactionalSet.readSortAlphaAsync(byPattern, order, offset, count);
}
@Override
public <T> RFuture<Collection<T>> readSortAlphaAsync(String byPattern, List<String> getPatterns, SortOrder order) {
return transactionalSet.readSortAlphaAsync(byPattern, getPatterns, order);
}
@Override
public <T> RFuture<Collection<T>> readSortAlphaAsync(String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
return transactionalSet.readSortAlphaAsync(byPattern, getPatterns, order, offset, count);
}
@Override
public RFuture<Integer> sortToAsync(String destName, String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
checkState();
return transactionalSet.sortToAsync(destName, byPattern, getPatterns, order, offset, count);
}
@Override
public RFuture<Set<V>> readUnionAsync(String... names) {
checkState();
return transactionalSet.readUnionAsync(names);
}
@Override
public RFuture<Set<V>> readDiffAsync(String... names) {
checkState();
return transactionalSet.readDiffAsync(names);
}
@Override
public RFuture<Set<V>> readIntersectionAsync(String... names) {
checkState();
return transactionalSet.readIntersectionAsync(names);
}
@Override
public RFuture<Boolean> unlinkAsync() {
checkState();
return transactionalSet.unlinkAsync();
}
@Override
public RFuture<Boolean> touchAsync() {
checkState();
return transactionalSet.touchAsync();
}
@Override
public RFuture<Boolean> deleteAsync() {
checkState();
return transactionalSet.deleteAsync();
}
protected void checkState() {
if (executed.get()) {
throw new IllegalStateException("Unable to execute operation. Transaction is in finished state!");
}
}
}
|
RedissonTransactionalSet
|
java
|
mapstruct__mapstruct
|
core/src/main/java/org/mapstruct/Named.java
|
{
"start": 1554,
"end": 2348
}
|
class ____ implements MovieMapper {
* private final Titles titles = new Titles();
*
* @Override
* public GermanRelease toGerman(OriginalRelease movies) {
* if ( movies == null ) {
* return null;
* }
*
* GermanRelease germanRelease = new GermanRelease();
*
* germanRelease.setTitle( titles.translateTitleEG( movies.getTitle() ) );
*
* return germanRelease;
* }
* }
* </code>
* </pre>
*
* @author Sjaak Derksen
* @see org.mapstruct.Mapping#qualifiedByName()
* @see IterableMapping#qualifiedByName()
* @see MapMapping#keyQualifiedByName()
* @see MapMapping#valueQualifiedByName()
*/
@Target( { ElementType.TYPE, ElementType.METHOD } )
@Retention( RetentionPolicy.CLASS )
@Qualifier
public @
|
MovieMapperImpl
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/webmonitor/threadinfo/VertexThreadInfoTracker.java
|
{
"start": 22050,
"end": 22788
}
|
class ____
extends ThreadInfoSampleCompletionCallback {
private final ExecutionVertexKey executionVertexKey;
ExecutionVertexThreadInfoSampleCompletionCallback(
ExecutionVertexKey executionVertexKey, String sampleName) {
super(sampleName);
this.executionVertexKey = executionVertexKey;
}
@Override
protected void handleResult(VertexThreadInfoStats threadInfoStats) {
executionVertexStatsCache.put(executionVertexKey, threadInfoStats);
}
@Override
protected void doFinally() {
pendingExecutionVertexStats.remove(executionVertexKey);
}
}
}
|
ExecutionVertexThreadInfoSampleCompletionCallback
|
java
|
spring-projects__spring-framework
|
spring-context/src/testFixtures/java/org/springframework/context/testfixture/beans/factory/ImportAwareBeanRegistrar.java
|
{
"start": 1017,
"end": 1684
}
|
class ____ implements BeanRegistrar, ImportAware {
@Nullable
private AnnotationMetadata importMetadata;
@Override
public void register(BeanRegistry registry, Environment env) {
registry.registerBean(ClassNameHolder.class, spec -> spec.supplier(context ->
new ClassNameHolder(this.importMetadata == null ? null : this.importMetadata.getClassName())));
}
@Override
public void setImportMetadata(AnnotationMetadata importMetadata) {
this.importMetadata = importMetadata;
}
public @Nullable AnnotationMetadata getImportMetadata() {
return this.importMetadata;
}
public record ClassNameHolder(@Nullable String className) {}
}
|
ImportAwareBeanRegistrar
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/server/policy/AlterConfigPolicy.java
|
{
"start": 1987,
"end": 4041
}
|
class ____ the provided parameters.
*
* This constructor is public to make testing of <code>AlterConfigPolicy</code> implementations easier.
*/
public RequestMetadata(ConfigResource resource, Map<String, String> configs) {
this.resource = resource;
this.configs = configs;
}
/**
* Return the configs in the request.
*/
public Map<String, String> configs() {
return configs;
}
public ConfigResource resource() {
return resource;
}
@Override
public int hashCode() {
return Objects.hash(resource, configs);
}
@Override
public boolean equals(Object o) {
if ((o == null) || (!o.getClass().equals(getClass()))) return false;
RequestMetadata other = (RequestMetadata) o;
return resource.equals(other.resource) &&
configs.equals(other.configs);
}
@Override
public String toString() {
return "AlterConfigPolicy.RequestMetadata(resource=" + resource +
", configs=" + configs + ")";
}
}
/**
* Validate the request parameters and throw a <code>PolicyViolationException</code> with a suitable error
* message if the alter configs request parameters for the provided resource do not satisfy this policy.
*
* Clients will receive the POLICY_VIOLATION error code along with the exception's message. Note that validation
* failure only affects the relevant resource, other resources in the request will still be processed.
*
* @param requestMetadata the alter configs request parameters for the provided resource (topic is the only resource
* type whose configs can be updated currently).
* @throws PolicyViolationException if the request parameters do not satisfy this policy.
*/
void validate(RequestMetadata requestMetadata) throws PolicyViolationException;
}
|
with
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/spi/ThreadContextMap2.java
|
{
"start": 1180,
"end": 1902
}
|
interface ____ extends ThreadContextMap {
/**
* Puts all given context map entries into the current thread's
* context map.
*
* <p>If the current thread does not have a context map it is
* created as a side effect.</p>
* @param map The map.
* @since 2.7
*/
void putAll(final Map<String, String> map);
/**
* Returns the context data for reading. Note that regardless of whether the returned context data has been
* {@linkplain StringMap#freeze() frozen} (made read-only) or not, callers should not attempt to modify
* the returned data structure.
*
* @return the {@code StringMap}
*/
StringMap getReadOnlyContextData();
}
|
ThreadContextMap2
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment-spi/src/main/java/io/quarkus/devui/spi/page/MenuPageBuildItem.java
|
{
"start": 99,
"end": 445
}
|
class ____ extends AbstractPageBuildItem {
public MenuPageBuildItem() {
super();
}
public MenuPageBuildItem(PageBuilder... pageBuilder) {
super(pageBuilder);
}
public MenuPageBuildItem(String customIdentifier, PageBuilder... pageBuilder) {
super(customIdentifier, pageBuilder);
}
}
|
MenuPageBuildItem
|
java
|
google__gson
|
test-shrinker/src/main/java/com/example/ClassWithJsonAdapterAnnotation.java
|
{
"start": 2229,
"end": 2565
}
|
class ____ extends TypeAdapter<DummyClass> {
@Override
public DummyClass read(JsonReader in) throws IOException {
return new DummyClass("adapter-" + in.nextInt());
}
@Override
public void write(JsonWriter out, DummyClass value) throws IOException {
out.value("adapter-" + value);
}
}
static
|
Adapter
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-actuator-docs/src/test/java/org/springframework/boot/actuate/docs/management/ThreadDumpEndpointDocumentationTests.java
|
{
"start": 1928,
"end": 3746
}
|
class ____ extends MockMvcEndpointDocumentationTests {
@Test
void jsonThreadDump() {
ReentrantLock lock = new ReentrantLock();
CountDownLatch latch = new CountDownLatch(1);
new Thread(() -> {
try {
lock.lock();
try {
latch.await();
}
finally {
lock.unlock();
}
}
catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
}).start();
assertThat(this.mvc.get().uri("/actuator/threaddump").accept(MediaType.APPLICATION_JSON)).hasStatusOk()
.apply(MockMvcRestDocumentation
.document("threaddump/json", preprocessResponse(limit("threads")), responseFields(
fieldWithPath("threads").description("JVM's threads."),
fieldWithPath("threads.[].blockedCount")
.description("Total number of times that the thread has been blocked."),
fieldWithPath("threads.[].blockedTime")
.description("Time in milliseconds that the thread has spent "
+ "blocked. -1 if thread contention " + "monitoring is disabled."),
fieldWithPath("threads.[].daemon")
.description(
"Whether the thread is a daemon " + "thread. Only available on Java 9 or later.")
.optional()
.type(JsonFieldType.BOOLEAN),
fieldWithPath("threads.[].inNative")
.description("Whether the thread is executing native code."),
fieldWithPath("threads.[].lockName")
.description("Description of the object on which the " + "thread is blocked, if any.")
.optional()
.type(JsonFieldType.STRING),
fieldWithPath("threads.[].lockInfo")
.description("Object for which the thread is blocked waiting.")
.optional()
.type(JsonFieldType.OBJECT),
fieldWithPath("threads.[].lockInfo.className")
.description("Fully qualified
|
ThreadDumpEndpointDocumentationTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MockIngestPlugin.java
|
{
"start": 1516,
"end": 2490
}
|
class ____ implements Processor.Factory {
private final String type;
private final String[] fields;
MockProcessorFactory(final Map.Entry<String, String[]> factory) {
this(factory.getKey(), factory.getValue());
}
MockProcessorFactory(final String type, final String[] fields) {
this.type = type;
this.fields = fields;
}
@Override
public Processor create(
Map<String, Processor.Factory> processorFactories,
String tag,
String description,
Map<String, Object> config,
ProjectId projectId
) throws Exception {
// read fields so the processor succeeds
for (final String field : fields) {
ConfigurationUtils.readObject(type, tag, config, field);
}
return new MockProcessor(type, tag, description);
}
}
static
|
MockProcessorFactory
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/converter/SQLConverterTest.java
|
{
"start": 1017,
"end": 1588
}
|
class ____ extends ContextTestSupport {
@Test
public void testTimestamp() {
long value = System.currentTimeMillis();
Timestamp ts = context.getTypeConverter().convertTo(Timestamp.class, value);
Timestamp expected = new Timestamp(value);
assertEquals(expected, ts);
}
@Test
public void testToLong() {
long value = System.currentTimeMillis();
Timestamp ts = new Timestamp(value);
long l = context.getTypeConverter().convertTo(Long.class, ts);
assertEquals(value, l);
}
}
|
SQLConverterTest
|
java
|
redisson__redisson
|
redisson-hibernate/redisson-hibernate-7/src/test/java/org/redisson/hibernate/TransactionalTest.java
|
{
"start": 707,
"end": 6248
}
|
class ____ extends BaseSessionFactoryFunctionalTest {
@Container
public static final GenericContainer H2 = new FixedHostPortGenericContainer("oscarfonts/h2:latest")
.withFixedExposedPort(1521, 1521);
@Container
public static final GenericContainer REDIS = new FixedHostPortGenericContainer("redis:latest")
.withFixedExposedPort(6379, 6379);
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class[] { ItemTransactional.class};
}
@BeforeEach
public void before() {
sessionFactory().getCache().evictAllRegions();
sessionFactory().getStatistics().clear();
}
@Test
public void testQuery() {
Statistics stats = sessionFactory().getStatistics();
Session s = sessionFactory().openSession();
s.beginTransaction();
ItemTransactional item = new ItemTransactional("data");
item.getEntries().addAll(Arrays.asList("a", "b", "c"));
s.persist(item);
s.flush();
s.getTransaction().commit();
s = sessionFactory().openSession();
s.beginTransaction();
Query query = s.getNamedQuery("testQuery");
query.setCacheable(true);
query.setCacheRegion("myTestQuery");
query.setParameter("name", "data");
item = (ItemTransactional) query.uniqueResult();
s.getTransaction().commit();
s.close();
Assertions.assertEquals(1, stats.getDomainDataRegionStatistics("myTestQuery").getPutCount());
s = sessionFactory().openSession();
s.beginTransaction();
Query query2 = s.getNamedQuery("testQuery");
query2.setCacheable(true);
query2.setCacheRegion("myTestQuery");
query2.setParameter("name", "data");
item = (ItemTransactional) query2.uniqueResult();
s.remove(item);
s.getTransaction().commit();
s.close();
Assertions.assertEquals(1, stats.getDomainDataRegionStatistics("myTestQuery").getHitCount());
stats.logSummary();
}
@Test
public void testCollection() {
Long id = null;
Statistics stats = sessionFactory().getStatistics();
Session s = sessionFactory().openSession();
s.beginTransaction();
ItemTransactional item = new ItemTransactional("data");
item.getEntries().addAll(Arrays.asList("a", "b", "c"));
s.persist(item);
id = item.getId();
s.flush();
s.getTransaction().commit();
s = sessionFactory().openSession();
s.beginTransaction();
item = (ItemTransactional) s.get(ItemTransactional.class, id);
assertThat(item.getEntries()).containsExactly("a", "b", "c");
s.getTransaction().commit();
s.close();
Assertions.assertEquals(1, stats.getDomainDataRegionStatistics("item_entries").getPutCount());
s = sessionFactory().openSession();
s.beginTransaction();
item = (ItemTransactional) s.get(ItemTransactional.class, id);
assertThat(item.getEntries()).containsExactly("a", "b", "c");
s.remove(item);
s.getTransaction().commit();
s.close();
Assertions.assertEquals(1, stats.getDomainDataRegionStatistics("item_entries").getHitCount());
}
@Test
public void testNaturalId() {
Statistics stats = sessionFactory().getStatistics();
Session s = sessionFactory().openSession();
s.beginTransaction();
ItemTransactional item = new ItemTransactional("data");
item.setNid("123");
s.persist(item);
s.flush();
s.getTransaction().commit();
Assertions.assertEquals(1, stats.getDomainDataRegionStatistics("item").getPutCount());
Assertions.assertEquals(1, stats.getNaturalIdStatistics(ItemTransactional.class.getName()).getCachePutCount());
s = sessionFactory().openSession();
s.beginTransaction();
item = (ItemTransactional) s.bySimpleNaturalId(ItemTransactional.class).load("123");
assertThat(item).isNotNull();
s.remove(item);
s.getTransaction().commit();
s.close();
Assertions.assertEquals(1, stats.getDomainDataRegionStatistics("item").getHitCount());
Assertions.assertEquals(1, stats.getNaturalIdStatistics(ItemTransactional.class.getName()).getCacheHitCount());
sessionFactory().getStatistics().logSummary();
}
@Test
public void testUpdateWithRefreshThenRollback() {
// Verifies that an update followed by refresh() and a transaction rollback
// does not poison the second-level cache: the subsequent get() in step 2
// still counts as a cache hit against the original cached state.
Statistics stats = sessionFactory().getStatistics();
Long id = null;
// 1) Persist the entity; expect one put into the "item" region.
Session s = sessionFactory().openSession();
s.beginTransaction();
ItemTransactional item = new ItemTransactional( "data" );
s.persist( item );
id = item.getId();
s.flush();
s.getTransaction().commit();
Assertions.assertEquals(1, stats.getDomainDataRegionStatistics("item").getPutCount());
// 2) Load (cache hit), mutate, flush, refresh, then roll the whole
// transaction back so the change must not become visible or cached.
s = sessionFactory().openSession();
s.beginTransaction();
item = (ItemTransactional) s.get(ItemTransactional.class, id);
item.setName("newdata");
s.merge(item);
s.flush();
s.refresh(item);
s.getTransaction().rollback();
s.clear();
s.close();
Assertions.assertEquals(1, stats.getDomainDataRegionStatistics("item").getHitCount());
}
}
|
TransactionalTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/pool/TestSharePreparedStatements.java
|
{
"start": 943,
"end": 3181
}
|
class ____ extends TestCase {
    protected void setUp() throws Exception {
        DruidDataSourceStatManager.clear();
    }

    protected void tearDown() throws Exception {
        // Every data source created by a test must have been closed again.
        assertEquals(0, DruidDataSourceStatManager.getInstance().getDataSourceList().size());
    }

    /**
     * Verifies the per-connection prepared-statement cache: with
     * {@code maxPoolPreparedStatementPerConnectionSize > 0}, repeated
     * preparations of the same SQL on the pooled connection must return the
     * very same underlying {@link MockPreparedStatement}, in both auto-commit
     * and manual-commit mode.
     */
    public void test_sharePreparedStatements() throws Exception {
        // sharePreparedStatements
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUrl("jdbc:mock:xxx");
        dataSource.setMaxPoolPreparedStatementPerConnectionSize(30);

        String sql = "SELECT 1";
        MockPreparedStatement mockStmt = null;
        {
            // First use: capture the physical statement that gets cached.
            Connection conn = dataSource.getConnection();
            PreparedStatement stmt = conn.prepareStatement(sql);
            mockStmt = stmt.unwrap(MockPreparedStatement.class);
            ResultSet rs = stmt.executeQuery();
            rs.next();
            rs.close();
            stmt.close();
            conn.close();
        }
        {
            // Second use in auto-commit mode: must be the identical instance.
            // (assertSame, not assertEquals — identity is the contract here,
            // consistent with the assertions below.)
            Connection conn = dataSource.getConnection();
            PreparedStatement stmt = conn.prepareStatement(sql);
            assertSame(mockStmt, stmt.unwrap(MockPreparedStatement.class));
            ResultSet rs = stmt.executeQuery();
            rs.next();
            rs.close();
            stmt.close();
            conn.close();
        }
        {
            // Manual-commit mode must reuse the cached statement as well.
            Connection conn = dataSource.getConnection();
            conn.setAutoCommit(false);
            PreparedStatement stmt = conn.prepareStatement(sql);
            assertSame(mockStmt, stmt.unwrap(MockPreparedStatement.class));
            ResultSet rs = stmt.executeQuery();
            rs.next();
            rs.close();
            stmt.close();
            conn.close();
        }
        {
            // And again, to show reuse survives repeated checkouts.
            Connection conn = dataSource.getConnection();
            conn.setAutoCommit(false);
            PreparedStatement stmt = conn.prepareStatement(sql);
            assertSame(mockStmt, stmt.unwrap(MockPreparedStatement.class));
            ResultSet rs = stmt.executeQuery();
            rs.next();
            rs.close();
            stmt.close();
            conn.close();
        }
        dataSource.close();
    }
}
|
TestSharePreparedStatements
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/ErrorInfoTest.java
|
{
"start": 1137,
"end": 1831
}
|
class ____ {
@Test
void testSerializationWithExceptionOutsideClassLoader() throws Exception {
// Round-trips an ErrorInfo whose exception class lives in a custom class
// loader. The deserialized copy must preserve the timestamp, the
// pre-rendered stack-trace string, and the exception message even if the
// original exception class is not resolvable on the deserializing side.
final ErrorInfo error =
new ErrorInfo(new ExceptionWithCustomClassLoader(), System.currentTimeMillis());
final ErrorInfo copy = CommonTestUtils.createCopySerializable(error);
assertThat(copy.getTimestamp()).isEqualTo(error.getTimestamp());
assertThat(copy.getExceptionAsString()).isEqualTo(error.getExceptionAsString());
assertThat(copy.getException().getMessage()).isEqualTo(error.getException().getMessage());
}
// ------------------------------------------------------------------------
private static final
|
ErrorInfoTest
|
java
|
netty__netty
|
codec-classes-quic/src/main/java/io/netty/handler/codec/quic/ConnectionIdChannelMap.java
|
{
"start": 2127,
"end": 3084
}
|
class ____ implements Comparable<ConnectionIdKey> {
    // Precomputed 64-bit hash of the connection-id bytes; compared first for speed.
    private final long hash;
    // The raw connection-id bytes backing this key.
    private final ByteBuffer key;

    ConnectionIdKey(long hash, ByteBuffer key) {
        this.hash = hash;
        this.key = key;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        ConnectionIdKey other = (ConnectionIdKey) o;
        if (hash != other.hash) {
            return false;
        }
        return Objects.equals(key, other.key);
    }

    @Override
    public int hashCode() {
        // Truncate the precomputed 64-bit hash to an int.
        return (int) hash;
    }

    @Override
    public int compareTo(@NotNull ConnectionIdChannelMap.ConnectionIdKey o) {
        // Order primarily by hash; break ties on the raw buffer contents.
        int byHash = Long.compare(hash, o.hash);
        if (byHash != 0) {
            return byHash;
        }
        return key.compareTo(o.key);
    }
}
}
|
ConnectionIdKey
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/constructor/PersonDto.java
|
{
"start": 257,
"end": 1252
}
|
// Plain mutable DTO carrying basic person data; used as a mapping target/source
// (no behavior beyond bean-style accessors).
class ____ {
private String name;
private int age;
private String job;
private String city;
private String address;
// Names of the person's children, if any.
private List<String> children;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public String getJob() {
return job;
}
public void setJob(String job) {
this.job = job;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public List<String> getChildren() {
return children;
}
public void setChildren(List<String> children) {
this.children = children;
}
}
|
PersonDto
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/convert/spi/AutoApplicableConverterDescriptor.java
|
{
"start": 510,
"end": 997
}
|
// Contract for a converter descriptor that may be auto-applied to attributes,
// collection elements, or map keys during metadata building. Each lookup
// returns the matching descriptor, or (presumably) null when the converter
// does not apply — confirm against implementations.
interface ____ {
// Whether this converter is a candidate for auto-application at all.
boolean isAutoApplicable();
// Descriptor to auto-apply to the given basic attribute, if applicable.
ConverterDescriptor<?,?> getAutoAppliedConverterDescriptorForAttribute(MemberDetails memberDetails, MetadataBuildingContext context);
// Descriptor to auto-apply to the elements of a plural attribute, if applicable.
ConverterDescriptor<?,?> getAutoAppliedConverterDescriptorForCollectionElement(MemberDetails memberDetails, MetadataBuildingContext context);
// Descriptor to auto-apply to the keys of a map attribute, if applicable.
ConverterDescriptor<?,?> getAutoAppliedConverterDescriptorForMapKey(MemberDetails memberDetails, MetadataBuildingContext context);
}
}
|
AutoApplicableConverterDescriptor
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockScanner.java
|
{
"start": 3497,
"end": 4056
}
|
class ____ {
public static final Logger LOG =
LoggerFactory.getLogger(TestBlockScanner.class);
@BeforeEach
public void before() {
// Allow test-only scanner settings and raise scanner logging to TRACE so
// scanner activity is visible in test output.
BlockScanner.Conf.allowUnitTestSettings = true;
GenericTestUtils.setLogLevel(BlockScanner.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(VolumeScanner.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(FsVolumeImpl.LOG, Level.TRACE);
}
// Disables the volume block scanner by setting its byte-per-second budget to 0.
private static void disableBlockScanner(Configuration conf) {
conf.setLong(DFS_BLOCK_SCANNER_VOLUME_BYTES_PER_SECOND, 0L);
}
private static
|
TestBlockScanner
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java
|
{
"start": 599,
"end": 1741
}
|
// Request to update a single cross-cluster API key. All updatable fields are
// optional, but validate() requires that at least one of access (role
// descriptors), metadata, or certificate identity is actually being changed.
class ____ extends BaseSingleUpdateApiKeyRequest {
public UpdateCrossClusterApiKeyRequest(
final String id,
@Nullable CrossClusterApiKeyRoleDescriptorBuilder roleDescriptorBuilder,
@Nullable final Map<String, Object> metadata,
@Nullable TimeValue expiration,
@Nullable CertificateIdentity certificateIdentity
) {
// A cross-cluster key carries at most one role descriptor, built from the
// access definition; null means "leave access unchanged".
super(roleDescriptorBuilder == null ? null : List.of(roleDescriptorBuilder.build()), metadata, expiration, id, certificateIdentity);
}
@Override
public ApiKey.Type getType() {
return ApiKey.Type.CROSS_CLUSTER;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validate();
// Reject no-op updates: expiration alone is not a valid change here.
if (roleDescriptors == null && metadata == null && certificateIdentity == null) {
validationException = addValidationError(
"must update [access], [metadata], or [certificate_identity] for cross-cluster API keys",
validationException
);
}
return validationException;
}
}
|
UpdateCrossClusterApiKeyRequest
|
java
|
grpc__grpc-java
|
stub/src/test/java/io/grpc/stub/AbstractFutureStubTest.java
|
{
"start": 2978,
"end": 3301
}
|
// Minimal concrete AbstractFutureStub used to exercise the stub machinery in
// tests; defines no RPC methods.
class ____ extends AbstractFutureStub<NoopFutureStub> {
NoopFutureStub(Channel channel, CallOptions options) {
super(channel, options);
}
@Override
protected NoopFutureStub build(Channel channel, CallOptions callOptions) {
// Recreate the stub with the given channel/options (self-type pattern).
return new NoopFutureStub(channel, callOptions);
}
}
}
|
NoopFutureStub
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/MappedSuperclassWithIdOnSubclassesTest.java
|
{
"start": 3308,
"end": 3604
}
|
// Simple entity-like class with a name property; the no-arg constructor is
// required for instantiation by the persistence provider.
class ____ {
private String name;
public Customer() {
}
public Customer(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity(name = "DomesticCustomer")
public static
|
Customer
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/util/AbstractParameterTool.java
|
{
"start": 1231,
"end": 9771
}
|
class ____ extends ExecutionConfig.GlobalJobParameters
        implements Serializable, Cloneable {
    private static final long serialVersionUID = 1L;

    protected static final String NO_VALUE_KEY = "__NO_VALUE_KEY";
    protected static final String DEFAULT_UNDEFINED = "<undefined>";

    // ------------------ ParameterUtil ------------------------
    // data which is only used on the client and does not need to be transmitted
    protected transient Map<String, String> defaultData;
    protected transient Set<String> unrequestedParameters;

    /**
     * Returns the set of parameter names which have not been requested with {@link #has(String)} or
     * one of the {@code get} methods. Access to the map returned by {@link #toMap()} is not
     * tracked.
     */
    @PublicEvolving
    public Set<String> getUnrequestedParameters() {
        return Collections.unmodifiableSet(unrequestedParameters);
    }

    // ------------------ Get data from the util ----------------

    /** Returns number of parameters in {@link AbstractParameterTool}. */
    protected abstract int getNumberOfParameters();

    /**
     * Returns the String value for the given key. If the key does not exist it will return null.
     */
    protected abstract String get(String key);

    /** Check if value is set. */
    public abstract boolean has(String value);

    /**
     * Returns the String value for the given key. If the key does not exist it will throw a {@link
     * RuntimeException}.
     */
    public String getRequired(String key) {
        addToDefaults(key, null);
        String value = get(key);
        if (value == null) {
            throw new RuntimeException("No data for required key '" + key + "'");
        }
        return value;
    }

    /**
     * Returns the String value for the given key. If the key does not exist it will return the
     * given default value.
     */
    public String get(String key, String defaultValue) {
        addToDefaults(key, defaultValue);
        String value = get(key);
        if (value == null) {
            return defaultValue;
        } else {
            return value;
        }
    }

    // -------------- Integer

    /**
     * Returns the Integer value for the given key. The method fails if the key does not exist or
     * the value is not an Integer.
     */
    public int getInt(String key) {
        addToDefaults(key, null);
        String value = getRequired(key);
        return Integer.parseInt(value);
    }

    /**
     * Returns the Integer value for the given key. If the key does not exists it will return the
     * default value given. The method fails if the value is not an Integer.
     */
    public int getInt(String key, int defaultValue) {
        addToDefaults(key, Integer.toString(defaultValue));
        String value = get(key);
        if (value == null) {
            return defaultValue;
        }
        return Integer.parseInt(value);
    }

    // -------------- LONG

    /** Returns the Long value for the given key. The method fails if the key does not exist. */
    public long getLong(String key) {
        addToDefaults(key, null);
        String value = getRequired(key);
        return Long.parseLong(value);
    }

    /**
     * Returns the Long value for the given key. If the key does not exists it will return the
     * default value given. The method fails if the value is not a Long.
     */
    public long getLong(String key, long defaultValue) {
        addToDefaults(key, Long.toString(defaultValue));
        String value = get(key);
        if (value == null) {
            return defaultValue;
        }
        return Long.parseLong(value);
    }

    // -------------- FLOAT

    /** Returns the Float value for the given key. The method fails if the key does not exist. */
    public float getFloat(String key) {
        addToDefaults(key, null);
        String value = getRequired(key);
        // parseFloat avoids the needless boxing of Float.valueOf for a primitive return.
        return Float.parseFloat(value);
    }

    /**
     * Returns the Float value for the given key. If the key does not exists it will return the
     * default value given. The method fails if the value is not a Float.
     */
    public float getFloat(String key, float defaultValue) {
        addToDefaults(key, Float.toString(defaultValue));
        String value = get(key);
        if (value == null) {
            return defaultValue;
        } else {
            return Float.parseFloat(value);
        }
    }

    // -------------- DOUBLE

    /** Returns the Double value for the given key. The method fails if the key does not exist. */
    public double getDouble(String key) {
        addToDefaults(key, null);
        String value = getRequired(key);
        return Double.parseDouble(value);
    }

    /**
     * Returns the Double value for the given key. If the key does not exists it will return the
     * default value given. The method fails if the value is not a Double.
     */
    public double getDouble(String key, double defaultValue) {
        addToDefaults(key, Double.toString(defaultValue));
        String value = get(key);
        if (value == null) {
            return defaultValue;
        } else {
            return Double.parseDouble(value);
        }
    }

    // -------------- BOOLEAN

    /** Returns the Boolean value for the given key. The method fails if the key does not exist. */
    public boolean getBoolean(String key) {
        addToDefaults(key, null);
        String value = getRequired(key);
        return Boolean.parseBoolean(value);
    }

    /**
     * Returns the Boolean value for the given key. If the key does not exists it will return the
     * default value given. The method returns whether the string of the value is "true" ignoring
     * cases.
     */
    public boolean getBoolean(String key, boolean defaultValue) {
        addToDefaults(key, Boolean.toString(defaultValue));
        String value = get(key);
        if (value == null) {
            return defaultValue;
        } else {
            return Boolean.parseBoolean(value);
        }
    }

    // -------------- SHORT

    /** Returns the Short value for the given key. The method fails if the key does not exist. */
    public short getShort(String key) {
        addToDefaults(key, null);
        String value = getRequired(key);
        return Short.parseShort(value);
    }

    /**
     * Returns the Short value for the given key. If the key does not exists it will return the
     * default value given. The method fails if the value is not a Short.
     */
    public short getShort(String key, short defaultValue) {
        addToDefaults(key, Short.toString(defaultValue));
        String value = get(key);
        if (value == null) {
            return defaultValue;
        } else {
            return Short.parseShort(value);
        }
    }

    // -------------- BYTE

    /** Returns the Byte value for the given key. The method fails if the key does not exist. */
    public byte getByte(String key) {
        addToDefaults(key, null);
        String value = getRequired(key);
        return Byte.parseByte(value);
    }

    /**
     * Returns the Byte value for the given key. If the key does not exists it will return the
     * default value given. The method fails if the value is not a Byte.
     */
    public byte getByte(String key, byte defaultValue) {
        addToDefaults(key, Byte.toString(defaultValue));
        String value = get(key);
        if (value == null) {
            return defaultValue;
        } else {
            return Byte.parseByte(value);
        }
    }

    // --------------- Internals

    /**
     * Records {@code value} as the documented default for {@code key}. A concrete default
     * replaces a previously recorded {@link #DEFAULT_UNDEFINED} placeholder.
     */
    protected void addToDefaults(String key, String value) {
        final String currentValue = defaultData.get(key);
        if (currentValue == null) {
            if (value == null) {
                value = DEFAULT_UNDEFINED;
            }
            defaultData.put(key, value);
        } else {
            // there is already an entry for this key. Check if the value is the undefined
            if (currentValue.equals(DEFAULT_UNDEFINED) && value != null) {
                // update key with better default value
                defaultData.put(key, value);
            }
        }
    }

    // ------------------------- Export to different targets -------------------------

    @Override
    protected abstract Object clone() throws CloneNotSupportedException;

    // ------------------------- ExecutionConfig.UserConfig interface -------------------------

    @Override
    public abstract Map<String, String> toMap();
}
|
AbstractParameterTool
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/CompiledJavaVersionBuildItem.java
|
{
"start": 1552,
"end": 2482
}
|
// JavaVersion implementation for a class file whose major version number was
// successfully determined; answers "is Java N or higher" checks against it.
class ____ implements JavaVersion {
// Class-file major version constants (Java 19 -> 63, Java 21 -> 65).
private static final int JAVA_19_MAJOR = 63;
private static final int JAVA_21_MAJOR = 65;
// The class-file major version that was read from the build output.
private final int determinedMajor;
Known(int determinedMajor) {
this.determinedMajor = determinedMajor;
}
@Override
public Status isJava19OrHigher() {
return higherOrEqualStatus(JAVA_19_MAJOR);
}
@Override
public Status isJava21OrHigher() {
return higherOrEqualStatus(JAVA_21_MAJOR);
}
private Status higherOrEqualStatus(int javaMajor) {
return determinedMajor >= javaMajor ? Status.TRUE : Status.FALSE;
}
// NOTE(review): not referenced within this class — presumably used by the
// enclosing class (outside this view); verify before removing.
private Status equalStatus(int javaMajor) {
return determinedMajor == javaMajor ? Status.TRUE : Status.FALSE;
}
}
}
}
|
Known
|
java
|
spring-projects__spring-framework
|
spring-orm/src/main/java/org/springframework/orm/jpa/JpaTransactionManager.java
|
{
"start": 30306,
"end": 30993
}
|
// TransactionDefinition wrapper that overrides the delegate's timeout and
// records whether the transaction is backed by a local resource.
class ____ extends DelegatingTransactionDefinition
implements ResourceTransactionDefinition {
// Effective timeout (seconds per TransactionDefinition convention —
// presumably; confirm against the interface contract).
private final int timeout;
// Whether this transaction operates on a local (non-JTA) resource.
private final boolean localResource;
public JpaTransactionDefinition(TransactionDefinition targetDefinition, int timeout, boolean localResource) {
super(targetDefinition);
this.timeout = timeout;
this.localResource = localResource;
}
@Override
public int getTimeout() {
return this.timeout;
}
@Override
public boolean isLocalResource() {
return this.localResource;
}
}
/**
* Holder for suspended resources.
* Used internally by {@code doSuspend} and {@code doResume}.
*/
private static final
|
JpaTransactionDefinition
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/softdelete/SoftDeleteMappedColumnTest.java
|
{
"start": 2296,
"end": 2826
}
|
// Entity with a soft-delete flag mapped to the "is_deleted" column; the field
// is read-only (insertable/updatable = false) so only the soft-delete
// machinery writes it.
class ____ {
@Id
private Long id;
private String name;
@Column( name = "is_deleted", insertable = false, updatable = false )
private boolean deleted;
// Required by the persistence provider.
public ValidEntity() {
}
public ValidEntity(Long id, String name) {
this.id = id;
this.name = name;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
public boolean isDeleted() {
return deleted;
}
}
@Entity( name = "InvalidEntity" )
@SoftDelete( columnName = "is_deleted" )
public static
|
ValidEntity
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.