language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/joincolumn/JoinColumnWithSecondaryTableTest.java
|
{
"start": 1407,
"end": 1597
}
|
class ____ extends Being {
@Column(name = "uuid", table = "animal")
private String uuid;
}
@Entity
@SecondaryTable(name = "cat")
@DiscriminatorValue(value = "CAT")
public static
|
Animal
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/xml/internal/attr/AnyMappingAttributeProcessing.java
|
{
"start": 2731,
"end": 8918
}
|
class ____ {
public static MutableMemberDetails processAnyMappingAttribute(
JaxbAnyMappingImpl jaxbHbmAnyMapping,
MutableClassDetails declarer,
AccessType classAccessType,
XmlDocumentContext xmlDocumentContext) {
final AccessType accessType = coalesce( jaxbHbmAnyMapping.getAccess(), classAccessType );
final MutableMemberDetails memberDetails = XmlProcessingHelper.getAttributeMember(
jaxbHbmAnyMapping.getName(),
accessType,
declarer
);
final AnyAnnotation anyAnn = (AnyAnnotation) memberDetails.applyAnnotationUsage(
HibernateAnnotations.ANY,
xmlDocumentContext.getModelBuildingContext()
);
applyAccess( accessType, memberDetails, xmlDocumentContext );
applyAttributeAccessor( jaxbHbmAnyMapping, memberDetails, xmlDocumentContext );
applyFetching( jaxbHbmAnyMapping, memberDetails, anyAnn, xmlDocumentContext );
applyOptionality( jaxbHbmAnyMapping, anyAnn, xmlDocumentContext );
applyOptimisticLock( jaxbHbmAnyMapping, memberDetails, xmlDocumentContext );
applyDiscriminator( memberDetails, jaxbHbmAnyMapping, xmlDocumentContext );
applyKey( memberDetails, jaxbHbmAnyMapping, xmlDocumentContext );
XmlAnnotationHelper.applyCascading( jaxbHbmAnyMapping.getCascade(), memberDetails, xmlDocumentContext );
return memberDetails;
}
static void applyDiscriminator(
MutableMemberDetails memberDetails,
JaxbAnyMapping jaxbHbmAnyMapping,
XmlDocumentContext xmlDocumentContext) {
final JaxbAnyMapping.Discriminator jaxbDiscriminator = jaxbHbmAnyMapping.getDiscriminator();
final AnyDiscriminatorAnnotation anyDiscriminatorAnn = (AnyDiscriminatorAnnotation) memberDetails.applyAnnotationUsage(
HibernateAnnotations.ANY_DISCRIMINATOR,
xmlDocumentContext.getModelBuildingContext()
);
if ( jaxbDiscriminator == null ) {
return;
}
final DiscriminatorType discriminatorType = jaxbDiscriminator.getType();
if ( discriminatorType != null ) {
anyDiscriminatorAnn.value( discriminatorType );
}
final JaxbColumnImpl jaxbColumn = jaxbDiscriminator.getColumn();
final ColumnJpaAnnotation columnAnn = (ColumnJpaAnnotation) memberDetails.applyAnnotationUsage(
JpaAnnotations.COLUMN,
xmlDocumentContext.getModelBuildingContext()
);
if ( jaxbColumn != null ) {
columnAnn.apply( jaxbColumn, xmlDocumentContext );
}
final List<? extends JaxbDiscriminatorMapping> jaxbValueMappings = jaxbDiscriminator.getValueMappings();
if ( CollectionHelper.isNotEmpty( jaxbValueMappings ) ) {
final AnyDiscriminatorValuesAnnotation discriminatorValuesUsage = (AnyDiscriminatorValuesAnnotation) memberDetails.replaceAnnotationUsage(
ANY_DISCRIMINATOR_VALUE,
HibernateAnnotations.ANY_DISCRIMINATOR_VALUES,
xmlDocumentContext.getModelBuildingContext()
);
discriminatorValuesUsage.value( collectDiscriminatorValues(
jaxbValueMappings,
xmlDocumentContext
) );
}
}
private static AnyDiscriminatorValue[] collectDiscriminatorValues(
List<? extends JaxbDiscriminatorMapping> jaxbValueMappings,
XmlDocumentContext xmlDocumentContext) {
final AnyDiscriminatorValue[] values = new AnyDiscriminatorValue[jaxbValueMappings.size()];
for ( int i = 0; i < jaxbValueMappings.size(); i++ ) {
final AnyDiscriminatorValueAnnotation valueAnn = ANY_DISCRIMINATOR_VALUE.createUsage( xmlDocumentContext.getModelBuildingContext() );
values[i] = valueAnn;
final JaxbDiscriminatorMapping jaxbValue = jaxbValueMappings.get( i );
valueAnn.discriminator( jaxbValue.getDiscriminatorValue() );
final String name = StringHelper.qualifyConditionally(
xmlDocumentContext.getXmlDocument().getDefaults().getPackage(),
jaxbValue.getCorrespondingEntityName()
);
final ClassDetails entityClassDetails = xmlDocumentContext.getModelBuildingContext().getClassDetailsRegistry().resolveClassDetails( name );
valueAnn.entity( entityClassDetails.toJavaClass() );
}
return values;
}
static void applyKey(
MutableMemberDetails memberDetails,
JaxbAnyMapping jaxbHbmAnyMapping,
XmlDocumentContext xmlDocumentContext) {
final JaxbAnyMapping.Key jaxbKey = jaxbHbmAnyMapping.getKey();
if ( StringHelper.isNotEmpty( jaxbKey.getType() ) ) {
final AnyKeTypeAnnotation keyTypeUsage = (AnyKeTypeAnnotation) memberDetails.applyAnnotationUsage(
HibernateAnnotations.ANY_KEY_TYPE,
xmlDocumentContext.getModelBuildingContext()
);
keyTypeUsage.value( jaxbKey.getType() );
}
else if ( StringHelper.isNotEmpty( jaxbKey.getJavaClass() ) ) {
final AnyKeyJavaClassAnnotation keyJavaType = (AnyKeyJavaClassAnnotation) memberDetails.applyAnnotationUsage(
HibernateAnnotations.ANY_KEY_JAVA_CLASS,
xmlDocumentContext.getModelBuildingContext()
);
keyJavaType.value( resolveKeyType( jaxbKey.getJavaClass(), xmlDocumentContext ) );
}
if ( jaxbKey.getColumns().isEmpty() ) {
memberDetails.applyAnnotationUsage( JpaAnnotations.JOIN_COLUMN, xmlDocumentContext.getModelBuildingContext() );
}
else {
final JoinColumnsJpaAnnotation joinColumnsUsage = (JoinColumnsJpaAnnotation) memberDetails.replaceAnnotationUsage(
JOIN_COLUMN,
JpaAnnotations.JOIN_COLUMNS,
xmlDocumentContext.getModelBuildingContext()
);
final JoinColumn[] joinColumns = new JoinColumn[jaxbKey.getColumns().size()];
joinColumnsUsage.value( joinColumns );
for ( int i = 0; i < jaxbKey.getColumns().size(); i++ ) {
final JoinColumnJpaAnnotation joinColumn = JOIN_COLUMN.createUsage( xmlDocumentContext.getModelBuildingContext() );
joinColumns[i] = joinColumn;
final JaxbColumnImpl jaxbJoinColumn = jaxbKey.getColumns().get( i );
joinColumn.apply( jaxbJoinColumn, xmlDocumentContext );
}
}
}
private static Class<?> resolveKeyType(String name, XmlDocumentContext xmlDocumentContext) {
final SimpleTypeInterpretation simpleTypeInterpretation = SimpleTypeInterpretation.interpret( name );
if ( simpleTypeInterpretation != null ) {
return simpleTypeInterpretation.getJavaType();
}
return xmlDocumentContext
.getBootstrapContext()
.getModelsContext()
.getClassLoading()
.classForName( xmlDocumentContext.resolveClassName( name ) );
}
}
|
AnyMappingAttributeProcessing
|
java
|
spring-projects__spring-framework
|
spring-context/src/testFixtures/java/org/springframework/context/testfixture/jndi/SimpleNamingContext.java
|
{
"start": 9344,
"end": 10926
}
|
class ____<T> implements NamingEnumeration<T> {
private final Iterator<T> iterator;
private AbstractNamingEnumeration(SimpleNamingContext context, String proot) throws NamingException {
if (!proot.isEmpty() && !proot.endsWith("/")) {
proot = proot + "/";
}
String root = context.root + proot;
Map<String, T> contents = new HashMap<>();
for (String boundName : context.boundObjects.keySet()) {
if (boundName.startsWith(root)) {
int startIndex = root.length();
int endIndex = boundName.indexOf('/', startIndex);
String strippedName =
(endIndex != -1 ? boundName.substring(startIndex, endIndex) : boundName.substring(startIndex));
if (!contents.containsKey(strippedName)) {
try {
contents.put(strippedName, createObject(strippedName, context.lookup(proot + strippedName)));
}
catch (NameNotFoundException ex) {
// cannot happen
}
}
}
}
if (contents.size() == 0) {
throw new NamingException("Invalid root: [" + context.root + proot + "]");
}
this.iterator = contents.values().iterator();
}
protected abstract T createObject(String strippedName, Object obj);
@Override
public boolean hasMore() {
return this.iterator.hasNext();
}
@Override
public T next() {
return this.iterator.next();
}
@Override
public boolean hasMoreElements() {
return this.iterator.hasNext();
}
@Override
public T nextElement() {
return this.iterator.next();
}
@Override
public void close() {
}
}
private static final
|
AbstractNamingEnumeration
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/Version.java
|
{
"start": 841,
"end": 1827
}
|
class ____ {
private static final Logger log = LoggerFactory.getLogger(Version.class);
public static void logVersion() {
try {
Enumeration<URL> resources = Version.class.getClassLoader().getResources("META-INF/MANIFEST.MF");
while (resources.hasMoreElements()) {
try (InputStream inputStream = resources.nextElement().openStream()) {
Manifest manifest = new Manifest(inputStream);
Attributes attrs = manifest.getMainAttributes();
if (attrs == null) {
continue;
}
String name = attrs.getValue("Bundle-Name");
if ("Redisson".equals(name)) {
log.info("Redisson {}", attrs.getValue("Bundle-Version"));
break;
}
}
}
} catch (Exception E) {
// skip it
}
}
}
|
Version
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/jdk8/LocalDateTest4.java
|
{
"start": 167,
"end": 533
}
|
class ____ extends TestCase {
public void test_for_issue() throws Exception {
VO vo = JSON.parseObject("{\"date\":\"20160506\"}", VO.class);
Assert.assertEquals(2016, vo.date.getYear());
Assert.assertEquals(5, vo.date.getMonthValue());
Assert.assertEquals(6, vo.date.getDayOfMonth());
}
public static
|
LocalDateTest4
|
java
|
apache__flink
|
flink-kubernetes/src/main/java/org/apache/flink/kubernetes/KubernetesResourceManagerDriver.java
|
{
"start": 22187,
"end": 22275
}
|
enum ____ {
ADDED,
MODIFIED,
DELETED,
ERROR
}
}
|
PodEvent
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/onetoone/OneToOneLazyTest.java
|
{
"start": 1843,
"end": 2264
}
|
class ____ {
@Id
private Long id;
@OneToOne(fetch = FetchType.LAZY)
private Title title;
public Book() {
}
public Book(Long id, Title title) {
this.id = id;
this.title = title;
title.setBook( this );
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Title getTitle() {
return title;
}
}
@Entity(name = "Title")
public static
|
Book
|
java
|
apache__camel
|
components/camel-aws/camel-aws-config/src/main/java/org/apache/camel/component/aws/config/client/impl/AWSConfigClientStandardImpl.java
|
{
"start": 1888,
"end": 5150
}
|
class ____ implements AWSConfigInternalClient {
private static final Logger LOG = LoggerFactory.getLogger(AWSConfigClientStandardImpl.class);
private AWSConfigConfiguration configuration;
/**
* Constructor that uses the config file.
*/
public AWSConfigClientStandardImpl(AWSConfigConfiguration configuration) {
LOG.trace("Creating an AWS Config manager using static credentials.");
this.configuration = configuration;
}
/**
* Getting the Config AWS client that is used.
*
* @return Amazon Config Client.
*/
@Override
public ConfigClient getConfigClient() {
ConfigClient client = null;
ConfigClientBuilder clientBuilder = ConfigClient.builder();
ProxyConfiguration.Builder proxyConfig = null;
ApacheHttpClient.Builder httpClientBuilder = null;
boolean isClientConfigFound = false;
if (ObjectHelper.isNotEmpty(configuration.getProxyHost()) && ObjectHelper.isNotEmpty(configuration.getProxyPort())) {
proxyConfig = ProxyConfiguration.builder();
URI proxyEndpoint = URI.create(configuration.getProxyProtocol() + "://" + configuration.getProxyHost() + ":"
+ configuration.getProxyPort());
proxyConfig.endpoint(proxyEndpoint);
httpClientBuilder = ApacheHttpClient.builder().proxyConfiguration(proxyConfig.build());
isClientConfigFound = true;
}
if (configuration.getAccessKey() != null && configuration.getSecretKey() != null) {
AwsBasicCredentials cred = AwsBasicCredentials.create(configuration.getAccessKey(), configuration.getSecretKey());
if (isClientConfigFound) {
clientBuilder = clientBuilder.httpClientBuilder(httpClientBuilder)
.credentialsProvider(StaticCredentialsProvider.create(cred));
} else {
clientBuilder = clientBuilder.credentialsProvider(StaticCredentialsProvider.create(cred));
}
} else {
if (!isClientConfigFound) {
clientBuilder = clientBuilder.httpClientBuilder(httpClientBuilder);
}
}
if (ObjectHelper.isNotEmpty(configuration.getRegion())) {
clientBuilder = clientBuilder.region(Region.of(configuration.getRegion()));
}
if (configuration.isOverrideEndpoint()) {
clientBuilder.endpointOverride(URI.create(configuration.getUriEndpointOverride()));
}
if (configuration.isTrustAllCertificates()) {
if (httpClientBuilder == null) {
httpClientBuilder = ApacheHttpClient.builder();
}
SdkHttpClient ahc = httpClientBuilder.buildWithDefaults(AttributeMap
.builder()
.put(
SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES,
Boolean.TRUE)
.build());
// set created http client to use instead of builder
clientBuilder.httpClient(ahc);
clientBuilder.httpClientBuilder(null);
}
client = clientBuilder.build();
return client;
}
}
|
AWSConfigClientStandardImpl
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/MetricsAspectsAutoConfiguration.java
|
{
"start": 3168,
"end": 3791
}
|
class ____ {
@Bean
@ConditionalOnMissingBean
CountedMeterTagAnnotationHandler countedMeterTagAnnotationHandler(BeanFactory beanFactory,
ValueExpressionResolver valueExpressionResolver) {
return new CountedMeterTagAnnotationHandler(beanFactory::getBean, (ignored) -> valueExpressionResolver);
}
@Bean
@ConditionalOnMissingBean
MeterTagAnnotationHandler meterTagAnnotationHandler(BeanFactory beanFactory,
ValueExpressionResolver valueExpressionResolver) {
return new MeterTagAnnotationHandler(beanFactory::getBean, (ignored) -> valueExpressionResolver);
}
}
}
|
TagAnnotationHandlersConfiguration
|
java
|
elastic__elasticsearch
|
modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/AdjacencyMatrixIT.java
|
{
"start": 18235,
"end": 18838
}
|
class ____ extends HashMap<String, QueryBuilder> {
public MapBuilder add(String name, QueryBuilder builder) {
put(name, builder);
return this;
}
}
static AdjacencyMatrixAggregationBuilder adjacencyMatrix(String name, Map<String, QueryBuilder> filters) {
return new AdjacencyMatrixAggregationBuilder(name, filters);
}
static AdjacencyMatrixAggregationBuilder adjacencyMatrix(String name, String separator, Map<String, QueryBuilder> filters) {
return new AdjacencyMatrixAggregationBuilder(name, separator, filters);
}
}
|
MapBuilder
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AContentEncoding.java
|
{
"start": 2025,
"end": 3595
}
|
class ____ tests");
removeBaseAndBucketOverrides(conf, CONTENT_ENCODING);
conf.set(CONTENT_ENCODING, GZIP);
return conf;
}
@Test
public void testCreatedObjectsHaveEncoding() throws Throwable {
try {
S3AFileSystem fs = getFileSystem();
Path dir = methodPath();
fs.mkdirs(dir);
// even with content encoding enabled, directories do not have
// encoding.
Assertions.assertThat(getEncoding(dir))
.describedAs("Encoding of object %s", dir)
.isNull();
Path path = new Path(dir, "1");
ContractTestUtils.touch(fs, path);
assertObjectHasEncoding(path);
Path path2 = new Path(dir, "2");
fs.rename(path, path2);
assertObjectHasEncoding(path2);
} catch (AWSUnsupportedFeatureException e) {
LOG.warn("Object store does not support {} content encoding", GZIP, e);
raiseAsAssumption(e);
}
}
/**
* Assert that a given object has gzip encoding specified.
* @param path path
*
*/
private void assertObjectHasEncoding(Path path) throws Throwable {
Assertions.assertThat(getEncoding(path))
.describedAs("Encoding of object %s", path)
.isEqualTo(GZIP);
}
/**
* Get the encoding of a path.
* @param path path
* @return encoding string or null
* @throws IOException IO Failure.
*/
private String getEncoding(Path path) throws IOException {
S3AFileSystem fs = getFileSystem();
Map<String, byte[]> xAttrs = fs.getXAttrs(path);
return decodeBytes(xAttrs.get(XA_CONTENT_ENCODING));
}
}
|
ACL
|
java
|
apache__camel
|
components/camel-jooq/src/main/java/org/apache/camel/component/jooq/JooqConsumer.java
|
{
"start": 1290,
"end": 1375
}
|
class ____ extends ScheduledBatchPollingConsumer {
private static final
|
JooqConsumer
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/TestWriter.java
|
{
"start": 514,
"end": 1081
}
|
class ____ implements MessageBodyWriter<TestClass> {
@Override
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return true;
}
@Override
public void writeTo(TestClass t, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
throws IOException, WebApplicationException {
entityStream.write("WRITER".getBytes(StandardCharsets.UTF_8));
}
}
|
TestWriter
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java
|
{
"start": 2922,
"end": 3717
}
|
class ____ extends BucketCollector {
private int count = 0;
TotalHitCountBucketCollector() {}
@Override
public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx) {
return new LeafBucketCollector() {
@Override
public void collect(int doc, long bucket) throws IOException {
count++;
}
};
}
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public void preCollection() {}
@Override
public void postCollection() {}
int getTotalHits() {
return count;
}
}
private static
|
TotalHitCountBucketCollector
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/DefaultListableBeanFactoryTests.java
|
{
"start": 136735,
"end": 136875
}
|
class ____ {
public UnsatisfiedConstructorDependency(TestBean t, SideEffectBean b) {
}
}
public static
|
UnsatisfiedConstructorDependency
|
java
|
quarkusio__quarkus
|
integration-tests/test-extension/tests/src/test/java/io/quarkus/it/testsupport/commandmode/QuarkusMainTestWithTestProfileAndFailingApplicationTestCase.java
|
{
"start": 517,
"end": 774
}
|
class ____ {
@Test
@Launch(value = {}, exitCode = 1)
public void testLaunchCommand(LaunchResult result) {
assertThat(result.getOutput()).contains("dummy");
}
public static
|
QuarkusMainTestWithTestProfileAndFailingApplicationTestCase
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java
|
{
"start": 1015,
"end": 2710
}
|
class ____ implements Writeable, ToXContentObject {
public static final ParseField INDICES_FIELD = new ParseField("indices");
public static final ParseField FAILURE_FIELD = new ParseField("failure");
private final List<String> indices;
private final Exception exception;
public FieldCapabilitiesFailure(String[] indices, Exception exception) {
this.indices = new ArrayList<>(Arrays.asList(Objects.requireNonNull(indices)));
this.exception = Objects.requireNonNull(exception);
}
public FieldCapabilitiesFailure(StreamInput in) throws IOException {
this.indices = in.readStringCollectionAsList();
this.exception = in.readException();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.stringListField(INDICES_FIELD.getPreferredName(), indices);
builder.startObject(FAILURE_FIELD.getPreferredName());
{
ElasticsearchException.generateFailureXContent(builder, params, exception, true);
}
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringCollection(indices);
out.writeException(exception);
}
public String[] getIndices() {
return indices.toArray(String[]::new);
}
public Exception getException() {
return exception;
}
FieldCapabilitiesFailure addIndex(String index) {
this.indices.add(index);
return this;
}
}
|
FieldCapabilitiesFailure
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/KeyedProcessOperatorTest.java
|
{
"start": 22628,
"end": 23912
}
|
class ____
extends KeyedProcessFunction<Integer, Integer, String> {
private static final long serialVersionUID = 1L;
private final Integer expectedKey;
public BothTriggeringFlatMapFunction(Integer expectedKey) {
this.expectedKey = expectedKey;
}
@Override
public void processElement(Integer value, Context ctx, Collector<String> out)
throws Exception {
final TimerService timerService = ctx.timerService();
timerService.registerProcessingTimeTimer(3);
timerService.registerEventTimeTimer(4);
timerService.registerProcessingTimeTimer(5);
timerService.registerEventTimeTimer(6);
timerService.deleteProcessingTimeTimer(3);
timerService.deleteEventTimeTimer(4);
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out)
throws Exception {
assertThat(ctx.getCurrentKey()).isEqualTo(expectedKey);
if (TimeDomain.EVENT_TIME.equals(ctx.timeDomain())) {
out.collect("EVENT:1777");
} else {
out.collect("PROC:1777");
}
}
}
}
|
BothTriggeringFlatMapFunction
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/resource/ResourceUrlProviderTests.java
|
{
"start": 7512,
"end": 7921
}
|
class ____ {
@Bean
public SimpleUrlHandlerMapping simpleUrlHandlerMapping() {
return new SimpleUrlHandlerMapping(
Collections.singletonMap("/resources/**", new ResourceHttpRequestHandler()));
}
@Bean
public ResourceUrlProvider resourceUrlProvider() {
return new ResourceUrlProvider();
}
}
@Configuration
@SuppressWarnings({"unused", "WeakerAccess"})
static
|
HandlerMappingConfiguration
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/action/AnthropicActionCreator.java
|
{
"start": 1125,
"end": 2094
}
|
class ____ implements AnthropicActionVisitor {
private static final String ERROR_PREFIX = "Anthropic chat completions";
private final Sender sender;
private final ServiceComponents serviceComponents;
public AnthropicActionCreator(Sender sender, ServiceComponents serviceComponents) {
this.sender = Objects.requireNonNull(sender);
this.serviceComponents = Objects.requireNonNull(serviceComponents);
}
@Override
public ExecutableAction create(AnthropicChatCompletionModel model, Map<String, Object> taskSettings) {
var overriddenModel = AnthropicChatCompletionModel.of(model, taskSettings);
var requestCreator = AnthropicCompletionRequestManager.of(overriddenModel, serviceComponents.threadPool());
var errorMessage = constructFailedToSendRequestMessage(ERROR_PREFIX);
return new SingleInputSenderExecutableAction(sender, requestCreator, errorMessage, ERROR_PREFIX);
}
}
|
AnthropicActionCreator
|
java
|
micronaut-projects__micronaut-core
|
inject-java-test/src/test/groovy/io/micronaut/inject/visitor/beans/builder/TestBuildMe2.java
|
{
"start": 990,
"end": 1461
}
|
class ____ {
private Builder() {
}
private String name;
private int age;
public Builder name(String name) {
this.name = name;
return this;
}
public Builder age(int age) {
this.age = age;
return this;
}
public TestBuildMe2 build() {
return new TestBuildMe2(
name,
age
);
}
}
}
|
Builder
|
java
|
apache__flink
|
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSourceFactoryBase.java
|
{
"start": 3807,
"end": 9303
}
|
class ____ implements TableFactory {
@Override
public Map<String, String> requiredContext() {
Map<String, String> context = new HashMap<>();
context.put(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE);
context.put(TableFactoryService.FORMAT_TYPE, FORMAT_TYPE_VALUE);
context.put(CONNECTOR_PROPERTY_VERSION, "1");
context.put(TableFactoryService.FORMAT_PROPERTY_VERSION, "1");
return context;
}
public List<String> supportedProperties() {
List<String> properties = new ArrayList<>();
// connector
properties.add(CONNECTOR_PATH);
// format
properties.add(FORMAT_FIELDS + ".#." + DescriptorProperties.TYPE);
properties.add(FORMAT_FIELDS + ".#." + DescriptorProperties.DATA_TYPE);
properties.add(FORMAT_FIELDS + ".#." + DescriptorProperties.NAME);
properties.add(TableFactoryService.FORMAT_DERIVE_SCHEMA);
properties.add(FORMAT_FIELD_DELIMITER);
properties.add(FORMAT_LINE_DELIMITER);
properties.add(FORMAT_QUOTE_CHARACTER);
properties.add(FORMAT_COMMENT_PREFIX);
properties.add(FORMAT_IGNORE_FIRST_LINE);
properties.add(FORMAT_IGNORE_PARSE_ERRORS);
properties.add(CONNECTOR_PATH);
// schema
properties.add(SCHEMA + ".#." + DescriptorProperties.TYPE);
properties.add(SCHEMA + ".#." + DescriptorProperties.DATA_TYPE);
properties.add(SCHEMA + ".#." + DescriptorProperties.NAME);
properties.add(SCHEMA + ".#." + DescriptorProperties.EXPR);
// watermark
properties.add(SCHEMA + "." + WATERMARK + ".#." + WATERMARK_ROWTIME);
properties.add(SCHEMA + "." + WATERMARK + ".#." + WATERMARK_STRATEGY_EXPR);
properties.add(SCHEMA + "." + WATERMARK + ".#." + WATERMARK_STRATEGY_DATA_TYPE);
// index
properties.add(SCHEMA + "." + INDEX + ".#." + INDEX_NAME);
properties.add(SCHEMA + "." + INDEX + ".#." + INDEX_COLUMNS);
// table constraint
properties.add(SCHEMA + "." + DescriptorProperties.PRIMARY_KEY_NAME);
properties.add(SCHEMA + "." + DescriptorProperties.PRIMARY_KEY_COLUMNS);
// comment
properties.add(COMMENT);
return properties;
}
protected CsvTableSource createTableSource(
Boolean isStreaming, Map<String, String> properties) {
DescriptorProperties params = new DescriptorProperties();
params.putProperties(properties);
// validate
new FileSystemValidator().validate(params);
new OldCsvValidator().validate(params);
new SchemaValidator(isStreaming, false, false).validate(params);
// build
CsvTableSource.Builder csvTableSourceBuilder = new CsvTableSource.Builder();
TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));
// if a schema is defined, no matter derive schema is set or not, will use the defined
// schema
final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
if (hasSchema) {
TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
// the CsvTableSource needs some rework first
// for now the schema must be equal to the encoding
// Ignore conversion classes in DataType
if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
throw new TableException(
String.format(
"Encodings that differ from the schema are not supported yet for"
+ " CsvTableSource, format schema is '%s', but table schema is '%s'.",
formatSchema, tableSchema));
}
}
params.getOptionalString(CONNECTOR_PATH).ifPresent(csvTableSourceBuilder::path);
params.getOptionalString(FORMAT_FIELD_DELIMITER)
.ifPresent(csvTableSourceBuilder::fieldDelimiter);
params.getOptionalString(FORMAT_LINE_DELIMITER)
.ifPresent(csvTableSourceBuilder::lineDelimiter);
for (int i = 0; i < tableSchema.getFieldCount(); ++i) {
csvTableSourceBuilder.field(
tableSchema.getFieldNames()[i], tableSchema.getFieldDataTypes()[i]);
}
params.getOptionalCharacter(FORMAT_QUOTE_CHARACTER)
.ifPresent(csvTableSourceBuilder::quoteCharacter);
params.getOptionalString(FORMAT_COMMENT_PREFIX)
.ifPresent(csvTableSourceBuilder::commentPrefix);
params.getOptionalBoolean(FORMAT_IGNORE_FIRST_LINE)
.ifPresent(
flag -> {
if (flag) {
csvTableSourceBuilder.ignoreFirstLine();
}
});
params.getOptionalBoolean(FORMAT_IGNORE_PARSE_ERRORS)
.ifPresent(
flag -> {
if (flag) {
csvTableSourceBuilder.ignoreParseErrors();
}
});
return csvTableSourceBuilder.build();
}
public static List<LogicalType> getFieldLogicalTypes(TableSchema schema) {
return Arrays.stream(schema.getFieldDataTypes())
.map(DataType::getLogicalType)
.collect(Collectors.toList());
}
}
|
CsvTableSourceFactoryBase
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
|
{
"start": 17797,
"end": 17947
}
|
class ____ extends LocalFileSystem {
public LocalFileSystemExtn() {
super(new RawLocalFileSystemExtn());
}
}
static
|
LocalFileSystemExtn
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/resource/OAuth2ResourceServerConfigurerTests.java
|
{
"start": 83495,
"end": 83931
}
|
class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.sessionManagement((management) -> management
.sessionCreationPolicy(SessionCreationPolicy.ALWAYS))
.oauth2ResourceServer((server) -> server
.jwt(Customizer.withDefaults()));
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static
|
AlwaysSessionCreationConfig
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/creators/JsonCreatorModeForEnum3566Test.java
|
{
"start": 2064,
"end": 2666
}
|
enum ____ {
A("AType"),
B("BType");
private final String type;
EnumB(String type) {
this.type = type;
}
public String getType() {
return type;
}
@JsonCreator(mode = JsonCreator.Mode.DELEGATING)
public static EnumB fromString(String type) {
for (EnumB e : EnumB.values()) {
if(e.type.equals(type)) {
return e;
}
}
throw new RuntimeException();
}
}
@JsonFormat(shape = JsonFormat.Shape.OBJECT)
|
EnumB
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/SystemUtils.java
|
{
"start": 70779,
"end": 71264
}
|
class ____ loaded.
* </p>
*
* @since 2.4
*/
public static final boolean IS_OS_WINDOWS_VISTA = getOsNameMatches(OS_NAME_WINDOWS_PREFIX + " Vista");
/**
* The constant {@code true} if this is Windows 7.
* <p>
* The result depends on the value of the {@link #OS_NAME} constant.
* </p>
* <p>
* The field will return {@code false} if {@link #OS_NAME} is {@code null}.
* </p>
* <p>
* This value is initialized when the
|
is
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/TimestampITCase.java
|
{
"start": 3909,
"end": 31867
}
|
class ____ extends TestLogger {
@Rule public TemporaryFolder tmpFolder = new TemporaryFolder();
private static final int NUM_TASK_MANAGERS = 2;
private static final int NUM_TASK_SLOTS = 3;
private static final int PARALLELISM = NUM_TASK_MANAGERS * NUM_TASK_SLOTS;
// this is used in some tests to synchronize
static MultiShotLatch latch;
@ClassRule
public static final MiniClusterWithClientResource CLUSTER =
new MiniClusterWithClientResource(
new MiniClusterResourceConfiguration.Builder()
.setConfiguration(getConfiguration())
.setNumberTaskManagers(NUM_TASK_MANAGERS)
.setNumberSlotsPerTaskManager(NUM_TASK_SLOTS)
.build());
private static Configuration getConfiguration() {
Configuration config = new Configuration();
config.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.parse("12m"));
return config;
}
@Before
public void setupLatch() {
// ensure that we get a fresh latch for each test
latch = new MultiShotLatch();
}
/**
 * Verifies that custom timestamps emitted at the sources propagate, together with their
 * watermarks, through a complete topology (union, connect, map, custom operator).
 *
 * <p>Also verifies that watermarks keep flowing when one source finishes early.
 *
 * <p>Only map operators are used on purpose: the individual tasks and stream operators have
 * their own dedicated watermark-propagation tests.
 */
@Test
public void testWatermarkPropagation() throws Exception {
    final int numWatermarks = 10;
    final long initialTime = 0L;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);

    // one source emits all watermarks, the other stops after half of them
    DataStream<Integer> fullSource =
            env.addSource(new MyTimestampSource(initialTime, numWatermarks));
    DataStream<Integer> halfSource =
            env.addSource(new MyTimestampSource(initialTime, numWatermarks / 2));

    fullSource
            .union(halfSource)
            .map(new IdentityMap())
            .connect(halfSource)
            .map(new IdentityCoMap())
            .transform("Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .sinkTo(new DiscardingSink<>());

    env.execute();

    // every subtask of the final operator must have seen the watermarks of the shorter
    // source; the combined watermark cannot advance past what the slower source emitted
    for (int subtask = 0; subtask < PARALLELISM; subtask++) {
        // we are only guaranteed to see NUM_WATERMARKS / 2 watermarks because the
        // other source stops emitting after that
        for (int wm = 0; wm < numWatermarks / 2; wm++) {
            if (!CustomOperator.finalWatermarks[subtask]
                    .get(wm)
                    .equals(new Watermark(initialTime + wm))) {
                System.err.println("All Watermarks: ");
                for (int k = 0; k <= numWatermarks / 2; k++) {
                    System.err.println(CustomOperator.finalWatermarks[subtask].get(k));
                }
                fail("Wrong watermark.");
            }
        }

        // finite input: the very last watermark must be the MAX watermark
        assertEquals(
                Watermark.MAX_WATERMARK,
                CustomOperator.finalWatermarks[subtask].get(
                        CustomOperator.finalWatermarks[subtask].size() - 1));
    }
}
/**
 * Verifies that unioning a stream with itself still propagates watermarks: the finite input
 * must terminate with the MAX watermark at the downstream operator.
 */
@Test
public void testSelfUnionWatermarkPropagation() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    DataStream<Integer> input = env.fromData(1, 2, 3);
    input.union(input)
            .transform(
                    "Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(false))
            .sinkTo(new DiscardingSink<>());
    env.execute();

    // finite input: the last recorded watermark must be the MAX watermark
    List<Watermark> watermarks = CustomOperator.finalWatermarks[0];
    assertEquals(Watermark.MAX_WATERMARK, watermarks.get(watermarks.size() - 1));
}
/**
 * Verifies that when a job is stopped with a savepoint (a graceful stop, not a natural
 * end-of-input) the sources do NOT emit a final {@code Long.MAX_VALUE} watermark: the last
 * watermark observed downstream must be a regular one.
 */
@Test
public void testWatermarkPropagationNoFinalWatermarkOnStop() throws Exception {

    // for this test to work, we need to be sure that no other jobs are being executed
    final ClusterClient<?> clusterClient = CLUSTER.getClusterClient();
    while (!getRunningJobs(clusterClient).isEmpty()) {
        Thread.sleep(100);
    }

    final int numWatermarks = 10;
    long initialTime = 0L;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);

    // infinite sources: the job can only terminate through the stop-with-savepoint below
    DataStream<Integer> source1 =
            env.addSource(new MyTimestampSourceInfinite(initialTime, numWatermarks));
    DataStream<Integer> source2 =
            env.addSource(new MyTimestampSourceInfinite(initialTime, numWatermarks / 2));

    source1.union(source2)
            .map(new IdentityMap())
            .connect(source2)
            .map(new IdentityCoMap())
            .transform("Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .sinkTo(new DiscardingSink<Integer>());

    // background thread that waits until the job is running and then stops it with a
    // savepoint, retrying while the stop races with task deployment
    Thread t =
            new Thread("stopper") {
                @Override
                public void run() {
                    try {
                        // try until we get the running jobs
                        List<JobID> running = getRunningJobs(clusterClient);
                        while (running.isEmpty()) {
                            Thread.sleep(10);
                            running = getRunningJobs(clusterClient);
                        }

                        JobID id = running.get(0);

                        waitUntilAllTasksAreRunning(CLUSTER.getRestClusterClient(), id);

                        // send stop until the job is stopped
                        final String savepointDirName = tmpFolder.newFolder().getAbsolutePath();
                        do {
                            try {
                                clusterClient
                                        .stopWithSavepoint(
                                                id,
                                                false,
                                                savepointDirName,
                                                SavepointFormatType.CANONICAL)
                                        .get();
                            } catch (Exception e) {
                                // only swallow the failure if it is the expected race
                                // (NOT_ALL_REQUIRED_TASKS_RUNNING); anything else is fatal
                                boolean ignoreException =
                                        ExceptionUtils.findThrowable(
                                                        e, CheckpointException.class)
                                                .map(
                                                        CheckpointException
                                                                ::getCheckpointFailureReason)
                                                .map(
                                                        reason ->
                                                                reason
                                                                        == CheckpointFailureReason
                                                                                .NOT_ALL_REQUIRED_TASKS_RUNNING)
                                                .orElse(false);
                                if (!ignoreException) {
                                    throw e;
                                }
                            }
                            Thread.sleep(10);
                        } while (!getRunningJobs(clusterClient).isEmpty());
                    } catch (Throwable t) {
                        t.printStackTrace();
                    }
                }
            };
    t.start();

    env.execute();

    // verify that all the watermarks arrived at the final custom operator
    for (List<Watermark> subtaskWatermarks : CustomOperator.finalWatermarks) {

        // we are only guaranteed to see NUM_WATERMARKS / 2 watermarks because the
        // other source stops emitting after that
        for (int j = 0; j < subtaskWatermarks.size(); j++) {
            if (subtaskWatermarks.get(j).getTimestamp() != initialTime + j) {
                System.err.println("All Watermarks: ");
                for (int k = 0; k <= numWatermarks / 2; k++) {
                    System.err.println(subtaskWatermarks.get(k));
                }

                fail("Wrong watermark.");
            }
        }

        // if there are watermarks, the final one must not be the MAX watermark
        if (subtaskWatermarks.size() > 0) {
            assertNotEquals(
                    Watermark.MAX_WATERMARK,
                    subtaskWatermarks.get(subtaskWatermarks.size() - 1));
        }
    }

    t.join();
}
/**
 * Verifies that source-assigned timestamps survive network transmission and chained
 * operators when timestamps are enabled.
 */
@Test
public void testTimestampHandling() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);

    DataStream<Integer> left = env.addSource(new MyTimestampSource(0L, numElements));
    DataStream<Integer> right = env.addSource(new MyTimestampSource(0L, numElements));

    // the TimestampCheckingOperator asserts on the record timestamps downstream
    left.map(new IdentityMap())
            .connect(right)
            .map(new IdentityCoMap())
            .transform(
                    "Custom Operator",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new TimestampCheckingOperator())
            .sinkTo(new DiscardingSink<>());

    env.execute();
}
/**
 * Verifies that records carry no timestamps downstream when the source does not attach any.
 */
@Test
public void testDisabledTimestamps() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);

    DataStream<Integer> left = env.addSource(new MyNonWatermarkingSource(numElements));
    DataStream<Integer> right = env.addSource(new MyNonWatermarkingSource(numElements));

    // the DisabledTimestampCheckingOperator asserts that no timestamps are present
    left.map(new IdentityMap())
            .connect(right)
            .map(new IdentityCoMap())
            .transform(
                    "Custom Operator",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new DisabledTimestampCheckingOperator())
            .sinkTo(new DiscardingSink<>());

    env.execute();
}
/**
 * Verifies that timestamps are extracted by the timestamp extractor and that watermarks are
 * generated and forwarded automatically at the configured auto-watermark interval.
 */
@Test
public void testTimestampExtractorWithAutoInterval() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(10);
    env.setParallelism(1);

    // emits 1..numElements, waiting on the shared latch between elements
    DataStream<Integer> numbers =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            for (int i = 1; i <= numElements; i++) {
                                ctx.collect(i);
                                latch.await();
                            }
                        }

                        @Override
                        public void cancel() {}
                    });

    // the element value itself serves as the event timestamp
    DataStream<Integer> withTimestamps =
            numbers.assignTimestampsAndWatermarks(
                    AscendingRecordTimestampsWatermarkStrategy.create(Long::valueOf));

    withTimestamps
            .transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .transform(
                    "Timestamp Check",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new TimestampCheckingOperator());

    // verify that extractor picks up source parallelism
    Assert.assertEquals(
            withTimestamps.getTransformation().getParallelism(),
            numbers.getTransformation().getParallelism());

    env.execute();

    // verify that we get NUM_ELEMENTS watermarks
    for (int j = 0; j < numElements; j++) {
        Watermark seen = CustomOperator.finalWatermarks[0].get(j);
        if (!seen.equals(new Watermark(j))) {
            Assert.fail(
                    "Wrong watermark. Expected: "
                            + j
                            + " Found: "
                            + seen.getTimestamp()
                            + " All: "
                            + CustomOperator.finalWatermarks[0]);
        }
    }

    // the input is finite, so it should have a MAX Watermark
    assertEquals(
            Watermark.MAX_WATERMARK,
            CustomOperator.finalWatermarks[0].get(
                    CustomOperator.finalWatermarks[0].size() - 1));
}
/**
 * Verifies that timestamps are extracted by the timestamp extractor and that watermarks
 * emitted by a custom per-event {@code WatermarkGenerator} are forwarded correctly.
 */
@Test
public void testTimestampExtractorWithCustomWatermarkEmit() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(10);
    env.setParallelism(1);

    // emits 1..numElements, waiting on the shared latch between elements
    DataStream<Integer> numbers =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            for (int i = 1; i <= numElements; i++) {
                                ctx.collect(i);
                                latch.await();
                            }
                        }

                        @Override
                        public void cancel() {}
                    });

    // element value == event timestamp; watermark one below each element's timestamp
    WatermarkStrategy<Integer> perEventWatermarks =
            new WatermarkStrategy<Integer>() {
                @Override
                public TimestampAssigner<Integer> createTimestampAssigner(
                        TimestampAssignerSupplier.Context context) {
                    return (element, recordTimestamp) -> element;
                }

                @Override
                public WatermarkGenerator<Integer> createWatermarkGenerator(
                        WatermarkGeneratorSupplier.Context context) {
                    return new WatermarkGenerator<Integer>() {
                        @Override
                        public void onEvent(
                                Integer event, long eventTimestamp, WatermarkOutput output) {
                            output.emitWatermark(
                                    new org.apache.flink.api.common.eventtime.Watermark(
                                            eventTimestamp - 1));
                        }

                        @Override
                        public void onPeriodicEmit(WatermarkOutput output) {}
                    };
                }
            };

    numbers.assignTimestampsAndWatermarks(perEventWatermarks)
            .transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .transform(
                    "Timestamp Check",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new TimestampCheckingOperator());

    env.execute();

    // verify that we get NUM_ELEMENTS watermarks
    for (int j = 0; j < numElements; j++) {
        if (!CustomOperator.finalWatermarks[0].get(j).equals(new Watermark(j))) {
            Assert.fail("Wrong watermark.");
        }
    }

    // the input is finite, so it should have a MAX Watermark
    assertEquals(
            Watermark.MAX_WATERMARK,
            CustomOperator.finalWatermarks[0].get(
                    CustomOperator.finalWatermarks[0].size() - 1));
}
/**
 * This test verifies that the timestamp extractor does not emit decreasing watermarks.
 *
 * <p>The source deliberately emits elements out of order ({@code index}, then
 * {@code index - 1}); a naive per-event watermark would therefore decrease, which the
 * framework must suppress. Downstream we still expect the strictly ascending watermark
 * sequence 0..numElements-1 followed by the final MAX watermark.
 */
@Test
public void testTimestampExtractorWithDecreasingCustomWatermarkEmit() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(1);
    env.setParallelism(1);

    DataStream<Integer> source1 =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            int index = 1;
                            while (index <= numElements) {
                                ctx.collect(index);
                                // pause between the in-order and the out-of-order element
                                // (the auto watermark interval is set to 1 ms above)
                                Thread.sleep(100);
                                ctx.collect(index - 1);
                                latch.await();
                                index++;
                            }
                        }

                        @Override
                        public void cancel() {}
                    });

    source1.assignTimestampsAndWatermarks(
                    new WatermarkStrategy<Integer>() {
                        @Override
                        public TimestampAssigner<Integer> createTimestampAssigner(
                                TimestampAssignerSupplier.Context context) {
                            // the element value itself is used as the event timestamp
                            return (element, recordTimestamp) -> element;
                        }

                        @Override
                        public WatermarkGenerator<Integer> createWatermarkGenerator(
                                WatermarkGeneratorSupplier.Context context) {
                            return new WatermarkGenerator<Integer>() {
                                @Override
                                public void onEvent(
                                        Integer event,
                                        long eventTimestamp,
                                        WatermarkOutput output) {
                                    // per-event watermark one below the timestamp; for the
                                    // out-of-order element this would be a decreasing
                                    // watermark, which must not reach downstream
                                    output.emitWatermark(
                                            new org.apache.flink.api.common.eventtime.Watermark(
                                                    eventTimestamp - 1));
                                }

                                @Override
                                public void onPeriodicEmit(WatermarkOutput output) {}
                            };
                        }
                    })
            .transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .transform(
                    "Timestamp Check",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new TimestampCheckingOperator());

    env.execute();

    // verify that we get NUM_ELEMENTS watermarks
    for (int j = 0; j < numElements; j++) {
        if (!CustomOperator.finalWatermarks[0].get(j).equals(new Watermark(j))) {
            Assert.fail("Wrong watermark.");
        }
    }

    // the input is finite, so it should have a MAX Watermark
    assertEquals(
            Watermark.MAX_WATERMARK,
            CustomOperator.finalWatermarks[0].get(
                    CustomOperator.finalWatermarks[0].size() - 1));
}
/**
 * This test verifies that the timestamp extractor forwards Long.MAX_VALUE watermarks.
 *
 * <p>The source emits the MAX watermark twice; downstream we must observe exactly one.
 */
@Test
public void testTimestampExtractorWithLongMaxWatermarkFromSource() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(1);
    env.setParallelism(2);

    DataStream<Integer> source1 =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            int index = 1;
                            while (index <= numElements) {
                                ctx.collectWithTimestamp(index, index);
                                ctx.collectWithTimestamp(index - 1, index - 1);
                                index++;
                                ctx.emitWatermark(new Watermark(index - 2));
                            }

                            // emit the final Long.MAX_VALUE watermark, do it twice and
                            // verify that we only see one in the result
                            ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
                            ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
                        }

                        @Override
                        public void cancel() {}
                    });

    // the extractor itself generates no watermarks; only source watermarks pass through
    source1.assignTimestampsAndWatermarks(
                    (WatermarkStrategy<Integer>) context -> new NoWatermarksGenerator<>())
            .transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true));

    env.execute();

    // use assertEquals (not assertTrue on '==') so a failure reports the actual values
    assertEquals(1, CustomOperator.finalWatermarks[0].size());
    assertEquals(Long.MAX_VALUE, CustomOperator.finalWatermarks[0].get(0).getTimestamp());
}
/**
 * This test verifies that the timestamp extractor forwards Long.MAX_VALUE watermarks.
 *
 * <p>Same test as before, but using a different timestamp extractor.
 */
@Test
public void testTimestampExtractorWithLongMaxWatermarkFromSource2() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(10);
    env.setParallelism(2);

    DataStream<Integer> source1 =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            int index = 1;
                            while (index <= numElements) {
                                ctx.collectWithTimestamp(index, index);
                                ctx.collectWithTimestamp(index - 1, index - 1);
                                index++;
                                ctx.emitWatermark(new Watermark(index - 2));
                            }

                            // emit the final Long.MAX_VALUE watermark, do it twice and
                            // verify that we only see one in the result
                            ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
                            ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
                        }

                        @Override
                        public void cancel() {}
                    });

    // the extractor itself generates no watermarks; only source watermarks pass through
    source1.assignTimestampsAndWatermarks(
                    (WatermarkStrategy<Integer>) context -> new NoWatermarksGenerator<>())
            .transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true));

    env.execute();

    // use assertEquals (not assertTrue on '==') so a failure reports the actual values
    assertEquals(1, CustomOperator.finalWatermarks[0].size());
    assertEquals(Long.MAX_VALUE, CustomOperator.finalWatermarks[0].get(0).getTimestamp());
}
/**
 * Expects the job to fail: event-time windows are used but the input carries no
 * timestamps or watermarks.
 */
@Test
public void testErrorOnEventTimeOverProcessingTime() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(2);

    DataStream<Tuple2<String, Integer>> tuples =
            env.fromData(new Tuple2<>("a", 1), new Tuple2<>("b", 2));

    tuples.keyBy(x -> x.f0)
            .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
            .reduce(
                    new ReduceFunction<Tuple2<String, Integer>>() {
                        @Override
                        public Tuple2<String, Integer> reduce(
                                Tuple2<String, Integer> first, Tuple2<String, Integer> second) {
                            return first;
                        }
                    })
            .print();

    try {
        env.execute();
        fail("this should fail with an exception");
    } catch (Exception e) {
        // expected
    }
}
/**
 * Expects the job to fail: event-time windows are applied to a stream whose records have
 * no timestamps assigned.
 */
@Test
public void testErrorOnEventTimeWithoutTimestamps() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(2);

    DataStream<Tuple2<String, Integer>> tuples =
            env.fromData(new Tuple2<>("a", 1), new Tuple2<>("b", 2));

    tuples.keyBy(x -> x.f0)
            .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
            .reduce(
                    new ReduceFunction<Tuple2<String, Integer>>() {
                        @Override
                        public Tuple2<String, Integer> reduce(
                                Tuple2<String, Integer> first, Tuple2<String, Integer> second) {
                            return first;
                        }
                    })
            .print();

    try {
        env.execute();
        fail("this should fail with an exception");
    } catch (Exception e) {
        // expected
    }
}
// ------------------------------------------------------------------------
// Custom Operators and Functions
// ------------------------------------------------------------------------
@SuppressWarnings("unchecked")
private static
|
TimestampITCase
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/lifecycle/beanwithprivatepredestroy/B.java
|
{
"start": 813,
"end": 1317
}
|
class ____ implements Closeable {
boolean noArgsDestroyCalled = false;
boolean injectedDestroyCalled = false;
@Inject
protected A another;
private A a;
@Inject
void setA(A a ) {
this.a = a;
}
A getA() {
return a;
}
@Override
@PreDestroy
public void close() {
noArgsDestroyCalled = true;
}
@PreDestroy
private void another(C c) {
if(c != null) {
injectedDestroyCalled = true;
}
}
}
|
B
|
java
|
processing__processing4
|
java/test/processing/mode/java/preproc/MissingGenericTypeMessageSimplifierStrategyTest.java
|
{
"start": 272,
"end": 906
}
|
class ____ {
private PreprocessIssueMessageSimplifier.PreprocIssueMessageSimplifierStrategy strategy;
@Before
public void setup() {
strategy = PreprocessIssueMessageSimplifier.get().createInvalidGenericDefinitionStrategy();
}
@Test
public void testPresent() {
Optional<PdeIssueEmitter.IssueMessageSimplification> msg = strategy.simplify("<>'");
Assert.assertTrue(msg.isPresent());
}
@Test
public void testNotPresent() {
Optional<PdeIssueEmitter.IssueMessageSimplification> msg = strategy.simplify("class {");
Assert.assertTrue(msg.isEmpty());
}
}
|
MissingGenericTypeMessageSimplifierStrategyTest
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KserveComponentBuilderFactory.java
|
{
"start": 1426,
"end": 1928
}
|
interface ____ {
/**
* KServe (camel-kserve)
* Provide access to AI model servers with the KServe standard to run
* inference with remote models
*
* Category: ai
* Since: 4.10
* Maven coordinates: org.apache.camel:camel-kserve
*
* @return the dsl builder
*/
static KserveComponentBuilder kserve() {
return new KserveComponentBuilderImpl();
}
/**
* Builder for the KServe component.
*/
|
KserveComponentBuilderFactory
|
java
|
apache__camel
|
core/camel-api/src/generated/java/org/apache/camel/spi/ApiParams.java
|
{
"start": 1420,
"end": 2496
}
|
interface ____ {
/**
* The API name (grouping) of this configuration class.
*/
String apiName() default "";
/**
* Returns a description of the API.
*
* This is used for documentation and tooling only.
*/
String description() default "";
/**
* Whether this API can only be used as a producer.
*
* By default its assumed the API can be used as both consumer and producer.
*/
boolean producerOnly() default false;
/**
* Whether this API can only be used as a consumer.
*
* By default its assumed the API can be used as both consumer and producer.
*/
boolean consumerOnly() default false;
/**
* The API methods that the API provides of this configuration class.
*/
ApiMethod[] apiMethods();
/**
* Returns the method alias(s) of this api method. The syntax for an alias is pattern=name where pattern is a
* regular expression.
*
* This is used for documentation and tooling only.
*/
String[] aliases() default "";
}
|
ApiParams
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/EmbeddedRocksDBStateBackend.java
|
{
"start": 42753,
"end": 45749
}
|
class ____"
// to avoid that, we need to add a random element to the library file path
// (I know, seems like an unnecessary hack, since the JVM obviously can
// handle multiple
// instances of the same JNI library being loaded in different class
// loaders, but
// apparently not when coming from the same file path, so there we go)
rocksLibFolder = new File(tempDirParent, "rocksdb-lib-" + new AbstractID());
// make sure the temp path exists
LOG.debug(
"Attempting to create RocksDB native library folder {}",
rocksLibFolder);
// noinspection ResultOfMethodCallIgnored
rocksLibFolder.mkdirs();
// explicitly load the JNI dependency if it has not been loaded before
nativeLibraryLoaderSupplier
.get()
.loadLibrary(rocksLibFolder.getAbsolutePath());
// this initialization here should validate that the loading succeeded
RocksDB.loadLibrary();
// seems to have worked
LOG.info("Successfully loaded RocksDB native library");
rocksDbInitialized = true;
return;
} catch (Throwable t) {
lastException = t;
LOG.debug("RocksDB JNI library loading attempt {} failed", attempt, t);
// try to force RocksDB to attempt reloading the library
try {
resetRocksDBLoadedFlag();
} catch (Throwable tt) {
LOG.debug(
"Failed to reset 'initialized' flag in RocksDB native code loader",
tt);
}
FileUtils.deleteDirectoryQuietly(rocksLibFolder);
}
}
throw new IOException("Could not load the native RocksDB library", lastException);
}
}
}
@VisibleForTesting
static void resetRocksDBLoadedFlag() throws Exception {
final Field initField =
org.rocksdb.NativeLibraryLoader.class.getDeclaredField("initialized");
initField.setAccessible(true);
initField.setBoolean(null, false);
}
// ---------------------------------------------------------------------------------------------
// Enums
// ---------------------------------------------------------------------------------------------
/** The options to chose for the type of priority queue state. */
public
|
loader
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/main/java/io/quarkus/qute/TemplateInstanceBase.java
|
{
"start": 3061,
"end": 4898
}
|
class ____ implements Mapper {
private final Map<String, Object> map = new HashMap<>();
private Map<String, Function<String, Object>> computations = null;
void put(String key, Object value) {
map.put(key, value);
}
void computed(String key, Function<String, Object> function) {
if (!map.containsKey(key)) {
if (computations == null) {
computations = new HashMap<>();
}
computations.put(key, function);
}
}
@Override
public Object get(String key) {
Object val = map.get(key);
if (val == null) {
if (key.equals(DATA_MAP_KEY)) {
return true;
} else if (computations != null) {
Function<String, Object> fun = computations.get(key);
if (fun != null) {
return fun.apply(key);
}
}
}
return val;
}
@Override
public boolean appliesTo(String key) {
return map.containsKey(key) || (computations != null && computations.containsKey(key));
}
@Override
public Set<String> mappedKeys() {
Set<String> ret = new HashSet<>(map.keySet());
if (computations != null) {
ret.addAll(computations.keySet());
}
return ret;
}
public void forEachData(BiConsumer<String, Object> action) {
map.forEach(action);
}
public void forEachComputedData(BiConsumer<String, Function<String, Object>> action) {
if (computations != null) {
computations.forEach(action);
}
}
}
}
|
DataMap
|
java
|
spring-projects__spring-boot
|
integration-test/spring-boot-loader-integration-tests/spring-boot-loader-tests-app/src/main/java/org/springframework/boot/loaderapp/LoaderTestApplication.java
|
{
"start": 1361,
"end": 2780
}
|
class ____ {
@Bean
public CommandLineRunner commandLineRunner(ServletContext servletContext) {
return (args) -> {
File temp = new File(System.getProperty("java.io.tmpdir"));
URL resourceUrl = servletContext.getResource("webjars/jquery/3.5.0/jquery.js");
JarURLConnection connection = (JarURLConnection) resourceUrl.openConnection();
String jarName = connection.getJarFile().getName();
System.out.println(">>>>> jar file " + jarName);
if(jarName.contains(temp.getAbsolutePath())) {
System.out.println(">>>>> jar written to temp");
}
byte[] resourceContent = FileCopyUtils.copyToByteArray(resourceUrl.openStream());
URL directUrl = new URL(resourceUrl.toExternalForm());
byte[] directContent = FileCopyUtils.copyToByteArray(directUrl.openStream());
String message = (!Arrays.equals(resourceContent, directContent)) ? "NO MATCH"
: directContent.length + " BYTES";
System.out.println(">>>>> " + message + " from " + resourceUrl);
testGh7161();
};
}
private void testGh7161() {
try {
Resource resource = new ClassPathResource("gh-7161");
Path path = Paths.get(resource.getURI());
System.out.println(">>>>> gh-7161 " + Files.list(path).toList());
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
public static void main(String[] args) {
SpringApplication.run(LoaderTestApplication.class, args).close();
}
}
|
LoaderTestApplication
|
java
|
netty__netty
|
microbench/src/main/java/io/netty/buffer/ByteBufAccessBenchmark.java
|
{
"start": 1606,
"end": 2618
}
|
class ____ extends WrappedByteBuf {
private final ByteBuffer byteBuffer;
private final CleanableDirectBuffer cleanable;
NioFacade(CleanableDirectBuffer buffer) {
super(Unpooled.EMPTY_BUFFER);
byteBuffer = buffer.buffer();
cleanable = buffer;
}
@Override
public ByteBuf setLong(int index, long value) {
byteBuffer.putLong(index, value);
return this;
}
@Override
public long getLong(int index) {
return byteBuffer.getLong(index);
}
@Override
public byte readByte() {
return byteBuffer.get();
}
@Override
public ByteBuf touch() {
// hack since WrappedByteBuf.readerIndex(int) is final
byteBuffer.position(0);
return this;
}
@Override
public boolean release() {
cleanable.clean();
return true;
}
}
public
|
NioFacade
|
java
|
google__guava
|
android/guava/src/com/google/common/graph/DirectedNetworkConnections.java
|
{
"start": 1098,
"end": 2192
}
|
class ____<N, E> extends AbstractDirectedNetworkConnections<N, E> {
DirectedNetworkConnections(Map<E, N> inEdgeMap, Map<E, N> outEdgeMap, int selfLoopCount) {
super(inEdgeMap, outEdgeMap, selfLoopCount);
}
static <N, E> DirectedNetworkConnections<N, E> of() {
return new DirectedNetworkConnections<>(
HashBiMap.<E, N>create(EXPECTED_DEGREE), HashBiMap.<E, N>create(EXPECTED_DEGREE), 0);
}
static <N, E> DirectedNetworkConnections<N, E> ofImmutable(
Map<E, N> inEdges, Map<E, N> outEdges, int selfLoopCount) {
return new DirectedNetworkConnections<>(
ImmutableBiMap.copyOf(inEdges), ImmutableBiMap.copyOf(outEdges), selfLoopCount);
}
@Override
public Set<N> predecessors() {
return Collections.unmodifiableSet(((BiMap<E, N>) inEdgeMap).values());
}
@Override
public Set<N> successors() {
return Collections.unmodifiableSet(((BiMap<E, N>) outEdgeMap).values());
}
@Override
public Set<E> edgesConnecting(N node) {
return new EdgesConnecting<>(((BiMap<E, N>) outEdgeMap).inverse(), node);
}
}
|
DirectedNetworkConnections
|
java
|
apache__camel
|
components/camel-dhis2/camel-dhis2-component/src/test/java/org/apache/camel/component/dhis2/Dhis2GetIT.java
|
{
"start": 1619,
"end": 3771
}
|
class ____ extends AbstractDhis2TestSupport {
private static final Logger LOG = LoggerFactory.getLogger(Dhis2GetIT.class);
private static final String PATH_PREFIX = Dhis2ApiCollection.getCollection().getApiName(Dhis2GetApiMethod.class).getName();
@Test
public void testCollection() {
final Map<String, Object> headers = new HashMap<>();
headers.put("CamelDhis2.path", "organisationUnits");
headers.put("CamelDhis2.arrayName", "organisationUnits");
headers.put("CamelDhis2.paging", true);
headers.put("CamelDhis2.fields", null);
headers.put("CamelDhis2.filter", null);
headers.put("CamelDhis2.queryParams", new HashMap<>());
final List<OrganisationUnit> result = requestBodyAndHeaders("direct://COLLECTION", null, headers);
assertTrue(result.size() >= 2);
LOG.debug("collection: {}", result);
}
@Test
public void testResource() {
final Map<String, Object> headers = new HashMap<>();
headers.put("CamelDhis2.path", String.format("organisationUnits/%s", Environment.ORG_UNIT_ID_UNDER_TEST));
headers.put("CamelDhis2.fields", null);
headers.put("CamelDhis2.filter", null);
headers.put("CamelDhis2.queryParams", null);
final java.io.InputStream result = requestBodyAndHeaders("direct://RESOURCE", null, headers);
assertNotNull(result, "resource result");
LOG.debug("Result: {}", result);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() {
// test route for collection
from("direct://COLLECTION")
.to("dhis2://" + PATH_PREFIX + "/collection?paging=false")
.split().body().aggregationStrategy(new GroupedBodyAggregationStrategy())
.convertBodyTo(OrganisationUnit.class);
// test route for resource
from("direct://RESOURCE")
.to("dhis2://" + PATH_PREFIX + "/resource");
}
};
}
}
|
Dhis2GetIT
|
java
|
apache__kafka
|
clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientsTestUtils.java
|
{
"start": 2979,
"end": 14015
}
|
class ____ {
private static final String KEY_PREFIX = "key ";
private static final String VALUE_PREFIX = "value ";
private ClientsTestUtils() {}
public static <K, V> List<ConsumerRecord<K, V>> consumeRecords(
Consumer<K, V> consumer,
int numRecords
) throws InterruptedException {
return consumeRecords(consumer, numRecords, Integer.MAX_VALUE);
}
public static <K, V> List<ConsumerRecord<K, V>> consumeRecords(
Consumer<K, V> consumer,
int numRecords,
int maxPollRecords
) throws InterruptedException {
List<ConsumerRecord<K, V>> consumedRecords = new ArrayList<>();
TestUtils.waitForCondition(() -> {
var records = consumer.poll(Duration.ofMillis(100));
records.forEach(consumedRecords::add);
assertTrue(records.count() <= maxPollRecords);
return consumedRecords.size() >= numRecords;
}, 60000, "Timed out before consuming expected " + numRecords + " records.");
return consumedRecords;
}
public static void consumeAndVerifyRecords(
Consumer<byte[], byte[]> consumer,
TopicPartition tp,
int numRecords,
int startingOffset,
int startingKeyAndValueIndex,
long startingTimestamp,
long timestampIncrement
) throws InterruptedException {
consumeAndVerifyRecords(
consumer,
tp,
numRecords,
Integer.MAX_VALUE,
startingOffset,
startingKeyAndValueIndex,
startingTimestamp,
timestampIncrement
);
}
public static void pollUntilTrue(
Consumer<byte[], byte[]> consumer,
Supplier<Boolean> testCondition,
String msg
) throws InterruptedException {
pollUntilTrue(consumer, Duration.ofMillis(100), testCondition, 15_000L, msg);
}
public static void pollUntilTrue(
Consumer<byte[], byte[]> consumer,
Supplier<Boolean> testCondition,
long waitTimeMs,
String msg
) throws InterruptedException {
pollUntilTrue(consumer, Duration.ofMillis(100), testCondition, waitTimeMs, msg);
}
public static void pollUntilTrue(
Consumer<byte[], byte[]> consumer,
Duration timeout,
Supplier<Boolean> testCondition,
long waitTimeMs,
String msg
) throws InterruptedException {
TestUtils.waitForCondition(() -> {
consumer.poll(timeout);
return testCondition.get();
}, waitTimeMs, msg);
}
public static void consumeAndVerifyRecordsWithTimeTypeLogAppend(
Consumer<byte[], byte[]> consumer,
TopicPartition tp,
int numRecords,
long startingTimestamp
) throws InterruptedException {
var records = consumeRecords(consumer, numRecords, Integer.MAX_VALUE);
var now = System.currentTimeMillis();
for (var i = 0; i < numRecords; i++) {
var record = records.get(i);
assertEquals(tp.topic(), record.topic());
assertEquals(tp.partition(), record.partition());
assertTrue(record.timestamp() >= startingTimestamp && record.timestamp() <= now,
"Got unexpected timestamp " + record.timestamp() + ". Timestamp should be between [" + startingTimestamp + ", " + now + "]");
assertEquals(i, record.offset());
assertEquals(KEY_PREFIX + i, new String(record.key()));
assertEquals(VALUE_PREFIX + i, new String(record.value()));
// this is true only because K and V are byte arrays
assertEquals((KEY_PREFIX + i).length(), record.serializedKeySize());
assertEquals((VALUE_PREFIX + i).length(), record.serializedValueSize());
}
}
public static void consumeAndVerifyRecords(
Consumer<byte[], byte[]> consumer,
TopicPartition tp,
int numRecords,
int maxPollRecords,
int startingOffset,
int startingKeyAndValueIndex,
long startingTimestamp,
long timestampIncrement
) throws InterruptedException {
var records = consumeRecords(consumer, numRecords, maxPollRecords);
for (var i = 0; i < numRecords; i++) {
var record = records.get(i);
var offset = startingOffset + i;
assertEquals(tp.topic(), record.topic());
assertEquals(tp.partition(), record.partition());
assertEquals(TimestampType.CREATE_TIME, record.timestampType());
var timestamp = startingTimestamp + i * (timestampIncrement > 0 ? timestampIncrement : 1);
assertEquals(timestamp, record.timestamp());
assertEquals(offset, record.offset());
var keyAndValueIndex = startingKeyAndValueIndex + i;
assertEquals(KEY_PREFIX + keyAndValueIndex, new String(record.key()));
assertEquals(VALUE_PREFIX + keyAndValueIndex, new String(record.value()));
// this is true only because K and V are byte arrays
assertEquals((KEY_PREFIX + keyAndValueIndex).length(), record.serializedKeySize());
assertEquals((VALUE_PREFIX + keyAndValueIndex).length(), record.serializedValueSize());
}
}
public static void consumeAndVerifyRecords(
Consumer<byte[], byte[]> consumer,
TopicPartition tp,
int numRecords,
int startingOffset,
int startingKeyAndValueIndex,
long startingTimestamp
) throws InterruptedException {
consumeAndVerifyRecords(consumer, tp, numRecords, startingOffset, startingKeyAndValueIndex, startingTimestamp, -1);
}
public static void consumeAndVerifyRecords(
Consumer<byte[], byte[]> consumer,
TopicPartition tp,
int numRecords,
int startingOffset
) throws InterruptedException {
consumeAndVerifyRecords(consumer, tp, numRecords, startingOffset, 0, 0, -1);
}
public static void sendRecords(
ClusterInstance cluster,
TopicPartition tp,
int numRecords,
long startingTimestamp,
long timestampIncrement
) {
try (Producer<byte[], byte[]> producer = cluster.producer()) {
for (var i = 0; i < numRecords; i++) {
sendRecord(producer, tp, startingTimestamp, i, timestampIncrement);
}
producer.flush();
}
}
public static void sendRecords(
ClusterInstance cluster,
TopicPartition tp,
int numRecords,
long startingTimestamp
) {
sendRecords(cluster, tp, numRecords, startingTimestamp, -1);
}
public static void sendRecords(
ClusterInstance cluster,
TopicPartition tp,
int numRecords
) {
sendRecords(cluster, tp, numRecords, System.currentTimeMillis());
}
public static List<ProducerRecord<byte[], byte[]>> sendRecords(
Producer<byte[], byte[]> producer,
TopicPartition tp,
int numRecords,
long startingTimestamp,
long timestampIncrement
) {
List<ProducerRecord<byte[], byte[]>> records = new ArrayList<>();
for (var i = 0; i < numRecords; i++) {
var record = sendRecord(producer, tp, startingTimestamp, i, timestampIncrement);
records.add(record);
}
producer.flush();
return records;
}
public static void sendRecords(
Producer<byte[], byte[]> producer,
TopicPartition tp,
int numRecords,
long startingTimestamp
) {
for (var i = 0; i < numRecords; i++) {
sendRecord(producer, tp, startingTimestamp, i, -1);
}
producer.flush();
}
public static void awaitAssignment(
Consumer<byte[], byte[]> consumer,
Set<TopicPartition> expectedAssignment
) throws InterruptedException {
TestUtils.waitForCondition(() -> {
consumer.poll(Duration.ofMillis(100));
return consumer.assignment().equals(expectedAssignment);
}, () -> "Timed out while awaiting expected assignment " + expectedAssignment + ". " +
"The current assignment is " + consumer.assignment()
);
}
private static ProducerRecord<byte[], byte[]> sendRecord(
Producer<byte[], byte[]> producer,
TopicPartition tp,
long startingTimestamp,
int numRecord,
long timestampIncrement
) {
var timestamp = startingTimestamp + numRecord * (timestampIncrement > 0 ? timestampIncrement : 1);
var record = new ProducerRecord<>(
tp.topic(),
tp.partition(),
timestamp,
(KEY_PREFIX + numRecord).getBytes(),
(VALUE_PREFIX + numRecord).getBytes()
);
producer.send(record);
return record;
}
public static <K, V> void sendAndAwaitAsyncCommit(
Consumer<K, V> consumer,
Optional<Map<TopicPartition, OffsetAndMetadata>> offsetsOpt
) throws InterruptedException {
var commitCallback = new RetryCommitCallback<>(consumer, offsetsOpt);
sendAsyncCommit(consumer, commitCallback, offsetsOpt);
TestUtils.waitForCondition(() -> {
consumer.poll(Duration.ofMillis(100));
return commitCallback.isComplete;
}, "Failed to observe commit callback before timeout");
assertEquals(Optional.empty(), commitCallback.error);
}
public static void awaitRebalance(
Consumer<byte[], byte[]> consumer,
TestConsumerReassignmentListener rebalanceListener
) throws InterruptedException {
var numReassignments = rebalanceListener.callsToAssigned;
TestUtils.waitForCondition(() -> {
consumer.poll(Duration.ofMillis(100));
return rebalanceListener.callsToAssigned > numReassignments;
}, "Timed out before expected rebalance completed");
}
public static void ensureNoRebalance(
Consumer<byte[], byte[]> consumer,
TestConsumerReassignmentListener rebalanceListener
) throws InterruptedException {
// The best way to verify that the current membership is still active is to commit offsets.
// This would fail if the group had rebalanced.
var initialRevokeCalls = rebalanceListener.callsToRevoked;
sendAndAwaitAsyncCommit(consumer, Optional.empty());
assertEquals(initialRevokeCalls, rebalanceListener.callsToRevoked);
}
public static void waitForPollThrowException(
Consumer<byte[], byte[]> consumer,
Class<? extends Exception> exceptedException
) throws InterruptedException {
TestUtils.waitForCondition(() -> {
try {
consumer.poll(Duration.ZERO);
return false;
} catch (Exception e) {
return exceptedException.isInstance(e);
}
}, "Continuous poll not fail");
}
/**
* This
|
ClientsTestUtils
|
java
|
apache__rocketmq
|
example/src/main/java/org/apache/rocketmq/example/simple/PullScheduleService.java
|
{
"start": 1353,
"end": 2961
}
|
class ____ {
public static void main(String[] args) throws MQClientException {
final MQPullConsumerScheduleService scheduleService = new MQPullConsumerScheduleService("GroupName1");
scheduleService.setMessageModel(MessageModel.CLUSTERING);
scheduleService.registerPullTaskCallback("TopicTest", new PullTaskCallback() {
@Override
public void doPullTask(MessageQueue mq, PullTaskContext context) {
MQPullConsumer consumer = context.getPullConsumer();
try {
long offset = consumer.fetchConsumeOffset(mq, false);
if (offset < 0)
offset = 0;
PullResult pullResult = consumer.pull(mq, "*", offset, 32);
System.out.printf("%s%n", offset + "\t" + mq + "\t" + pullResult);
switch (pullResult.getPullStatus()) {
case FOUND:
break;
case NO_MATCHED_MSG:
break;
case NO_NEW_MSG:
case OFFSET_ILLEGAL:
break;
default:
break;
}
consumer.updateConsumeOffset(mq, pullResult.getNextBeginOffset());
context.setPullNextDelayTimeMillis(100);
} catch (Exception e) {
e.printStackTrace();
}
}
});
scheduleService.start();
}
}
|
PullScheduleService
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/EntityNamingSourceImpl.java
|
{
"start": 404,
"end": 1325
}
|
class ____ implements EntityNamingSource {
private final String entityName;
private final String className;
private final String jpaEntityName;
private final String typeName;
public EntityNamingSourceImpl(String entityName, String className, String jpaEntityName) {
this.entityName = entityName;
this.className = className;
this.jpaEntityName = jpaEntityName;
this.typeName = isNotEmpty( className ) ? className : entityName;
}
public EntityNamingSourceImpl(PersistentClass entityBinding) {
this( entityBinding.getEntityName(),
entityBinding.getClassName(),
entityBinding.getJpaEntityName() );
}
@Override
public String getEntityName() {
return entityName;
}
@Override
public String getClassName() {
return className;
}
@Override
public String getJpaEntityName() {
return jpaEntityName;
}
@Override
public String getTypeName() {
return typeName;
}
}
|
EntityNamingSourceImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/PromqlBaseParser.java
|
{
"start": 30489,
"end": 32668
}
|
class ____ extends ParserRuleContext {
public FunctionContext function() {
return getRuleContext(FunctionContext.class,0);
}
public SelectorContext selector() {
return getRuleContext(SelectorContext.class,0);
}
public ConstantContext constant() {
return getRuleContext(ConstantContext.class,0);
}
@SuppressWarnings("this-escape")
public ValueContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_value; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof PromqlBaseParserListener ) ((PromqlBaseParserListener)listener).enterValue(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof PromqlBaseParserListener ) ((PromqlBaseParserListener)listener).exitValue(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof PromqlBaseParserVisitor ) return ((PromqlBaseParserVisitor<? extends T>)visitor).visitValue(this);
else return visitor.visitChildren(this);
}
}
public final ValueContext value() throws RecognitionException {
ValueContext _localctx = new ValueContext(_ctx, getState());
enterRule(_localctx, 6, RULE_value);
try {
setState(132);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
setState(129);
function();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
setState(130);
selector();
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
setState(131);
constant();
}
break;
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static
|
ValueContext
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnusedExceptionTest.java
|
{
"start": 7340,
"end": 7842
}
|
class ____ {",
" void test() {",
" try {",
" } catch (Exception e) {",
// Not refactored as MyException(int, Throwable) isn't visible.
" throw new MyException(1);",
" }",
" }",
"}")
.expectUnchanged()
.doTest();
}
@Test
public void interruptedException_noFinding() {
compilationHelper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/bind/ArgumentBinderRegistry.java
|
{
"start": 836,
"end": 2525
}
|
interface ____<S> {
/**
* Adds a request argument binder to the registry.
* @param binder The binder
* @param <T> The argument type
* @param <ST> The source type
* @since 2.0
* @deprecated replaced with {@link #addArgumentBinder(ArgumentBinder)}
*/
@Deprecated(since = "4", forRemoval = true)
default <T, ST> void addRequestArgumentBinder(ArgumentBinder<T, ST> binder) {
addArgumentBinder((ArgumentBinder) binder);
}
/**
* Adds a request argument binder to the registry.
* @param binder The binder
* @param <T> The argument type
* @since 4.0.0
*/
default <T> void addArgumentBinder(ArgumentBinder<T, S> binder) {
throw new UnsupportedOperationException("Binder registry is not mutable");
}
/**
* Locate an {@link ArgumentBinder} for the given argument and source type.
*
* @param argument The argument
* @param source The source
* @param <T> The argument type
* @return An {@link Optional} of {@link ArgumentBinder}
* @deprecated replaced with {@link #findArgumentBinder(Argument)}
*/
@Deprecated(since = "4", forRemoval = true)
default <T> Optional<ArgumentBinder<T, S>> findArgumentBinder(Argument<T> argument, S source) {
return findArgumentBinder(argument);
}
/**
* Locate an {@link ArgumentBinder} for the given argument.
*
* @param argument The argument
* @param <T> The argument type
* @return An {@link Optional} of {@link ArgumentBinder}
* @since 4.0.0
*/
<T> Optional<ArgumentBinder<T, S>> findArgumentBinder(Argument<T> argument);
}
|
ArgumentBinderRegistry
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/taskmanager/TaskManagerThreadDumpHeaders.java
|
{
"start": 1362,
"end": 2711
}
|
class ____
implements RuntimeMessageHeaders<
EmptyRequestBody, ThreadDumpInfo, TaskManagerMessageParameters> {
private static final TaskManagerThreadDumpHeaders INSTANCE = new TaskManagerThreadDumpHeaders();
private static final String URL =
String.format("/taskmanagers/:%s/thread-dump", TaskManagerIdPathParameter.KEY);
private TaskManagerThreadDumpHeaders() {}
@Override
public Class<EmptyRequestBody> getRequestClass() {
return EmptyRequestBody.class;
}
@Override
public TaskManagerMessageParameters getUnresolvedMessageParameters() {
return new TaskManagerMessageParameters();
}
@Override
public HttpMethodWrapper getHttpMethod() {
return HttpMethodWrapper.GET;
}
@Override
public String getTargetRestEndpointURL() {
return URL;
}
public static TaskManagerThreadDumpHeaders getInstance() {
return INSTANCE;
}
@Override
public Class<ThreadDumpInfo> getResponseClass() {
return ThreadDumpInfo.class;
}
@Override
public HttpResponseStatus getResponseStatusCode() {
return HttpResponseStatus.OK;
}
@Override
public String getDescription() {
return "Returns the thread dump of the requested TaskManager.";
}
}
|
TaskManagerThreadDumpHeaders
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/builditem/DockerStatusBuildItem.java
|
{
"start": 542,
"end": 1225
}
|
interface ____ check if the Docker runtime is working
*/
public DockerStatusBuildItem(IsDockerWorking isDockerWorking) {
super(isDockerWorking);
}
/**
* Checks if the Docker runtime is available.
* <p>
* This method is deprecated and will be removed in a future release.
* Use {@link #isContainerRuntimeAvailable()} instead.
* </p>
*
* @return {@code true} if the Docker runtime is available, {@code false} otherwise
* @deprecated Use {@link #isContainerRuntimeAvailable()} instead
*/
@Deprecated(forRemoval = true)
public boolean isDockerAvailable() {
return isContainerRuntimeAvailable();
}
}
|
to
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/DisabledProcessorTest.java
|
{
"start": 1014,
"end": 2080
}
|
class ____ extends ContextTestSupport {
@Test
public void testDisabled() throws Exception {
getMockEndpoint("mock:foo").expectedMessageCount(0);
getMockEndpoint("mock:bar").expectedMessageCount(1);
getMockEndpoint("mock:baz").expectedMessageCount(0);
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
// the EIPs are disabled but there are still 4 outputs
Assertions.assertEquals(4, context.getRouteDefinitions().get(0).getOutputs().size());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.to("mock:foo").disabled()
.to("mock:bar").disabled(false)
.to("mock:baz").disabled(true)
.to("mock:result");
}
};
}
}
|
DisabledProcessorTest
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 300056,
"end": 306453
}
|
class ____ extends YamlDeserializerBase<DeleteDefinition> {
public DeleteDefinitionDeserializer() {
super(DeleteDefinition.class);
}
@Override
protected DeleteDefinition newInstance() {
return new DeleteDefinition();
}
@Override
protected boolean setProperty(DeleteDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "apiDocs": {
String val = asText(node);
target.setApiDocs(val);
break;
}
case "bindingMode": {
String val = asText(node);
target.setBindingMode(val);
break;
}
case "clientRequestValidation": {
String val = asText(node);
target.setClientRequestValidation(val);
break;
}
case "clientResponseValidation": {
String val = asText(node);
target.setClientResponseValidation(val);
break;
}
case "consumes": {
String val = asText(node);
target.setConsumes(val);
break;
}
case "deprecated": {
String val = asText(node);
target.setDeprecated(val);
break;
}
case "disabled": {
String val = asText(node);
target.setDisabled(val);
break;
}
case "enableCORS": {
String val = asText(node);
target.setEnableCORS(val);
break;
}
case "enableNoContentResponse": {
String val = asText(node);
target.setEnableNoContentResponse(val);
break;
}
case "outType": {
String val = asText(node);
target.setOutType(val);
break;
}
case "param": {
java.util.List<org.apache.camel.model.rest.ParamDefinition> val = asFlatList(node, org.apache.camel.model.rest.ParamDefinition.class);
target.setParams(val);
break;
}
case "path": {
String val = asText(node);
target.setPath(val);
break;
}
case "produces": {
String val = asText(node);
target.setProduces(val);
break;
}
case "responseMessage": {
java.util.List<org.apache.camel.model.rest.ResponseMessageDefinition> val = asFlatList(node, org.apache.camel.model.rest.ResponseMessageDefinition.class);
target.setResponseMsgs(val);
break;
}
case "routeId": {
String val = asText(node);
target.setRouteId(val);
break;
}
case "security": {
java.util.List<org.apache.camel.model.rest.SecurityDefinition> val = asFlatList(node, org.apache.camel.model.rest.SecurityDefinition.class);
target.setSecurity(val);
break;
}
case "skipBindingOnErrorCode": {
String val = asText(node);
target.setSkipBindingOnErrorCode(val);
break;
}
case "streamCache": {
String val = asText(node);
target.setStreamCache(val);
break;
}
case "to": {
org.apache.camel.model.ToDefinition val = asType(node, org.apache.camel.model.ToDefinition.class);
target.setTo(val);
break;
}
case "type": {
String val = asText(node);
target.setType(val);
break;
}
case "id": {
String val = asText(node);
target.setId(val);
break;
}
case "description": {
String val = asText(node);
target.setDescription(val);
break;
}
case "note": {
String val = asText(node);
target.setNote(val);
break;
}
default: {
return false;
}
}
return true;
}
}
@YamlType(
nodes = "dfdl",
inline = true,
types = org.apache.camel.model.dataformat.DfdlDataFormat.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "DFDL",
description = "Transforms fixed format data such as EDI message from/to XML using a Data Format Description Language (DFDL).",
deprecated = false,
properties = {
@YamlProperty(name = "id", type = "string", description = "The id of this node", displayName = "Id"),
@YamlProperty(name = "rootElement", type = "string", description = "The root element name of the schema to use. If not specified, the first root element in the schema will be used.", displayName = "Root Element"),
@YamlProperty(name = "rootNamespace", type = "string", description = "The root namespace of the schema to use.", displayName = "Root Namespace"),
@YamlProperty(name = "schemaUri", type = "string", required = true, description = "The path to the DFDL schema file.", displayName = "Schema Uri")
}
)
public static
|
DeleteDefinitionDeserializer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java
|
{
"start": 9659,
"end": 10785
}
|
class ____ implements MutableLicenseService {
@Override
public void registerLicense(PutLicenseRequest request, ActionListener<PutLicenseResponse> listener) {}
@Override
public void removeLicense(
TimeValue masterNodeTimeout,
TimeValue ackTimeout,
ActionListener<? extends AcknowledgedResponse> listener
) {}
@Override
public void startBasicLicense(PostStartBasicRequest request, ActionListener<PostStartBasicResponse> listener) {}
@Override
public void startTrialLicense(PostStartTrialRequest request, ActionListener<PostStartTrialResponse> listener) {}
@Override
public License getLicense() {
return license;
}
@Override
public Lifecycle.State lifecycleState() {
return null;
}
@Override
public void addLifecycleListener(LifecycleListener listener) {}
@Override
public void start() {}
@Override
public void stop() {}
@Override
public void close() {}
}
}
|
TestLicenseService
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/ComposedMessageProcessorTest.java
|
{
"start": 5874,
"end": 6445
}
|
class ____ implements AggregationStrategy {
@Override
public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
if (oldExchange == null) {
return newExchange;
}
List<OrderItem> order = new ArrayList<>(2);
order.add(oldExchange.getIn().getBody(OrderItem.class));
order.add(newExchange.getIn().getBody(OrderItem.class));
oldExchange.getIn().setBody(order);
return oldExchange;
}
}
// END SNIPPET: e7
}
|
MyOrderAggregationStrategy
|
java
|
netty__netty
|
codec-http2/src/main/java/io/netty/handler/codec/http2/Http2FrameCodec.java
|
{
"start": 35770,
"end": 36391
}
|
class ____ implements Http2RemoteFlowController.Listener {
@Override
public void writabilityChanged(Http2Stream stream) {
DefaultHttp2FrameStream frameStream = stream.getProperty(streamKey);
if (frameStream == null) {
return;
}
onHttp2StreamWritabilityChanged(
ctx, frameStream, connection().remote().flowController().isWritable(stream));
}
}
/**
* {@link Http2FrameStream} implementation.
*/
// TODO(buchgr): Merge Http2FrameStream and Http2Stream.
static
|
Http2RemoteFlowControllerListener
|
java
|
apache__camel
|
dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/download/TypeConverterLoaderDownloadListener.java
|
{
"start": 1386,
"end": 4271
}
|
class ____ implements ArtifactDownloadListener, CamelContextAware {
private static final Logger LOG = LoggerFactory.getLogger(TypeConverterLoaderDownloadListener.class);
private CamelContext camelContext;
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public void onDownloadedFile(File file) {
try {
loadTypeConverters(file);
} catch (Exception e) {
// ignore
}
}
protected void loadTypeConverters(File file) throws Exception {
// use isolated classloader to load the service file as we only want to check this file
// (and not what is already in the existing classloader)
DependencyDownloaderClassLoader cl = new DependencyDownloaderClassLoader(null);
cl.addFile(file);
// load names for custom type converters from the downloaded JAR
Collection<String> loaders = new ArrayList<>();
findTypeConverterLoaderClasses(loaders,
cl.getResourceAsStream(BaseTypeConverterRegistry.META_INF_SERVICES_TYPE_CONVERTER_LOADER));
findTypeConverterLoaderClasses(loaders,
cl.getResourceAsStream(BaseTypeConverterRegistry.META_INF_SERVICES_FALLBACK_TYPE_CONVERTER));
loadTypeConverters(loaders);
}
protected void findTypeConverterLoaderClasses(Collection<String> loaders, InputStream is) throws IOException {
if (is != null) {
BufferedReader reader = IOHelper.buffered(new InputStreamReader(is, StandardCharsets.UTF_8));
String line;
do {
line = reader.readLine();
if (line != null && !line.startsWith("#") && !line.isEmpty()) {
loaders.add(line);
}
} while (line != null);
IOHelper.close(reader);
}
}
protected void loadTypeConverters(Collection<String> loaders) throws ClassNotFoundException {
for (String name : loaders) {
LOG.debug("Resolving TypeConverterLoader: {}", name);
Class<?> clazz = getCamelContext().getClassResolver().resolveMandatoryClass(name);
Object obj = getCamelContext().getInjector().newInstance(clazz, false);
CamelContextAware.trySetCamelContext(obj, getCamelContext());
if (obj instanceof TypeConverterLoader) {
TypeConverterLoader loader = (TypeConverterLoader) obj;
CamelContextAware.trySetCamelContext(loader, getCamelContext());
LOG.debug("TypeConverterLoader: {} loading converters", name);
loader.load(getCamelContext().getTypeConverterRegistry());
}
}
}
}
|
TypeConverterLoaderDownloadListener
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java
|
{
"start": 1590,
"end": 4620
}
|
class ____ implements ReservedProjectStateHandler<List<PutRoleMappingRequest>> {
public static final String NAME = "role_mappings";
@Override
public String name() {
return NAME;
}
@Override
public TransformState transform(ProjectId projectId, List<PutRoleMappingRequest> source, TransformState prevState) throws Exception {
Set<ExpressionRoleMapping> roleMappings = validateAndTranslate(source);
ProjectMetadata projectMetadata = prevState.state().getMetadata().getProject(projectId);
RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings);
if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromProject(projectMetadata))) {
return prevState;
} else {
ProjectMetadata newProjectMetadata = newRoleMappingMetadata.updateProject(projectMetadata);
Set<String> entities = newRoleMappingMetadata.getRoleMappings()
.stream()
.map(ExpressionRoleMapping::getName)
.collect(Collectors.toSet());
return new TransformState(ClusterState.builder(prevState.state()).putProjectMetadata(newProjectMetadata).build(), entities);
}
}
@Override
public ClusterState remove(ProjectId projectId, TransformState prevState) throws Exception {
return transform(projectId, List.of(), prevState).state();
}
@Override
public List<PutRoleMappingRequest> fromXContent(XContentParser parser) throws IOException {
List<PutRoleMappingRequest> result = new ArrayList<>();
Map<String, ?> source = parser.map();
for (String name : source.keySet()) {
@SuppressWarnings("unchecked")
Map<String, ?> content = (Map<String, ?>) source.get(name);
try (XContentParser mappingParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) {
result.add(new PutRoleMappingRequestBuilder(null).source(name, mappingParser).request());
}
}
return result;
}
private Set<ExpressionRoleMapping> validateAndTranslate(List<PutRoleMappingRequest> roleMappings) {
var exceptions = new ArrayList<Exception>();
for (var roleMapping : roleMappings) {
// File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX
var exception = roleMapping.validate(false);
if (exception != null) {
exceptions.add(exception);
}
}
if (exceptions.isEmpty() == false) {
var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests");
exceptions.forEach(illegalArgumentException::addSuppressed);
throw illegalArgumentException;
}
return roleMappings.stream()
.map(r -> RoleMappingMetadata.copyWithNameInMetadata(r.getMapping()))
.collect(Collectors.toUnmodifiableSet());
}
}
|
ReservedRoleMappingAction
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-netty4/src/main/java/org/apache/dubbo/remoting/transport/netty4/logging/FormattingTuple.java
|
{
"start": 990,
"end": 2174
}
|
class ____ {
static final FormattingTuple NULL = new FormattingTuple(null);
private final String message;
private final Throwable throwable;
private final Object[] argArray;
FormattingTuple(String message) {
this(message, null, null);
}
FormattingTuple(String message, Object[] argArray, Throwable throwable) {
this.message = message;
this.throwable = throwable;
if (throwable == null) {
this.argArray = argArray;
} else {
this.argArray = trimmedCopy(argArray);
}
}
static Object[] trimmedCopy(Object[] argArray) {
if (ArrayUtils.isEmpty(argArray)) {
throw new IllegalStateException("non-sensical empty or null argument array");
}
final int trimmedLen = argArray.length - 1;
Object[] trimmed = new Object[trimmedLen];
System.arraycopy(argArray, 0, trimmed, 0, trimmedLen);
return trimmed;
}
public String getMessage() {
return message;
}
public Object[] getArgArray() {
return argArray;
}
public Throwable getThrowable() {
return throwable;
}
}
|
FormattingTuple
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/dialect/LimitLimitHandlerTest.java
|
{
"start": 296,
"end": 465
}
|
class ____ extends AbstractLimitHandlerTest {
@Override
protected AbstractLimitHandler getLimitHandler() {
return LimitLimitHandler.INSTANCE;
}
}
|
LimitLimitHandlerTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/InconsistentCapitalizationTest.java
|
{
"start": 9483,
"end": 10044
}
|
class ____ extends A {
Nested(Object aa) {
super.aa = aa;
if (super.aa == aa) {}
super.aa = aa;
}
}
}
}
""")
.doTest();
}
@Test
public void
correctsInconsistentVariableNameToFieldCaseInAnonymousClassAndQualifiesNestedChildClassField() {
refactoringHelper
.addInputLines(
"in/Test.java",
"""
import java.util.function.Function;
|
Nested
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/json/JsonContent.java
|
{
"start": 1088,
"end": 3336
}
|
class ____ implements AssertProvider<JsonContentAssert> {
private final String json;
private final @Nullable JsonConverterDelegate converterDelegate;
/**
* Create a new {@code JsonContent} instance with the message converter to
* use to deserialize content.
* @param json the actual JSON content
* @param converterDelegate the content converter to use
*/
public JsonContent(String json, @Nullable JsonConverterDelegate converterDelegate) {
Assert.notNull(json, "JSON must not be null");
this.json = json;
this.converterDelegate = converterDelegate;
}
/**
* Create a new {@code JsonContent} instance with the message converter to
* use to deserialize content.
* @param json the actual JSON content
* @param converter the content converter to use
* @deprecated in favour of {@link #JsonContent(String, JsonConverterDelegate)}
*/
@SuppressWarnings("removal")
@Deprecated(since = "7.0", forRemoval = true)
public JsonContent(String json, @Nullable HttpMessageContentConverter converter) {
this(json, (JsonConverterDelegate) converter);
}
/**
* Create a new {@code JsonContent} instance.
* @param json the actual JSON content
*/
public JsonContent(String json) {
this(json, (JsonConverterDelegate) null);
}
/**
* Use AssertJ's {@link org.assertj.core.api.Assertions#assertThat assertThat}
* instead.
*/
@Override
public JsonContentAssert assertThat() {
return new JsonContentAssert(this);
}
/**
* Return the actual JSON content string.
*/
public String getJson() {
return this.json;
}
/**
* Return the {@link JsonConverterDelegate} to use to decode JSON content.
* @since 7.0
*/
public @Nullable JsonConverterDelegate getJsonConverterDelegate() {
return this.converterDelegate;
}
/**
* Return the {@link HttpMessageContentConverter} to use to deserialize content.
* @deprecated in favour of {@link #getJsonConverterDelegate()}
*/
@SuppressWarnings("removal")
@Deprecated(since = "7.0", forRemoval = true)
@Nullable HttpMessageContentConverter getContentConverter() {
return (this.converterDelegate instanceof HttpMessageContentConverter cc ? cc : null);
}
@Override
public String toString() {
return "JsonContent " + this.json;
}
}
|
JsonContent
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/resilience/retry/AbstractRetryInterceptor.java
|
{
"start": 4339,
"end": 4437
}
|
class ____ avoid a hard dependency on Reactive Streams and Reactor at runtime.
*/
private static
|
to
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/simp/stomp/StompClientSupport.java
|
{
"start": 1672,
"end": 6828
}
|
class ____ {
private MessageConverter messageConverter = new SimpleMessageConverter();
private @Nullable TaskScheduler taskScheduler;
private long[] defaultHeartbeat = new long[] {10000, 10000};
private long receiptTimeLimit = TimeUnit.SECONDS.toMillis(15);
/**
* Set the {@link MessageConverter} to use to convert the payload of incoming
* and outgoing messages to and from {@code byte[]} based on object type
* and the "content-type" header.
* <p>By default, {@link SimpleMessageConverter} is configured.
* @param messageConverter the message converter to use
*/
public void setMessageConverter(MessageConverter messageConverter) {
Assert.notNull(messageConverter, "MessageConverter must not be null");
this.messageConverter = messageConverter;
}
/**
* Return the configured {@link MessageConverter}.
*/
public MessageConverter getMessageConverter() {
return this.messageConverter;
}
/**
* Configure a scheduler to use for heartbeats and for receipt tracking.
* <p><strong>Note:</strong> Some transports have built-in support to work
* with heartbeats and therefore do not require a TaskScheduler.
* Receipts however, if needed, do require a TaskScheduler to be configured.
* <p>By default, this is not set.
*/
public void setTaskScheduler(@Nullable TaskScheduler taskScheduler) {
this.taskScheduler = taskScheduler;
}
/**
* The configured TaskScheduler.
*/
public @Nullable TaskScheduler getTaskScheduler() {
return this.taskScheduler;
}
/**
 * Set the default value for the "heart-beat" header of the STOMP CONNECT frame.
 * <p>Index 0 is the interval at which this client writes heart-beats and
 * index 1 the interval at which it expects the server to write them; a value
 * of 0 disables the corresponding direction.
 * <p>Defaults to "10000,10000", but subclasses may override that, e.g. with
 * "0,0" when they require a TaskScheduler to be configured first.
 * <p><strong>Note:</strong> heart-beats are only sent during inactivity. With
 * an external broker, messages to non-broker destinations count as activity
 * but are not forwarded to the broker; configuring a {@code TaskScheduler}
 * through the
 * {@link org.springframework.messaging.simp.config.StompBrokerRelayRegistration}
 * ensures a heart-beat still reaches the broker in that case.
 * @param heartbeat the two non-negative intervals for the CONNECT "heart-beat" header
 * @throws IllegalArgumentException if the array is not exactly two non-negative values
 * @see <a href="https://stomp.github.io/stomp-specification-1.2.html#Heart-beating">
 * https://stomp.github.io/stomp-specification-1.2.html#Heart-beating</a>
 */
public void setDefaultHeartbeat(long[] heartbeat) {
    boolean wellFormed = (heartbeat.length == 2 && heartbeat[0] >= 0 && heartbeat[1] >= 0);
    if (!wellFormed) {
        throw new IllegalArgumentException("Invalid heart-beat: " + Arrays.toString(heartbeat));
    }
    this.defaultHeartbeat = heartbeat;
}
/**
 * Expose the default heart-beat value; never {@code null}.
 * <p>Note: this returns the internal array, not a defensive copy.
 */
public long[] getDefaultHeartbeat() {
    return defaultHeartbeat;
}
/**
 * Whether heart-beats are effectively enabled, i.e. whether both intervals of
 * {@link #getDefaultHeartbeat() defaultHeartbeat} are non-zero.
 */
public boolean isDefaultHeartbeatEnabled() {
    long[] interval = getDefaultHeartbeat();
    return (interval[0] != 0 && interval[1] != 0);
}
/**
 * Set the number of milliseconds after which a receipt is considered expired.
 * <p>Defaults to 15,000 (15 seconds).
 * @throws IllegalArgumentException if the value is not positive
 */
public void setReceiptTimeLimit(long receiptTimeLimit) {
    Assert.isTrue(receiptTimeLimit > 0, "Receipt time limit must be larger than zero");
    this.receiptTimeLimit = receiptTimeLimit;
}
/**
 * Expose the receipt expiry limit, in milliseconds.
 */
public long getReceiptTimeLimit() {
    return receiptTimeLimit;
}
/**
 * Create and configure a new session.
 * @param connectHeaders headers for the STOMP CONNECT frame (may be {@code null})
 * @param handler the application handler for the STOMP session
 * @return the fully configured session
 */
protected ConnectionHandlingStompSession createSession(
        @Nullable StompHeaders connectHeaders, StompSessionHandler handler) {

    // Fill in defaults (e.g. heart-beat) before handing the headers to the session.
    StompHeaders headers = processConnectHeaders(connectHeaders);
    DefaultStompSession session = new DefaultStompSession(handler, headers);
    session.setMessageConverter(getMessageConverter());
    session.setTaskScheduler(getTaskScheduler());
    session.setReceiptTimeLimit(getReceiptTimeLimit());
    return session;
}
/**
 * Complete the CONNECT headers, filling in the default heart-beat value when
 * none was supplied.
 * @param connectHeaders the headers to complete, or {@code null} to start fresh
 * @return the non-null, completed headers
 */
protected StompHeaders processConnectHeaders(@Nullable StompHeaders connectHeaders) {
    StompHeaders headers = (connectHeaders == null ? new StompHeaders() : connectHeaders);
    if (headers.getHeartbeat() == null) {
        headers.setHeartbeat(getDefaultHeartbeat());
    }
    return headers;
}
}
|
StompClientSupport
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/ReadOnlyHttpHeaders.java
|
{
"start": 1693,
"end": 9471
}
|
class ____ extends HttpHeaders {
private final CharSequence[] nameValuePairs;
/**
 * Create a read-only header set backed directly by the given array.
 * @param validateHeaders {@code true} to validate the contents of each header name.
 * @param nameValuePairs a flat array of alternating names and values,
 * {@code [<name,value>,<name,value>,...]}. The array is <strong>NOT</strong>
 * copied; pass in a copy if it may be modified externally.
 * @throws IllegalArgumentException if the array length is odd
 */
public ReadOnlyHttpHeaders(boolean validateHeaders, CharSequence... nameValuePairs) {
    boolean oddLength = (nameValuePairs.length & 1) != 0;
    if (oddLength) {
        throw newInvalidArraySizeException();
    }
    if (validateHeaders) {
        validateHeaders(nameValuePairs);
    }
    this.nameValuePairs = nameValuePairs;
}
// Builds the exception thrown when the backing array is not made of [name, value] pairs.
private static IllegalArgumentException newInvalidArraySizeException() {
    return new IllegalArgumentException("nameValuePairs must be arrays of [name, value] pairs");
}
// Runs the default header-name validator over every name slot (the even indices).
private static void validateHeaders(CharSequence... keyValuePairs) {
    final int length = keyValuePairs.length;
    for (int i = 0; i < length; i += 2) {
        DefaultHttpHeadersFactory.headersFactory().getNameValidator().validateName(keyValuePairs[i]);
    }
}
// Linear scan returning the value of the first case-insensitive name match,
// or null when absent. The hash is compared first as a cheap pre-filter.
private CharSequence get0(CharSequence name) {
    final int nameHash = AsciiString.hashCode(name);
    final int length = nameValuePairs.length;
    for (int i = 0; i < length; i += 2) {
        CharSequence candidate = nameValuePairs[i];
        if (AsciiString.hashCode(candidate) == nameHash && contentEqualsIgnoreCase(candidate, name)) {
            // i + 1 is always in bounds: the constructor rejects odd-length arrays.
            return nameValuePairs[i + 1];
        }
    }
    return null;
}
// String view of the first matching value, or null when the header is absent.
@Override
public String get(String name) {
    CharSequence value = get0(name);
    if (value == null) {
        return null;
    }
    return value.toString();
}
// First matching value parsed as an int, or null when the header is absent.
@Override
public Integer getInt(CharSequence name) {
CharSequence value = get0(name);
return value == null ? null : INSTANCE.convertToInt(value);
}
// First matching value parsed as an int, falling back to defaultValue when absent.
@Override
public int getInt(CharSequence name, int defaultValue) {
CharSequence value = get0(name);
return value == null ? defaultValue : INSTANCE.convertToInt(value);
}
// First matching value parsed as a short, or null when the header is absent.
@Override
public Short getShort(CharSequence name) {
CharSequence value = get0(name);
return value == null ? null : INSTANCE.convertToShort(value);
}
// First matching value parsed as a short, falling back to defaultValue when absent.
@Override
public short getShort(CharSequence name, short defaultValue) {
CharSequence value = get0(name);
return value == null ? defaultValue : INSTANCE.convertToShort(value);
}
// First matching value parsed as a date in epoch millis, or null when absent.
@Override
public Long getTimeMillis(CharSequence name) {
CharSequence value = get0(name);
return value == null ? null : INSTANCE.convertToTimeMillis(value);
}
// First matching value parsed as a date in epoch millis, falling back to defaultValue.
@Override
public long getTimeMillis(CharSequence name, long defaultValue) {
CharSequence value = get0(name);
return value == null ? defaultValue : INSTANCE.convertToTimeMillis(value);
}
// All values whose name matches case-insensitively, in declaration order.
@Override
public List<String> getAll(String name) {
    if (isEmpty()) {
        return Collections.emptyList();
    }
    final int nameHash = AsciiString.hashCode(name);
    final List<String> values = new ArrayList<String>(4);
    for (int i = 0; i < nameValuePairs.length; i += 2) {
        CharSequence candidate = nameValuePairs[i];
        boolean sameName = AsciiString.hashCode(candidate) == nameHash
                && contentEqualsIgnoreCase(candidate, name);
        if (sameName) {
            values.add(nameValuePairs[i + 1].toString());
        }
    }
    return values;
}
// Snapshot of all headers as immutable String entries, in declaration order.
@Override
public List<Map.Entry<String, String>> entries() {
    if (isEmpty()) {
        return Collections.emptyList();
    }
    List<Map.Entry<String, String>> entries = new ArrayList<Map.Entry<String, String>>(size());
    for (int i = 0; i < nameValuePairs.length; i += 2) {
        // i + 1 never overflows: the constructor guarantees an even-length array.
        String headerName = nameValuePairs[i].toString();
        String headerValue = nameValuePairs[i + 1].toString();
        entries.add(new SimpleImmutableEntry<String, String>(headerName, headerValue));
    }
    return entries;
}
// A header is present exactly when a lookup yields a value.
@Override
public boolean contains(String name) {
    CharSequence value = get0(name);
    return value != null;
}
// Delegates to the CharSequence-based overload below.
@Override
public boolean contains(String name, String value, boolean ignoreCase) {
return containsValue(name, value, ignoreCase);
}
// True when some pair matches the name (always case-insensitive) and the
// value, where value comparison is case-insensitive only if requested.
@Override
public boolean containsValue(CharSequence name, CharSequence value, boolean ignoreCase) {
    for (int i = 0; i < nameValuePairs.length; i += 2) {
        if (!contentEqualsIgnoreCase(nameValuePairs[i], name)) {
            continue;
        }
        CharSequence candidate = nameValuePairs[i + 1];
        boolean valueMatches = ignoreCase
                ? contentEqualsIgnoreCase(candidate, value)
                : contentEquals(candidate, value);
        if (valueMatches) {
            return true;
        }
    }
    return false;
}
// Lazy String iterator over the values of the given header name.
@Override
public Iterator<String> valueStringIterator(CharSequence name) {
return new ReadOnlyStringValueIterator(name);
}
// Lazy CharSequence iterator over the values of the given header name.
@Override
public Iterator<CharSequence> valueCharSequenceIterator(CharSequence name) {
return new ReadOnlyValueIterator(name);
}
// Iterates all headers as String entries without materializing a list.
@Override
public Iterator<Map.Entry<String, String>> iterator() {
return new ReadOnlyStringIterator();
}
// Iterates all headers as CharSequence entries, avoiding String conversion.
@Override
public Iterator<Map.Entry<CharSequence, CharSequence>> iteratorCharSequence() {
return new ReadOnlyIterator();
}
// No stored pairs means no headers at all.
@Override
public boolean isEmpty() {
return nameValuePairs.length == 0;
}
// Two array slots per header, so the header count is half the array length.
@Override
public int size() {
    return nameValuePairs.length / 2;
}
// Distinct header names, preserving first-seen order.
@Override
public Set<String> names() {
    if (isEmpty()) {
        return Collections.emptySet();
    }
    Set<String> headerNames = new LinkedHashSet<String>(size());
    for (int i = 0; i < nameValuePairs.length; i += 2) {
        headerNames.add(nameValuePairs[i].toString());
    }
    return headerNames;
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders add(String name, Object value) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders add(String name, Iterable<?> values) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders addInt(CharSequence name, int value) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders addShort(CharSequence name, short value) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders set(String name, Object value) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders set(String name, Iterable<?> values) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders setInt(CharSequence name, int value) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders setShort(CharSequence name, short value) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders remove(String name) {
throw new UnsupportedOperationException("read only");
}
// Mutation is not supported on this read-only implementation.
@Override
public HttpHeaders clear() {
throw new UnsupportedOperationException("read only");
}
private final
|
ReadOnlyHttpHeaders
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalPipe.java
|
{
"start": 850,
"end": 1976
}
|
/**
 * Pipe for SQL conditional functions: pairs the child pipes with a
 * {@code ConditionalOperation} and builds the matching processor.
 */
class ____ extends MultiPipe {
// The conditional operation applied over the children's results.
private final ConditionalOperation operation;
public ConditionalPipe(Source source, Expression expression, List<Pipe> children, ConditionalOperation operation) {
super(source, expression, children);
this.operation = operation;
}
// Node metadata used by the tree framework for cloning/transforming.
@Override
protected NodeInfo<ConditionalPipe> info() {
return NodeInfo.create(this, ConditionalPipe::new, expression(), children(), operation);
}
// Rebuilds this node with new children, keeping source/expression/operation.
@Override
public Pipe replaceChildren(List<Pipe> newChildren) {
return new ConditionalPipe(source(), expression(), newChildren, operation);
}
// Combines the child processors under the configured conditional operation.
@Override
public Processor asProcessor(List<Processor> procs) {
return new ConditionalProcessor(procs, operation);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), operation);
}
// Equal when the MultiPipe state matches and the operation is the same.
@Override
public boolean equals(Object obj) {
if (super.equals(obj)) {
ConditionalPipe other = (ConditionalPipe) obj;
return Objects.equals(operation, other.operation);
}
return false;
}
}
|
ConditionalPipe
|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/main/resources/conditional-dependencies/ext-o/deployment/src/main/java/org/acme/quarkus/ext/o/deployment/AcmeQuarkusExtProcessor.java
|
{
"start": 155,
"end": 355
}
|
class ____ {
// Feature name this extension registers with Quarkus at build time.
private static final String FEATURE = "acme-quarkus-ext-o";
// Build step that declares the extension as a Quarkus feature.
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem(FEATURE);
}
}
|
AcmeQuarkusExtProcessor
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/jdk8/ObservableStageSubscriberOrDefaultTest.java
|
{
"start": 987,
"end": 14052
}
|
/**
 * Tests for Observable.firstStage/singleStage/lastStage, which bridge a
 * sequence into a CompletionStage, emitting the supplied default when the
 * source is empty. Each group verifies the happy path, cancellation of the
 * upstream when the CompletableFuture completes externally, error
 * propagation, and protocol violations (double onSubscribe, signals after
 * termination).
 */
class ____ extends RxJavaTest {
// --- firstStage ---
@Test
public void firstJust() throws Exception {
Integer v = Observable.just(1)
.firstStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
}
@Test
public void firstEmpty() throws Exception {
Integer v = Observable.<Integer>empty()
.firstStage(2)
.toCompletableFuture()
.get();
assertEquals((Integer)2, v);
}
@Test
public void firstCancels() throws Exception {
BehaviorSubject<Integer> source = BehaviorSubject.createDefault(1);
Integer v = source
.firstStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
assertFalse(source.hasObservers());
}
@Test
public void firstCompletableFutureCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.firstStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.cancel(true);
assertTrue(cf.isCancelled());
assertFalse(source.hasObservers());
}
@Test
public void firstCompletableManualCompleteCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.firstStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.complete(1);
assertTrue(cf.isDone());
assertFalse(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
assertFalse(source.hasObservers());
assertEquals((Integer)1, cf.get());
}
@Test
public void firstCompletableManualCompleteExceptionallyCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.firstStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.completeExceptionally(new TestException());
assertTrue(cf.isDone());
assertTrue(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
assertFalse(source.hasObservers());
TestHelper.assertError(cf, TestException.class);
}
@Test
public void firstError() throws Exception {
CompletableFuture<Integer> cf = Observable.<Integer>error(new TestException())
.firstStage(null)
.toCompletableFuture();
assertTrue(cf.isDone());
assertTrue(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
TestHelper.assertError(cf, TestException.class);
}
@Test
public void firstSourceIgnoresCancel() throws Throwable {
TestHelper.withErrorTracking(errors -> {
// Source ignores disposal and keeps signalling; the late error must go
// to the undeliverable handler, not the future.
Integer v = new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onError(new TestException());
observer.onComplete();
}
}
.firstStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
});
}
@Test
public void firstDoubleOnSubscribe() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Integer v = new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
}
}
.firstStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
TestHelper.assertError(errors, 0, ProtocolViolationException.class);
});
}
// --- singleStage ---
@Test
public void singleJust() throws Exception {
Integer v = Observable.just(1)
.singleStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
}
@Test
public void singleEmpty() throws Exception {
Integer v = Observable.<Integer>empty()
.singleStage(2)
.toCompletableFuture()
.get();
assertEquals((Integer)2, v);
}
@Test
public void singleTooManyCancels() throws Exception {
ReplaySubject<Integer> source = ReplaySubject.create();
source.onNext(1);
source.onNext(2);
TestHelper.assertError(source
.singleStage(null)
.toCompletableFuture(), IllegalArgumentException.class);
assertFalse(source.hasObservers());
}
@Test
public void singleCompletableFutureCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.singleStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.cancel(true);
assertTrue(cf.isCancelled());
assertFalse(source.hasObservers());
}
@Test
public void singleCompletableManualCompleteCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.singleStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.complete(1);
assertTrue(cf.isDone());
assertFalse(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
assertFalse(source.hasObservers());
assertEquals((Integer)1, cf.get());
}
@Test
public void singleCompletableManualCompleteExceptionallyCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.singleStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.completeExceptionally(new TestException());
assertTrue(cf.isDone());
assertTrue(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
assertFalse(source.hasObservers());
TestHelper.assertError(cf, TestException.class);
}
@Test
public void singleError() throws Exception {
CompletableFuture<Integer> cf = Observable.<Integer>error(new TestException())
.singleStage(null)
.toCompletableFuture();
assertTrue(cf.isDone());
assertTrue(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
TestHelper.assertError(cf, TestException.class);
}
@Test
public void singleSourceIgnoresCancel() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Integer v = new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onComplete();
observer.onError(new TestException());
observer.onComplete();
}
}
.singleStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
});
}
@Test
public void singleDoubleOnSubscribe() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Integer v = new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onComplete();
}
}
.singleStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
TestHelper.assertError(errors, 0, ProtocolViolationException.class);
});
}
// --- lastStage ---
@Test
public void lastJust() throws Exception {
Integer v = Observable.just(1)
.lastStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
}
@Test
public void lastRange() throws Exception {
Integer v = Observable.range(1, 5)
.lastStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)5, v);
}
@Test
public void lastEmpty() throws Exception {
Integer v = Observable.<Integer>empty()
.lastStage(2)
.toCompletableFuture()
.get();
assertEquals((Integer)2, v);
}
@Test
public void lastCompletableFutureCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.lastStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.cancel(true);
assertTrue(cf.isCancelled());
assertFalse(source.hasObservers());
}
@Test
public void lastCompletableManualCompleteCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.lastStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.complete(1);
assertTrue(cf.isDone());
assertFalse(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
assertFalse(source.hasObservers());
assertEquals((Integer)1, cf.get());
}
@Test
public void lastCompletableManualCompleteExceptionallyCancels() throws Exception {
PublishSubject<Integer> source = PublishSubject.create();
CompletableFuture<Integer> cf = source
.lastStage(null)
.toCompletableFuture();
assertTrue(source.hasObservers());
cf.completeExceptionally(new TestException());
assertTrue(cf.isDone());
assertTrue(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
assertFalse(source.hasObservers());
TestHelper.assertError(cf, TestException.class);
}
@Test
public void lastError() throws Exception {
CompletableFuture<Integer> cf = Observable.<Integer>error(new TestException())
.lastStage(null)
.toCompletableFuture();
assertTrue(cf.isDone());
assertTrue(cf.isCompletedExceptionally());
assertFalse(cf.isCancelled());
TestHelper.assertError(cf, TestException.class);
}
@Test
public void lastSourceIgnoresCancel() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Integer v = new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onComplete();
observer.onError(new TestException());
observer.onComplete();
}
}
.lastStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
});
}
@Test
public void lastDoubleOnSubscribe() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Integer v = new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onComplete();
}
}
.lastStage(null)
.toCompletableFuture()
.get();
assertEquals((Integer)1, v);
TestHelper.assertError(errors, 0, ProtocolViolationException.class);
});
}
}
|
ObservableStageSubscriberOrDefaultTest
|
java
|
square__moshi
|
moshi/src/test/java/com/squareup/moshi/PromoteNameToValueTest.java
|
{
"start": 1007,
"end": 13227
}
|
/**
 * Parameterized tests (run against every JsonCodecFactory) for
 * JsonReader.promoteNameToValue and JsonWriter.promoteValueToName: the
 * mechanisms that let an object name be consumed/produced as a regular
 * value, e.g. for map keys of non-string types.
 */
class ____ {
@Parameter public JsonCodecFactory factory;
@Parameters(name = "{0}")
public static List<Object[]> parameters() {
return JsonCodecFactory.factories();
}
// --- reader: promoteNameToValue ---
@Test
public void readerStringValue() throws Exception {
JsonReader reader = factory.newReader("{\"a\":1}");
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.getPath()).isEqualTo("$.a");
assertThat(reader.peek()).isEqualTo(JsonReader.Token.STRING);
assertThat(reader.nextString()).isEqualTo("a");
assertThat(reader.getPath()).isEqualTo("$.a");
assertThat(reader.nextInt()).isEqualTo(1);
assertThat(reader.getPath()).isEqualTo("$.a");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void readerIntegerValue() throws Exception {
JsonReader reader = factory.newReader("{\"5\":1}");
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.getPath()).isEqualTo("$.5");
assertThat(reader.peek()).isEqualTo(JsonReader.Token.STRING);
assertThat(reader.nextInt()).isEqualTo(5);
assertThat(reader.getPath()).isEqualTo("$.5");
assertThat(reader.nextInt()).isEqualTo(1);
assertThat(reader.getPath()).isEqualTo("$.5");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void readerDoubleValue() throws Exception {
JsonReader reader = factory.newReader("{\"5.5\":1}");
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.getPath()).isEqualTo("$.5.5");
assertThat(reader.peek()).isEqualTo(JsonReader.Token.STRING);
assertThat(reader.nextDouble()).isEqualTo(5.5d);
assertThat(reader.getPath()).isEqualTo("$.5.5");
assertThat(reader.nextInt()).isEqualTo(1);
assertThat(reader.getPath()).isEqualTo("$.5.5");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void readerBooleanValue() throws Exception {
JsonReader reader = factory.newReader("{\"true\":1}");
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.getPath()).isEqualTo("$.true");
assertThat(reader.peek()).isEqualTo(JsonReader.Token.STRING);
// Promoted names are strings; reading them as a boolean must fail.
try {
reader.nextBoolean();
fail();
} catch (JsonDataException e) {
assertThat(e)
.hasMessageThat()
.isAnyOf(
"Expected BOOLEAN but was true, a java.lang.String, at path $.true",
"Expected a boolean but was STRING at path $.true");
}
assertThat(reader.getPath()).isEqualTo("$.true");
assertThat(reader.nextString()).isEqualTo("true");
assertThat(reader.getPath()).isEqualTo("$.true");
assertThat(reader.nextInt()).isEqualTo(1);
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void readerLongValue() throws Exception {
JsonReader reader = factory.newReader("{\"5\":1}");
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.getPath()).isEqualTo("$.5");
assertThat(reader.peek()).isEqualTo(JsonReader.Token.STRING);
assertThat(reader.nextLong()).isEqualTo(5L);
assertThat(reader.getPath()).isEqualTo("$.5");
assertThat(reader.nextInt()).isEqualTo(1);
assertThat(reader.getPath()).isEqualTo("$.5");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void readerNullValue() throws Exception {
JsonReader reader = factory.newReader("{\"null\":1}");
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.getPath()).isEqualTo("$.null");
assertThat(reader.peek()).isEqualTo(JsonReader.Token.STRING);
// The literal name "null" is still a string, not a JSON null.
try {
reader.nextNull();
fail();
} catch (JsonDataException e) {
assertThat(e)
.hasMessageThat()
.isAnyOf(
"Expected NULL but was null, a java.lang.String, at path $.null",
"Expected null but was STRING at path $.null");
}
assertThat(reader.nextString()).isEqualTo("null");
assertThat(reader.getPath()).isEqualTo("$.null");
assertThat(reader.nextInt()).isEqualTo(1);
assertThat(reader.getPath()).isEqualTo("$.null");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void readerMultipleValueObject() throws Exception {
JsonReader reader = factory.newReader("{\"a\":1,\"b\":2}");
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
assertThat(reader.nextInt()).isEqualTo(1);
reader.promoteNameToValue();
assertThat(reader.getPath()).isEqualTo("$.b");
assertThat(reader.peek()).isEqualTo(JsonReader.Token.STRING);
assertThat(reader.nextString()).isEqualTo("b");
assertThat(reader.getPath()).isEqualTo("$.b");
assertThat(reader.nextInt()).isEqualTo(2);
assertThat(reader.getPath()).isEqualTo("$.b");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void readerEmptyValueObject() throws Exception {
JsonReader reader = factory.newReader("{}");
reader.beginObject();
assertThat(reader.peek()).isEqualTo(JsonReader.Token.END_OBJECT);
reader.promoteNameToValue();
assertThat(reader.getPath()).isEqualTo("$.");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void readerUnusedPromotionDoesntPersist() throws Exception {
// A promotion left unconsumed inside one object must not leak into the next.
JsonReader reader = factory.newReader("[{},{\"a\":5}]");
reader.beginArray();
reader.beginObject();
reader.promoteNameToValue();
reader.endObject();
reader.beginObject();
try {
reader.nextString();
fail();
} catch (JsonDataException expected) {
}
assertThat(reader.nextName()).isEqualTo("a");
}
@Test
public void readerUnquotedIntegerValue() throws Exception {
JsonReader reader = factory.newReader("{5:1}");
reader.setLenient(true);
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.nextInt()).isEqualTo(5);
assertThat(reader.nextInt()).isEqualTo(1);
reader.endObject();
}
@Test
public void readerUnquotedLongValue() throws Exception {
JsonReader reader = factory.newReader("{5:1}");
reader.setLenient(true);
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.nextLong()).isEqualTo(5L);
assertThat(reader.nextInt()).isEqualTo(1);
reader.endObject();
}
@Test
public void readerUnquotedDoubleValue() throws Exception {
JsonReader reader = factory.newReader("{5:1}");
reader.setLenient(true);
reader.beginObject();
reader.promoteNameToValue();
assertThat(reader.nextDouble()).isEqualTo(5d);
assertThat(reader.nextInt()).isEqualTo(1);
reader.endObject();
}
// --- writer: promoteValueToName ---
@Test
public void writerStringValue() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
writer.value("a");
assertThat(writer.getPath()).isEqualTo("$.a");
writer.value(1);
assertThat(writer.getPath()).isEqualTo("$.a");
writer.endObject();
assertThat(writer.getPath()).isEqualTo("$");
assertThat(factory.json()).isEqualTo("{\"a\":1}");
}
@Test
public void writerIntegerValue() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
writer.value(5);
assertThat(writer.getPath()).isEqualTo("$.5");
writer.value(1);
assertThat(writer.getPath()).isEqualTo("$.5");
writer.endObject();
assertThat(writer.getPath()).isEqualTo("$");
assertThat(factory.json()).isEqualTo("{\"5\":1}");
}
@Test
public void writerDoubleValue() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
writer.value(5.5d);
assertThat(writer.getPath()).isEqualTo("$.5.5");
writer.value(1);
assertThat(writer.getPath()).isEqualTo("$.5.5");
writer.endObject();
assertThat(writer.getPath()).isEqualTo("$");
assertThat(factory.json()).isEqualTo("{\"5.5\":1}");
}
@Test
public void writerBooleanValue() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
// Booleans cannot become map keys; only strings/numbers are accepted.
try {
writer.value(true);
fail();
} catch (IllegalStateException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo("Boolean cannot be used as a map key in JSON at path $.");
}
writer.value("true");
assertThat(writer.getPath()).isEqualTo("$.true");
writer.value(1);
writer.endObject();
assertThat(writer.getPath()).isEqualTo("$");
assertThat(factory.json()).isEqualTo("{\"true\":1}");
}
@Test
public void writerLongValue() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
writer.value(5L);
assertThat(writer.getPath()).isEqualTo("$.5");
writer.value(1);
assertThat(writer.getPath()).isEqualTo("$.5");
writer.endObject();
assertThat(writer.getPath()).isEqualTo("$");
assertThat(factory.json()).isEqualTo("{\"5\":1}");
}
@Test
public void writerNullValue() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
// JSON null is likewise rejected as a map key.
try {
writer.nullValue();
fail();
} catch (IllegalStateException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo("null cannot be used as a map key in JSON at path $.");
}
writer.value("null");
assertThat(writer.getPath()).isEqualTo("$.null");
writer.value(1);
assertThat(writer.getPath()).isEqualTo("$.null");
writer.endObject();
assertThat(writer.getPath()).isEqualTo("$");
assertThat(factory.json()).isEqualTo("{\"null\":1}");
}
@Test
public void writerMultipleValueObject() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.name("a");
writer.value(1);
writer.promoteValueToName();
writer.value("b");
assertThat(writer.getPath()).isEqualTo("$.b");
writer.value(2);
assertThat(writer.getPath()).isEqualTo("$.b");
writer.endObject();
assertThat(writer.getPath()).isEqualTo("$");
assertThat(factory.json()).isEqualTo("{\"a\":1,\"b\":2}");
}
@Test
public void writerEmptyValueObject() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
assertThat(writer.getPath()).isEqualTo("$.");
writer.endObject();
assertThat(writer.getPath()).isEqualTo("$");
assertThat(factory.json()).isEqualTo("{}");
}
@Test
public void writerUnusedPromotionDoesntPersist() throws Exception {
// A promotion left unused in one object must not leak into the next.
JsonWriter writer = factory.newWriter();
writer.beginArray();
writer.beginObject();
writer.promoteValueToName();
writer.endObject();
writer.beginObject();
try {
writer.value("a");
fail();
} catch (IllegalStateException expected) {
}
writer.name("a");
}
@Test
public void writerSourceValueFails() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
try {
writer.value(new Buffer().writeUtf8("\"a\""));
fail();
} catch (IllegalStateException expected) {
assertThat(expected)
.hasMessageThat()
.isEqualTo("BufferedSource cannot be used as a map key in JSON at path $.");
}
writer.value("a");
writer.value("a value");
writer.endObject();
assertThat(factory.json()).isEqualTo("{\"a\":\"a value\"}");
}
@Test
public void writerValueSinkFails() throws Exception {
JsonWriter writer = factory.newWriter();
writer.beginObject();
writer.promoteValueToName();
try {
writer.valueSink();
fail();
} catch (IllegalStateException expected) {
assertThat(expected)
.hasMessageThat()
.isEqualTo("BufferedSink cannot be used as a map key in JSON at path $.");
}
writer.value("a");
writer.value("a value");
writer.endObject();
assertThat(factory.json()).isEqualTo("{\"a\":\"a value\"}");
}
}
|
PromoteNameToValueTest
|
java
|
google__truth
|
extensions/proto/src/main/java/com/google/common/truth/extensions/proto/FieldScopeLogicMap.java
|
{
"start": 1557,
"end": 1617
}
|
class ____ intended only for small
* numbers of entries.
*/
|
is
|
java
|
spring-projects__spring-boot
|
configuration-metadata/spring-boot-configuration-processor/src/main/java/org/springframework/boot/configurationprocessor/ConfigurationPropertiesSourceResolver.java
|
{
"start": 4924,
"end": 6837
}
|
/**
 * PropertyDescriptor that overlays metadata from a "source" item: each
 * resolved attribute prefers the delegate's own value and falls back to the
 * source item metadata/hint when the delegate has none.
 */
class ____ extends PropertyDescriptor {
// The descriptor whose values take precedence.
private final PropertyDescriptor delegate;
// Fallback metadata for description, default value and deprecation.
private final ItemMetadata sourceItemMetadata;
// Fallback hint; may be null, in which case the superclass resolution is used.
private final ItemHint sourceItemHint;
SourcePropertyDescriptor(PropertyDescriptor delegate, ItemMetadata sourceItemMetadata,
ItemHint sourceItemHint) {
super(delegate.getName(), delegate.getType(), delegate.getDeclaringElement(), delegate.getGetter());
this.delegate = delegate;
this.sourceItemMetadata = sourceItemMetadata;
this.sourceItemHint = sourceItemHint;
}
@Override
protected ItemHint resolveItemHint(String prefix, MetadataGenerationEnvironment environment) {
// Source hint wins when present; it is re-prefixed for this property.
return (this.sourceItemHint != null) ? this.sourceItemHint.applyPrefix(prefix)
: super.resolveItemHint(prefix, environment);
}
@Override
protected boolean isMarkedAsNested(MetadataGenerationEnvironment environment) {
return this.delegate.isMarkedAsNested(environment);
}
@Override
protected String resolveDescription(MetadataGenerationEnvironment environment) {
// Delegate first, then the source item's description.
String description = this.delegate.resolveDescription(environment);
return (description != null) ? description : this.sourceItemMetadata.getDescription();
}
@Override
protected Object resolveDefaultValue(MetadataGenerationEnvironment environment) {
// Delegate first, then the source item's default value.
Object defaultValue = this.delegate.resolveDefaultValue(environment);
return (defaultValue != null) ? defaultValue : this.sourceItemMetadata.getDefaultValue();
}
@Override
protected ItemDeprecation resolveItemDeprecation(MetadataGenerationEnvironment environment) {
// Delegate first, then the source item's deprecation info.
ItemDeprecation itemDeprecation = this.delegate.resolveItemDeprecation(environment);
return (itemDeprecation != null) ? itemDeprecation : this.sourceItemMetadata.getDeprecation();
}
@Override
boolean isProperty(MetadataGenerationEnvironment environment) {
return this.delegate.isProperty(environment);
}
}
}
|
SourcePropertyDescriptor
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/cache/config/CacheAdviceParserTests.java
|
{
"start": 993,
"end": 1303
}
|
/**
 * Tests for the {@code <cache:advice>} XML namespace parser.
 */
class CacheAdviceParserTests {

    @Test
    void keyAndKeyGeneratorCannotBeSetTogether() {
        // The invalid XML declares both 'key' and 'key-generator' on the same
        // operation; the parser must reject it while loading bean definitions.
        assertThatThrownBy(() -> new GenericXmlApplicationContext(
                "/org/springframework/cache/config/cache-advice-invalid.xml")
        ).isInstanceOf(BeanDefinitionStoreException.class);
        // TODO better exception handling
    }

}
|
CacheAdviceParserTests
|
java
|
quarkusio__quarkus
|
core/deployment/src/test/java/io/quarkus/deployment/conditionaldeps/DependencyConditionMatchesConditionalDependencyTest.java
|
{
"start": 486,
"end": 3281
}
|
/**
 * Verifies that a conditional dependency (ext-d) whose dependency condition is
 * satisfied by another conditional dependency (ext-b) is activated, and that the
 * resolved application model wires runtime and deployment artifacts accordingly.
 */
class DependencyConditionMatchesConditionalDependencyTest extends BootstrapFromOriginalJarTestBase {

    @Override
    protected TsArtifact composeApplication() {
        final TsQuarkusExt extA = new TsQuarkusExt("ext-a");
        final TsQuarkusExt extB = new TsQuarkusExt("ext-b");
        // ext-b activates only when ext-a is present
        extB.setDependencyCondition(extA);
        final TsQuarkusExt extD = new TsQuarkusExt("ext-d");
        // ext-d activates only when ext-b is present, and also pulls ext-b in conditionally
        extD.setDependencyCondition(extB);
        extD.setConditionalDeps(extB);
        final TsQuarkusExt extC = new TsQuarkusExt("ext-c");
        extC.setConditionalDeps(extD);
        install(extA);
        install(extB);
        install(extC);
        install(extD);
        addToExpectedLib(extA.getRuntime());
        addToExpectedLib(extB.getRuntime());
        addToExpectedLib(extC.getRuntime());
        addToExpectedLib(extD.getRuntime());
        // The app depends directly only on ext-c and ext-a; the rest is conditional.
        return TsArtifact.jar("app")
                .addManagedDependency(platformDescriptor())
                .addManagedDependency(platformProperties())
                .addDependency(extC)
                .addDependency(extA);
    }

    @Override
    protected void assertAppModel(ApplicationModel appModel) {
        var extensions = new HashMap<String, ResolvedDependency>();
        for (var d : appModel.getDependencies()) {
            extensions.put(d.getArtifactId(), d);
        }
        // 4 extensions, each with a runtime and a deployment artifact
        assertThat(extensions).hasSize(8);
        if (!BootstrapAppModelResolver.isLegacyModelResolver(null)) {
            var extA = extensions.get("ext-a");
            assertThat(extA.getDependencies()).isEmpty();
            var extADeployment = extensions.get("ext-a-deployment");
            assertThat(extADeployment.getDependencies()).containsExactly(extA);
            var extB = extensions.get("ext-b");
            assertThat(extB.getDependencies()).isEmpty();
            var extBDeployment = extensions.get("ext-b-deployment");
            assertThat(extBDeployment.getDependencies()).containsExactly(extB);
            var extD = extensions.get("ext-d");
            assertThat(extD.getDependencies()).containsExactly(extB);
            var extDDeployment = extensions.get("ext-d-deployment");
            assertThat(extDDeployment.getDependencies()).containsExactlyInAnyOrder(extD, extBDeployment);
            var extC = extensions.get("ext-c");
            assertThat(extC.getDependencies()).containsExactly(extD);
            var extCDeployment = extensions.get("ext-c-deployment");
            assertThat(extCDeployment.getDependencies()).containsExactlyInAnyOrder(extC, extDDeployment);
        }
    }

    @Override
    protected String[] expectedExtensionDependencies() {
        return new String[] {
                "ext-a",
                "ext-b",
                "ext-c",
                "ext-d"
        };
    }
}
|
DependencyConditionMatchesConditionalDependencyTest
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldBeEqualIgnoringCase.java
|
{
"start": 843,
"end": 1510
}
|
class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldBeEqualIgnoringCase}</code>.
* @param actual the actual value in the failed assertion.
* @param expected the expected value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeEqual(CharSequence actual, CharSequence expected) {
return new ShouldBeEqualIgnoringCase(actual, expected);
}
private ShouldBeEqualIgnoringCase(CharSequence actual, CharSequence expected) {
super("%nexpected: %s%n but was: %s%nignoring case considerations", expected, actual);
}
}
|
ShouldBeEqualIgnoringCase
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/engine/executor/RuntimeCompat.java
|
{
"start": 324,
"end": 2317
}
|
class ____ {
private static final String TAG = "GlideRuntimeCompat";
private static final String CPU_NAME_REGEX = "cpu[0-9]+";
private static final String CPU_LOCATION = "/sys/devices/system/cpu/";
private RuntimeCompat() {
// Utility class.
}
/** Determines the number of cores available on the device. */
static int availableProcessors() {
int cpus = Runtime.getRuntime().availableProcessors();
if (Build.VERSION.SDK_INT < 17) {
cpus = Math.max(getCoreCountPre17(), cpus);
}
return cpus;
}
/**
* Determines the number of cores available on the device (pre-v17).
*
* <p>Before Jellybean, {@link Runtime#availableProcessors()} returned the number of awake cores,
* which may not be the number of available cores depending on the device's current state. See
* https://stackoverflow.com/a/30150409.
*
* @return the maximum number of processors available to the VM; never smaller than one
*/
@SuppressWarnings("PMD")
private static int getCoreCountPre17() {
// We override the current ThreadPolicy to allow disk reads.
// This shouldn't actually do disk-IO and accesses a device file.
// See: https://github.com/bumptech/glide/issues/1170
File[] cpus = null;
ThreadPolicy originalPolicy = StrictMode.allowThreadDiskReads();
try {
File cpuInfo = new File(CPU_LOCATION);
final Pattern cpuNamePattern = Pattern.compile(CPU_NAME_REGEX);
cpus =
cpuInfo.listFiles(
new FilenameFilter() {
@Override
public boolean accept(File file, String s) {
return cpuNamePattern.matcher(s).matches();
}
});
} catch (Throwable t) {
if (Log.isLoggable(TAG, Log.ERROR)) {
Log.e(TAG, "Failed to calculate accurate cpu count", t);
}
} finally {
StrictMode.setThreadPolicy(originalPolicy);
}
return Math.max(1, cpus != null ? cpus.length : 0);
}
}
|
RuntimeCompat
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/error/future/ShouldNotHaveFailed_create_Test.java
|
{
"start": 1003,
"end": 1564
}
|
/**
 * Tests for the error message created by {@code shouldNotHaveFailed(CompletableFuture)}.
 */
class ShouldNotHaveFailed_create_Test {

    @Test
    void should_create_error_message() {
        // GIVEN
        CompletableFuture<Object> future = new CompletableFuture<>();
        future.completeExceptionally(new RuntimeException());
        // WHEN
        String error = shouldNotHaveFailed(future).create(new TestDescription("TEST"));
        // THEN
        // Only the start and end are pinned: the middle contains a stack trace
        // whose exact content varies between runs.
        then(error).startsWith("[TEST] %nExpecting%n <CompletableFuture[Failed with the following stack trace:%njava.lang.RuntimeException".formatted())
                .endsWith("to not have failed.%n%s".formatted(WARNING));
    }
}
|
ShouldNotHaveFailed_create_Test
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/circulartypevariable/RoleAccess.java
|
{
"start": 418,
"end": 648
}
|
/**
 * Entity type with mutually recursive (circular) type variables: the role-access
 * type is bound to the user type, which references the role-access type back.
 * Used to exercise the metamodel generator on circular bounds.
 */
class RoleAccess<
        TRoleAccess extends RoleAccess<TRoleAccess, TUser>,
        TUser extends User<TUser, TRoleAccess>> {

    private Long id;

    @Id
    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }
}
|
RoleAccess
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/data/WithGroupField.java
|
{
"start": 676,
"end": 988
}
|
/**
 * Simple test fixture holding an arbitrary value in a public {@code group} field.
 */
class WithGroupField {

    public Object group;

    public WithGroupField(Object value) {
        this.group = value;
    }

    /** Convenience factory equivalent to {@code new WithGroupField(value)}. */
    public static WithGroupField of(Object value) {
        return new WithGroupField(value);
    }

    @Override
    public String toString() {
        return "WithGroupField group=%s".formatted(group);
    }
}
|
WithGroupField
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/AbstractFuture.java
|
{
"start": 10255,
"end": 10859
}
|
class ____ {
static final Failure FALLBACK_INSTANCE =
new Failure(
new Throwable("Failure occurred while trying to finish a future" +
".") {
@Override
public synchronized Throwable fillInStackTrace() {
return this; // no stack trace
}
});
final Throwable exception;
Failure(Throwable exception) {
this.exception = Preconditions.checkNotNull(exception);
}
}
/**
* A special value to represent cancellation and the 'wasInterrupted' bit.
*/
private static final
|
Failure
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/deploy/DeployState.java
|
{
"start": 879,
"end": 1288
}
|
/**
 * Lifecycle states of a deployable module, in the order they are normally
 * traversed: pending, starting, started/completed, stopping, stopped, with
 * {@link #UNKNOWN} and {@link #FAILED} as out-of-band states.
 */
enum DeployState {
    /**
     * Unknown state
     */
    UNKNOWN,
    /**
     * Pending, wait for start
     */
    PENDING,
    /**
     * Starting
     */
    STARTING,
    /**
     * Started
     */
    STARTED,
    /**
     * Completion
     */
    COMPLETION,
    /**
     * Stopping
     */
    STOPPING,
    /**
     * Stopped
     */
    STOPPED,
    /**
     * Failed
     */
    FAILED
}
|
DeployState
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-jose/src/main/java/org/springframework/security/oauth2/jwt/NimbusReactiveJwtDecoder.java
|
{
"start": 31929,
"end": 37064
}
|
class ____ {
private static final JOSEObjectTypeVerifier<JWKSecurityContext> JWT_TYPE_VERIFIER = new DefaultJOSEObjectTypeVerifier<>(
JOSEObjectType.JWT, null);
private static final JOSEObjectTypeVerifier<JWKSecurityContext> NO_TYPE_VERIFIER = (header, context) -> {
};
private final Function<SignedJWT, Flux<JWK>> jwkSource;
private JWSAlgorithm jwsAlgorithm = JWSAlgorithm.RS256;
private JOSEObjectTypeVerifier<JWKSecurityContext> typeVerifier = NO_TYPE_VERIFIER;
private Consumer<ConfigurableJWTProcessor<JWKSecurityContext>> jwtProcessorCustomizer;
private JwkSourceReactiveJwtDecoderBuilder(Function<SignedJWT, Flux<JWK>> jwkSource) {
Assert.notNull(jwkSource, "jwkSource cannot be null");
this.jwkSource = jwkSource;
this.jwtProcessorCustomizer = (processor) -> {
};
}
/**
* Use the given signing
* <a href="https://tools.ietf.org/html/rfc7515#section-4.1.1" target=
* "_blank">algorithm</a>.
* @param jwsAlgorithm the algorithm to use
* @return a {@link JwkSourceReactiveJwtDecoderBuilder} for further configurations
*/
public JwkSourceReactiveJwtDecoderBuilder jwsAlgorithm(JwsAlgorithm jwsAlgorithm) {
Assert.notNull(jwsAlgorithm, "jwsAlgorithm cannot be null");
this.jwsAlgorithm = JWSAlgorithm.parse(jwsAlgorithm.getName());
return this;
}
/**
* Whether to use Nimbus's typ header verification. This is {@code true} by
* default, however it may change to {@code false} in a future major release.
*
* <p>
* By turning off this feature, {@link NimbusReactiveJwtDecoder} expects
* applications to check the {@code typ} header themselves in order to determine
* what kind of validation is needed
* </p>
*
* <p>
* This is done for you when you use {@link JwtValidators} to construct a
* validator.
*
* <p>
* That means that this: <code>
* NimbusReactiveJwtDecoder jwtDecoder = NimbusReactiveJwtDecoder.withJwkSource(issuer).build();
* jwtDecoder.setJwtValidator(JwtValidators.createDefaultWithIssuer(issuer);
* </code>
*
* <p>
* Is equivalent to this: <code>
* NimbusReactiveJwtDecoder jwtDecoder = NimbusReactiveJwtDecoder.withJwkSource(key)
* .validateType(false)
* .build();
* jwtDecoder.setJwtValidator(JwtValidators.createDefaultWithValidators(
* new JwtIssuerValidator(issuer), JwtTypeValidator.jwt());
* </code>
*
* <p>
* The difference is that by setting this to {@code false}, it allows you to
* provide validation by type, like for {@code at+jwt}:
*
* <code>
* NimbusReactiveJwtDecoder jwtDecoder = NimbusReactiveJwtDecoder.withJwkSource(key)
* .validateType(false)
* .build();
* jwtDecoder.setJwtValidator(new MyAtJwtValidator());
* </code>
* @param shouldValidateTypHeader whether Nimbus should validate the typ header or
* not
* @return a {@link JwkSourceReactiveJwtDecoderBuilder} for further configurations
* @since 6.5
*/
public JwkSourceReactiveJwtDecoderBuilder validateType(boolean shouldValidateTypHeader) {
this.typeVerifier = shouldValidateTypHeader ? JWT_TYPE_VERIFIER : NO_TYPE_VERIFIER;
return this;
}
/**
* Use the given {@link Consumer} to customize the {@link JWTProcessor
* ConfigurableJWTProcessor} before passing it to the build
* {@link NimbusReactiveJwtDecoder}.
* @param jwtProcessorCustomizer the callback used to alter the processor
* @return a {@link JwkSourceReactiveJwtDecoderBuilder} for further configurations
* @since 5.4
*/
public JwkSourceReactiveJwtDecoderBuilder jwtProcessorCustomizer(
Consumer<ConfigurableJWTProcessor<JWKSecurityContext>> jwtProcessorCustomizer) {
Assert.notNull(jwtProcessorCustomizer, "jwtProcessorCustomizer cannot be null");
this.jwtProcessorCustomizer = jwtProcessorCustomizer;
return this;
}
/**
* Build the configured {@link NimbusReactiveJwtDecoder}.
* @return the configured {@link NimbusReactiveJwtDecoder}
*/
public NimbusReactiveJwtDecoder build() {
return new NimbusReactiveJwtDecoder(processor());
}
Converter<JWT, Mono<JWTClaimsSet>> processor() {
JWKSecurityContextJWKSet jwkSource = new JWKSecurityContextJWKSet();
JWSKeySelector<JWKSecurityContext> jwsKeySelector = new JWSVerificationKeySelector<>(this.jwsAlgorithm,
jwkSource);
DefaultJWTProcessor<JWKSecurityContext> jwtProcessor = new DefaultJWTProcessor<>();
jwtProcessor.setJWSKeySelector(jwsKeySelector);
jwtProcessor.setJWSTypeVerifier(this.typeVerifier);
jwtProcessor.setJWTClaimsSetVerifier((claims, context) -> {
});
this.jwtProcessorCustomizer.accept(jwtProcessor);
return (jwt) -> {
if (jwt instanceof SignedJWT) {
return this.jwkSource.apply((SignedJWT) jwt)
.onErrorMap((e) -> new IllegalStateException("Could not obtain the keys", e))
.collectList()
.map((jwks) -> createClaimsSet(jwtProcessor, jwt, new JWKSecurityContext(jwks)));
}
throw new BadJwtException("Unsupported algorithm of " + jwt.getHeader().getAlgorithm());
};
}
}
}
|
JwkSourceReactiveJwtDecoderBuilder
|
java
|
apache__camel
|
components/camel-http/src/test/java/org/apache/camel/component/http/HttpQueryTest.java
|
{
"start": 1180,
"end": 5145
}
|
/**
 * Tests query-string handling of the HTTP component: queries in the endpoint URI,
 * queries supplied via the {@code Exchange.HTTP_QUERY} header, escaped characters,
 * and non-ASCII (Danish) characters.
 */
class HttpQueryTest extends BaseHttpTest {

    private HttpServer localServer;

    private String baseUrl;

    // æøåÆØÅ expressed as unicode escapes to keep the source ASCII-safe.
    private final String DANISH_CHARACTERS_UNICODE = "\u00e6\u00f8\u00e5\u00C6\u00D8\u00C5";

    @Override
    public void setupResources() throws Exception {
        // Each handler validates that the request arrives with the exact expected query string.
        localServer = ServerBootstrap.bootstrap()
                .setCanonicalHostName("localhost").setHttpProcessor(getBasicHttpProcessor())
                .setConnectionReuseStrategy(getConnectionReuseStrategy()).setResponseFactory(getHttpResponseFactory())
                .setSslContext(getSSLContext())
                .register("/", new BasicValidationHandler(GET.name(), "hl=en&q=camel", null, getExpectedContent()))
                .register("/test/", new BasicValidationHandler(GET.name(), "my=@+camel", null, getExpectedContent()))
                .register("/user/pass",
                        new BasicValidationHandler(GET.name(), "password=baa&username=foo", null, getExpectedContent()))
                .register("/user/passwd",
                        new BasicValidationHandler(
                                GET.name(), "password='PasswordWithCharsThatNeedEscaping!≥≤!'&username=NotFromTheUSofA", null,
                                getExpectedContent()))
                .register("/danish-accepted",
                        new BasicValidationHandler(
                                GET.name(), "characters='" + DANISH_CHARACTERS_UNICODE + "'", null, getExpectedContent()))
                .create();
        localServer.start();
        baseUrl = "http://localhost:" + localServer.getLocalPort();
    }

    @Override
    public void cleanupResources() throws Exception {
        if (localServer != null) {
            localServer.stop();
        }
    }

    @Test
    public void httpQuery() {
        Exchange exchange = template.request(baseUrl + "/?hl=en&q=camel", exchange1 -> {
        });
        assertExchange(exchange);
    }

    @Test
    public void httpQueryHeader() {
        Exchange exchange = template.request(baseUrl + "/",
                exchange1 -> exchange1.getIn().setHeader(Exchange.HTTP_QUERY, "hl=en&q=camel"));
        assertExchange(exchange);
    }

    @Test
    public void httpQueryWithEscapedCharacter() {
        // %40%20 must be decoded to "@ " before reaching the server-side handler.
        Exchange exchange = template.request(baseUrl + "/test/?my=%40%20camel", exchange1 -> {
        });
        assertExchange(exchange);
    }

    @Test
    public void httpQueryWithUsernamePassword() {
        Exchange exchange = template.request(baseUrl + "/user/pass?password=baa&username=foo", exchange1 -> {
        });
        assertExchange(exchange);
    }

    @Test
    public void httpQueryWithPasswordContainingNonAsciiCharacter() {
        Exchange exchange = template.request(
                baseUrl + "/user/passwd?password='PasswordWithCharsThatNeedEscaping!≥≤!'&username=NotFromTheUSofA",
                exchange1 -> {
                });
        assertExchange(exchange);
    }

    @Test
    public void httpQueryWithPasswordContainingNonAsciiCharacterAsQueryParams() {
        Exchange exchange = template.request(baseUrl + "/user/passwd",
                exchange1 -> exchange1.getIn().setHeader(Exchange.HTTP_QUERY,
                        "password='PasswordWithCharsThatNeedEscaping!≥≤!'&username=NotFromTheUSofA"));
        assertExchange(exchange);
    }

    @Test
    public void httpDanishCharactersAcceptedInBaseURL() {
        Exchange exchange
                = template.request(baseUrl + "/danish-accepted?characters='" + DANISH_CHARACTERS_UNICODE + "'", exchange1 -> {
                });
        assertExchange(exchange);
    }

    @Test
    public void httpDanishCharactersAcceptedAsQueryParams() {
        Exchange exchange = template.request(baseUrl + "/danish-accepted",
                exchange1 -> exchange1.getIn().setHeader(Exchange.HTTP_QUERY,
                        "characters='" + DANISH_CHARACTERS_UNICODE + "'"));
        assertExchange(exchange);
    }
}
|
HttpQueryTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java
|
{
"start": 8054,
"end": 11882
}
|
class ____ implements CounterGroupBase<Counter> {
private CounterGroupBase<Counter> realGroup;
protected Group() {
realGroup = null;
}
Group(GenericGroup group) {
this.realGroup = group;
}
Group(FSGroupImpl group) {
this.realGroup = group;
}
@SuppressWarnings({ "unchecked", "rawtypes" })
Group(FrameworkGroupImpl group) {
this.realGroup = group;
}
/**
* @param counterName the name of the counter
* @return the value of the specified counter, or 0 if the counter does
* not exist.
*/
public long getCounter(String counterName) {
return getCounterValue(realGroup, counterName);
}
/**
* @return the compact stringified version of the group in the format
* {(actual-name)(display-name)(value)[][][]} where [] are compact strings
* for the counters within.
*/
public String makeEscapedCompactString() {
return toEscapedCompactString(realGroup);
}
/**
* Get the counter for the given id and create it if it doesn't exist.
* @param id the numeric id of the counter within the group
* @param name the internal counter name
* @return the counter
* @deprecated use {@link #findCounter(String)} instead
*/
@Deprecated
public Counter getCounter(int id, String name) {
return findCounter(name);
}
/**
* Get the counter for the given name and create it if it doesn't exist.
* @param name the internal counter name
* @return the counter
*/
public Counter getCounterForName(String name) {
return findCounter(name);
}
@Override
public void write(DataOutput out) throws IOException {
realGroup.write(out);
}
@Override
public void readFields(DataInput in) throws IOException {
realGroup.readFields(in);
}
@Override
public Iterator<Counter> iterator() {
return realGroup.iterator();
}
@Override
public String getName() {
return realGroup.getName();
}
@Override
public String getDisplayName() {
return realGroup.getDisplayName();
}
@Override
public void setDisplayName(String displayName) {
realGroup.setDisplayName(displayName);
}
@Override
public void addCounter(Counter counter) {
realGroup.addCounter(counter);
}
@Override
public Counter addCounter(String name, String displayName, long value) {
return realGroup.addCounter(name, displayName, value);
}
@Override
public Counter findCounter(String counterName, String displayName) {
return realGroup.findCounter(counterName, displayName);
}
@Override
public Counter findCounter(String counterName, boolean create) {
return realGroup.findCounter(counterName, create);
}
@Override
public Counter findCounter(String counterName) {
return realGroup.findCounter(counterName);
}
@Override
public int size() {
return realGroup.size();
}
@Override
public void incrAllCounters(CounterGroupBase<Counter> rightGroup) {
realGroup.incrAllCounters(rightGroup);
}
@Override
public CounterGroupBase<Counter> getUnderlyingGroup() {
return realGroup;
}
@Override
public synchronized boolean equals(Object genericRight) {
if (genericRight instanceof CounterGroupBase<?>) {
@SuppressWarnings("unchecked")
CounterGroupBase<Counter> right = ((CounterGroupBase<Counter>)
genericRight).getUnderlyingGroup();
return Iterators.elementsEqual(iterator(), right.iterator());
}
return false;
}
@Override
public int hashCode() {
return realGroup.hashCode();
}
}
// All the group impls need this for legacy group
|
Group
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/config/ResourceHandlerRegistryTests.java
|
{
"start": 2608,
"end": 8931
}
|
/**
 * Tests for {@code ResourceHandlerRegistry}: mapping configuration, cache control,
 * media types, ordering, and resource-chain resolver/transformer composition.
 */
class ResourceHandlerRegistryTests {

    private ResourceHandlerRegistry registry;

    private ResourceHandlerRegistration registration;

    @BeforeEach
    void setup() {
        this.registry = new ResourceHandlerRegistry(new GenericApplicationContext());
        this.registration = this.registry.addResourceHandler("/resources/**");
        this.registration.addResourceLocations("classpath:org/springframework/web/reactive/config/");
    }

    @Test
    void noResourceHandlers() {
        this.registry = new ResourceHandlerRegistry(new GenericApplicationContext());
        assertThat(this.registry.getHandlerMapping()).isNull();
    }

    @Test
    void mapPathToLocation() {
        MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get(""));
        exchange.getAttributes().put(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE,
                PathContainer.parsePath("/testStylesheet.css"));
        exchange.getAttributes().put(HandlerMapping.BEST_MATCHING_PATTERN_ATTRIBUTE,
                new PathPatternParser().parse("/**"));
        ResourceWebHandler handler = getHandler("/resources/**");
        handler.handle(exchange).block(Duration.ofSeconds(5));
        // The response body must be the content of the mapped classpath resource.
        StepVerifier.create(exchange.getResponse().getBody())
                .consumeNextWith(buf -> assertThat(buf.toString(UTF_8)).isEqualTo("test stylesheet content"))
                .expectComplete()
                .verify();
    }

    @Test
    void cacheControl() {
        assertThat(getHandler("/resources/**").getCacheControl()).isNull();
        this.registration.setCacheControl(CacheControl.noCache().cachePrivate());
        assertThat(getHandler("/resources/**").getCacheControl().getHeaderValue())
                .isEqualTo(CacheControl.noCache().cachePrivate().getHeaderValue());
    }

    @Test
    void mediaTypes() {
        MediaType mediaType = MediaType.parseMediaType("foo/bar");
        this.registration.setMediaTypes(Collections.singletonMap("bar", mediaType));
        ResourceWebHandler requestHandler = this.registration.getRequestHandler();
        assertThat(requestHandler.getMediaTypes()).hasSize(1);
        assertThat(requestHandler.getMediaTypes()).containsEntry("bar", mediaType);
    }

    @Test
    void order() {
        assertThat(this.registry.getHandlerMapping().getOrder()).isEqualTo(Integer.MAX_VALUE -1);
        this.registry.setOrder(0);
        assertThat(this.registry.getHandlerMapping().getOrder()).isEqualTo(0);
    }

    @Test
    void hasMappingForPattern() {
        assertThat(this.registry.hasMappingForPattern("/resources/**")).isTrue();
        assertThat(this.registry.hasMappingForPattern("/whatever")).isFalse();
    }

    @Test
    void resourceChain() {
        ResourceUrlProvider resourceUrlProvider = mock();
        this.registry.setResourceUrlProvider(resourceUrlProvider);
        ResourceResolver mockResolver = mock();
        ResourceTransformerSupport mockTransformer = mock();
        this.registration.resourceChain(true).addResolver(mockResolver).addTransformer(mockTransformer);
        ResourceWebHandler handler = getHandler("/resources/**");
        // With caching enabled, the chain is: caching, custom, webjars, path.
        assertThat(handler.getResourceResolvers()).satisfiesExactly(
                zero -> assertThat(zero).isInstanceOfSatisfying(CachingResourceResolver.class,
                        cachingResolver -> assertThat(cachingResolver.getCache()).isInstanceOf(ConcurrentMapCache.class)),
                one -> assertThat(one).isEqualTo(mockResolver),
                two -> assertThat(two).isInstanceOf(LiteWebJarsResourceResolver.class),
                three -> assertThat(three).isInstanceOf(PathResourceResolver.class));
        assertThat(handler.getResourceTransformers()).satisfiesExactly(
                zero -> assertThat(zero).isInstanceOf(CachingResourceTransformer.class),
                one -> assertThat(one).isEqualTo(mockTransformer));
        Mockito.verify(mockTransformer).setResourceUrlProvider(resourceUrlProvider);
    }

    @Test
    void resourceChainWithoutCaching() {
        this.registration.resourceChain(false);
        ResourceWebHandler handler = getHandler("/resources/**");
        assertThat(handler.getResourceResolvers()).hasExactlyElementsOfTypes(
                LiteWebJarsResourceResolver.class, PathResourceResolver.class);
        assertThat(handler.getResourceTransformers()).isEmpty();
    }

    @Test
    void resourceChainWithVersionResolver() {
        VersionResourceResolver versionResolver = new VersionResourceResolver()
                .addFixedVersionStrategy("fixed", "/**/*.js")
                .addContentVersionStrategy("/**");
        this.registration.resourceChain(true).addResolver(versionResolver);
        ResourceWebHandler handler = getHandler("/resources/**");
        assertThat(handler.getResourceResolvers()).satisfiesExactly(
                zero -> assertThat(zero).isInstanceOf(CachingResourceResolver.class),
                one -> assertThat(one).isSameAs(versionResolver),
                two -> assertThat(two).isInstanceOf(LiteWebJarsResourceResolver.class),
                three -> assertThat(three).isInstanceOf(PathResourceResolver.class));
        assertThat(handler.getResourceTransformers()).hasExactlyElementsOfTypes(
                CachingResourceTransformer.class, CssLinkResourceTransformer.class);
    }

    @Test
    void resourceChainWithOverrides() {
        CachingResourceResolver cachingResolver = mock();
        VersionResourceResolver versionResolver = mock();
        LiteWebJarsResourceResolver webjarsResolver = mock();
        PathResourceResolver pathResourceResolver = new PathResourceResolver();
        CachingResourceTransformer cachingTransformer = mock();
        CssLinkResourceTransformer cssLinkTransformer = new CssLinkResourceTransformer();
        this.registration.setCacheControl(CacheControl.maxAge(3600, TimeUnit.MILLISECONDS))
                .resourceChain(false)
                .addResolver(cachingResolver)
                .addResolver(versionResolver)
                .addResolver(webjarsResolver)
                .addResolver(pathResourceResolver)
                .addTransformer(cachingTransformer)
                .addTransformer(cssLinkTransformer);
        ResourceWebHandler handler = getHandler("/resources/**");
        // Explicitly registered resolvers/transformers replace the defaults entirely.
        List<ResourceResolver> resolvers = handler.getResourceResolvers();
        assertThat(resolvers).containsExactly(
                cachingResolver, versionResolver, webjarsResolver, pathResourceResolver);
        List<ResourceTransformer> transformers = handler.getResourceTransformers();
        assertThat(transformers).containsExactly(cachingTransformer, cssLinkTransformer);
    }

    @Test
    void ignoreLastModified() {
        this.registration.setUseLastModified(false);
        assertThat(getHandler("/resources/**").isUseLastModified()).isFalse();
    }

    // Looks up the ResourceWebHandler the registry registered for the given pattern.
    private ResourceWebHandler getHandler(String pathPattern) {
        SimpleUrlHandlerMapping mapping = (SimpleUrlHandlerMapping) this.registry.getHandlerMapping();
        return (ResourceWebHandler) mapping.getUrlMap().get(pathPattern);
    }
}
|
ResourceHandlerRegistryTests
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/error/ParseErrorTest_19.java
|
{
"start": 189,
"end": 490
}
|
/**
 * Verifies that parsing an unterminated JSON array raises a {@code JSONException}.
 */
class ParseErrorTest_19 extends TestCase {

    public void test_for_error() throws Exception {
        Exception error = null;
        try {
            // Missing closing ']' must trigger a parse failure.
            JSON.parse("[\"wenshao\"");
        } catch (JSONException ex) {
            error = ex;
        }
        Assert.assertNotNull(error);
    }
}
|
ParseErrorTest_19
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java
|
{
"start": 1497,
"end": 1900
}
|
class ____ extends NMView {
private static final long BYTES_IN_MB = 1024 * 1024;
@Override
protected void commonPreHead(HTML<__> html) {
super.commonPreHead(html);
setTitle("NodeManager information");
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
}
@Override
protected Class<? extends SubView> content() {
return NodeBlock.class;
}
public static
|
NodePage
|
java
|
apache__camel
|
components/camel-lucene/src/main/java/org/apache/camel/component/lucene/LuceneSearcher.java
|
{
"start": 1599,
"end": 4034
}
|
/**
 * Executes phrase searches against a file-system Lucene index and converts the
 * matches into the component's {@code Hits}/{@code Hit} model.
 */
class LuceneSearcher {

    private static final Logger LOG = LoggerFactory.getLogger(LuceneSearcher.class);

    private Analyzer analyzer;
    private IndexSearcher indexSearcher;
    private IndexReader indexReader;
    private ScoreDoc[] hits;

    /**
     * Opens the index for reading.
     *
     * @param indexDirectory the index location; when {@code null}, falls back to
     *        the relative "./indexDirectory" path
     * @param analyzer the analyzer used to parse search phrases
     */
    public void open(File indexDirectory, Analyzer analyzer) throws IOException {
        if (indexDirectory != null) {
            indexReader = DirectoryReader.open(new NIOFSDirectory(indexDirectory.toPath()));
        } else {
            indexReader = DirectoryReader.open(new NIOFSDirectory(new File("./indexDirectory").toPath()));
        }
        indexSearcher = new IndexSearcher(indexReader);
        this.analyzer = analyzer;
    }

    /** Releases the index reader and the analyzer. */
    public void close() throws IOException {
        indexReader.close();
        analyzer.close();
    }

    public Hits search(String searchPhrase, int maxNumberOfHits, int totalHitsThreshold) throws Exception {
        return search(searchPhrase, maxNumberOfHits, totalHitsThreshold, false);
    }

    /**
     * Runs the search and materializes up to {@code maxNumberOfHits} results.
     *
     * @param returnLuceneDocs when {@code true}, each Hit also carries the raw
     *        Lucene {@code Document}
     */
    public Hits search(String searchPhrase, int maxNumberOfHits, int totalHitsThreshold, boolean returnLuceneDocs)
            throws Exception {
        Hits searchHits = new Hits();
        int numberOfHits = doSearch(searchPhrase, maxNumberOfHits, totalHitsThreshold);
        searchHits.setNumberOfHits(numberOfHits);
        for (ScoreDoc hit : hits) {
            Document selectedDocument = indexSearcher.doc(hit.doc);
            Hit aHit = new Hit();
            if (returnLuceneDocs) {
                aHit.setDocument(selectedDocument);
            }
            aHit.setHitLocation(hit.doc);
            aHit.setScore(hit.score);
            // The searchable text is stored in the "contents" field.
            aHit.setData(selectedDocument.get("contents"));
            searchHits.getHit().add(aHit);
        }
        return searchHits;
    }

    // Parses the phrase against the "contents" field and collects the top-scoring docs.
    private int doSearch(String searchPhrase, int maxNumberOfHits, int totalHitsThreshold)
            throws NullPointerException, ParseException, IOException {
        LOG.trace("*** Search Phrase: {} ***", searchPhrase);
        QueryParser parser = new QueryParser("contents", analyzer);
        Query query = parser.parse(searchPhrase);
        TopScoreDocCollector collector = TopScoreDocCollector.create(maxNumberOfHits, totalHitsThreshold);
        indexSearcher.search(query, collector);
        hits = collector.topDocs().scoreDocs;
        LOG.trace("*** Search generated {} hits ***", hits.length);
        return hits.length;
    }
}
|
LuceneSearcher
|
java
|
hibernate__hibernate-orm
|
hibernate-spatial/src/main/java/org/hibernate/spatial/dialect/sqlserver/AbstractSqlServerGISType.java
|
{
"start": 1048,
"end": 3465
}
|
/**
 * Base {@link JdbcType} for SQL Server GIS column types: binds geometries as the
 * SQL Server serialization format and decodes byte[]/Blob results back to
 * {@link Geometry}. Must be abstract because subclasses supply the SQL type code
 * and literal formatter.
 */
abstract class AbstractSqlServerGISType implements JdbcType {

    @Override
    public int getJdbcTypeCode() {
        return Types.ARRAY;
    }

    @Override
    public abstract int getDefaultSqlTypeCode();

    @Override
    public abstract <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType);

    @Override
    public <X> ValueBinder<X> getBinder(final JavaType<X> javaType) {
        return new BasicBinder<X>( javaType, this ) {
            @Override
            protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
                    throws SQLException {
                // Serialize the geometry to SQL Server's native binary format.
                final Geometry geometry = getJavaType().unwrap( value, Geometry.class, options );
                final byte[] bytes = Encoders.encode( geometry );
                st.setObject( index, bytes );
            }

            @Override
            protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
                    throws SQLException {
                final Geometry geometry = getJavaType().unwrap( value, Geometry.class, options );
                final byte[] bytes = Encoders.encode( geometry );
                st.setObject( name, bytes );
            }
        };
    }

    @Override
    public <X> ValueExtractor<X> getExtractor(final JavaType<X> javaType) {
        return new BasicExtractor<X>( javaType, this ) {
            @Override
            protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
                return getJavaType().wrap( toGeometry( rs.getObject( paramIndex ) ), options );
            }

            @Override
            protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
                return getJavaType().wrap( toGeometry( statement.getObject( index ) ), options );
            }

            @Override
            protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
                    throws SQLException {
                return getJavaType().wrap( toGeometry( statement.getObject( name ) ), options );
            }
        };
    }

    // Accepts the two storage forms drivers return (byte[] or Blob) and decodes.
    private Geometry toGeometry(Object obj) {
        byte[] raw = null;
        if ( obj == null ) {
            return null;
        }
        if ( ( obj instanceof byte[] ) ) {
            raw = (byte[]) obj;
        }
        else if ( obj instanceof Blob ) {
            raw = toByteArray( (Blob) obj );
        }
        else {
            throw new IllegalArgumentException( "Expected byte array or BLOB" );
        }
        return Decoders.decode( raw );
    }

    private byte[] toByteArray(Blob blob) {
        try {
            // JDBC Blob positions are 1-based.
            return blob.getBytes( 1, (int) blob.length() );
        }
        catch (SQLException e) {
            throw new RuntimeException( "Error on transforming blob into array.", e );
        }
    }
}
|
AbstractSqlServerGISType
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/cache/interceptor/SimpleCacheErrorHandler.java
|
{
"start": 940,
"end": 1507
}
|
class ____ implements CacheErrorHandler {
@Override
public void handleCacheGetError(RuntimeException exception, Cache cache, Object key) {
throw exception;
}
@Override
public void handleCachePutError(RuntimeException exception, Cache cache, Object key, @Nullable Object value) {
throw exception;
}
@Override
public void handleCacheEvictError(RuntimeException exception, Cache cache, Object key) {
throw exception;
}
@Override
public void handleCacheClearError(RuntimeException exception, Cache cache) {
throw exception;
}
}
|
SimpleCacheErrorHandler
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/scripting/xmltags/OgnlMemberAccess.java
|
{
"start": 905,
"end": 1323
}
|
class ____ based on <a href=
* 'https://github.com/jkuhnert/ognl/blob/OGNL_3_2_1/src/java/ognl/DefaultMemberAccess.java'>DefaultMemberAccess</a>.
*
* @author Kazuki Shimizu
*
* @since 3.5.0
*
* @see <a href=
* 'https://github.com/jkuhnert/ognl/blob/OGNL_3_2_1/src/java/ognl/DefaultMemberAccess.java'>DefaultMemberAccess</a>
* @see <a href='https://github.com/jkuhnert/ognl/issues/47'>#47 of ognl</a>
*/
|
that
|
java
|
apache__camel
|
components/camel-ai/camel-chatscript/src/main/java/org/apache/camel/component/chatscript/ChatScriptMessage.java
|
{
"start": 913,
"end": 2379
}
|
class ____ {
@JsonProperty("username")
private String username;
@JsonProperty("botname")
private String botname;
@JsonProperty("message")
private String body;
@JsonProperty("response")
private String reply;
public ChatScriptMessage(final String userName, final String botName, final String iBody) {
this.username = userName;
this.botname = botName;
this.body = iBody;
}
public ChatScriptMessage() {
}
public String getUserName() {
return username;
}
public void setUserName(String userName) {
this.username = userName;
}
public String getBotName() {
return botname;
}
public void setBotName(String botName) {
this.botname = botName;
}
public String getBody() {
return body;
}
public void setBody(String iBody) {
this.body = iBody;
}
public String getReply() {
return reply;
}
public void setReply(String iReply) {
this.reply = iReply;
}
public String toCSFormat() {
String s;
final char nullChar = (char) 0;
s = this.username + nullChar + this.botname + nullChar + this.body + nullChar;
return s;
}
@Override
public String toString() {
return "ChatScriptMessage [username=" + username + ", botname=" + botname + ", message=" + body + ", reply=" + reply
+ "]";
}
}
|
ChatScriptMessage
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableDelay.java
|
{
"start": 4008,
"end": 4279
}
|
class ____ implements Runnable {
@Override
public void run() {
try {
downstream.onComplete();
} finally {
w.dispose();
}
}
}
}
}
|
OnComplete
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkDeleteRoleRestIT.java
|
{
"start": 814,
"end": 4908
}
|
class ____ extends SecurityOnTrialLicenseRestTestCase {
@SuppressWarnings("unchecked")
public void testDeleteValidExistingRoles() throws Exception {
Map<String, Object> responseMap = upsertRoles("""
{"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test2":
{"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test3":
{"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["write"]}]}}}""");
assertThat(responseMap, not(hasKey("errors")));
List<String> rolesToDelete = List.of("test1", "test3");
Map<String, Object> response = deleteRoles(rolesToDelete);
List<String> deleted = (List<String>) response.get("deleted");
assertThat(deleted, equalTo(rolesToDelete));
assertRolesDeleted(rolesToDelete);
assertRolesNotDeleted(List.of("test2"));
}
@SuppressWarnings("unchecked")
public void testTryDeleteNonExistingRoles() throws Exception {
Map<String, Object> responseMap = upsertRoles("""
{"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}}}""");
assertThat(responseMap, not(hasKey("errors")));
List<String> rolesToDelete = List.of("test1", "test2", "test3");
Map<String, Object> response = deleteRoles(rolesToDelete);
List<String> deleted = (List<String>) response.get("deleted");
List<String> notFound = (List<String>) response.get("not_found");
assertThat(deleted, equalTo(List.of("test1")));
assertThat(notFound, equalTo(List.of("test2", "test3")));
assertRolesDeleted(rolesToDelete);
}
@SuppressWarnings("unchecked")
public void testTryDeleteReservedRoleName() throws Exception {
Map<String, Object> responseMap = upsertRoles("""
{"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}}}""");
assertThat(responseMap, not(hasKey("errors")));
Map<String, Object> response = deleteRoles(List.of("superuser", "test1"));
List<String> deleted = (List<String>) response.get("deleted");
assertThat(deleted, equalTo(List.of("test1")));
Map<String, Object> errors = (Map<String, Object>) response.get("errors");
assertThat((Integer) errors.get("count"), equalTo(1));
Map<String, Object> errorDetails = (Map<String, Object>) ((Map<String, Object>) errors.get("details")).get("superuser");
assertThat(
errorDetails,
equalTo(Map.of("type", "illegal_argument_exception", "reason", "role [superuser] is reserved and cannot be deleted"))
);
assertRolesDeleted(List.of("test1"));
assertRolesNotDeleted(List.of("superuser"));
}
protected Map<String, Object> deleteRoles(List<String> roles) throws IOException {
Request request = new Request(HttpDelete.METHOD_NAME, "/_security/role");
request.setJsonEntity(Strings.format("""
{"names": [%s]}""", String.join(",", roles.stream().map(role -> "\"" + role + "\"").toList())));
Response response = adminClient().performRequest(request);
assertOK(response);
return responseAsMap(response);
}
protected void assertRolesDeleted(List<String> roleNames) {
for (String roleName : roleNames) {
ResponseException exception = assertThrows(
ResponseException.class,
() -> adminClient().performRequest(new Request("GET", "/_security/role/" + roleName))
);
assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(404));
}
}
protected void assertRolesNotDeleted(List<String> roleNames) throws IOException {
for (String roleName : roleNames) {
Response response = adminClient().performRequest(new Request("GET", "/_security/role/" + roleName));
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
}
}
}
|
BulkDeleteRoleRestIT
|
java
|
google__auto
|
value/src/it/functional/src/test/java/com/google/auto/value/gwt/CustomFieldSerializerTest.java
|
{
"start": 3486,
"end": 4399
}
|
class ____ implements Serializable {
abstract String getPackage();
abstract boolean isDefault();
static ValueTypeWithGetters create(String pkg, boolean dflt) {
return new AutoValue_CustomFieldSerializerTest_ValueTypeWithGetters(pkg, dflt);
}
}
@Test
public void testCustomFieldSerializerWithGetters() throws SerializationException {
AutoValue_CustomFieldSerializerTest_ValueTypeWithGetters instance =
(AutoValue_CustomFieldSerializerTest_ValueTypeWithGetters)
ValueTypeWithGetters.create("package", true);
AutoValue_CustomFieldSerializerTest_ValueTypeWithGetters_CustomFieldSerializer.serialize(
streamWriter, instance);
mock.verify(
() -> {
streamWriter.writeString("package");
streamWriter.writeBoolean(true);
});
}
@AutoValue
@GwtCompatible(serializable = true)
abstract static
|
ValueTypeWithGetters
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-rest/src/main/java/org/apache/camel/component/cxf/jaxrs/CxfRsEndpoint.java
|
{
"start": 29409,
"end": 30147
}
|
class ____ and put the response
* object into the exchange for further processing.
*/
public void setPerformInvocation(boolean performInvocation) {
this.performInvocation = performInvocation;
}
public boolean isPropagateContexts() {
return propagateContexts;
}
/**
* When the option is true, JAXRS UriInfo, HttpHeaders, Request and SecurityContext contexts will be available to
* custom CXFRS processors as typed Camel exchange properties. These contexts can be used to analyze the current
* requests using JAX-RS API.
*/
public void setPropagateContexts(boolean propagateContexts) {
this.propagateContexts = propagateContexts;
}
private static
|
instance
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/reflect/TypeTokenTest.java
|
{
"start": 48804,
"end": 52758
}
|
interface ____ extends ListIterable<String> {}
public void testGetSupertype_withTypeVariable() {
ParameterizedType expectedType =
Types.newParameterizedType(
Iterable.class,
Types.newParameterizedType(List.class, ListIterable.class.getTypeParameters()[0]));
assertEquals(
expectedType, TypeToken.of(ListIterable.class).getSupertype(Iterable.class).getType());
}
public <A, T extends Number & Iterable<A>>
void testGetSupertype_typeVariableWithMultipleBounds() {
assertEquals(
Number.class, new TypeToken<T>(getClass()) {}.getSupertype(Number.class).getType());
assertEquals(
new TypeToken<Iterable<A>>() {},
new TypeToken<T>(getClass()) {}.getSupertype(Iterable.class));
}
public void testGetSupertype_withoutTypeVariable() {
ParameterizedType expectedType =
Types.newParameterizedType(
Iterable.class, Types.newParameterizedType(List.class, String.class));
assertEquals(
expectedType,
TypeToken.of(StringListIterable.class).getSupertype(Iterable.class).getType());
}
public void testGetSupertype_chained() {
@SuppressWarnings("unchecked") // StringListIterable extends ListIterable<String>
TypeToken<ListIterable<String>> listIterableType =
(TypeToken<ListIterable<String>>)
TypeToken.of(StringListIterable.class).getSupertype(ListIterable.class);
ParameterizedType expectedType =
Types.newParameterizedType(
Iterable.class, Types.newParameterizedType(List.class, String.class));
assertEquals(expectedType, listIterableType.getSupertype(Iterable.class).getType());
}
public void testGetSupertype_withArray() {
assertEquals(
new TypeToken<Iterable<List<String>>[]>() {},
TypeToken.of(StringListIterable[].class).getSupertype(Iterable[].class));
assertEquals(int[].class, TypeToken.of(int[].class).getSupertype(int[].class).getType());
assertEquals(Object.class, TypeToken.of(int[].class).getSupertype(Object.class).getType());
assertEquals(int[][].class, TypeToken.of(int[][].class).getSupertype(int[][].class).getType());
assertEquals(
Object[].class, TypeToken.of(String[].class).getSupertype(Object[].class).getType());
assertEquals(Object.class, TypeToken.of(String[].class).getSupertype(Object.class).getType());
}
public void testGetSupertype_fromWildcard() {
@SuppressWarnings("unchecked") // can't do new TypeToken<? extends ...>() {}
TypeToken<? extends List<String>> type =
(TypeToken<? extends List<String>>)
TypeToken.of(Types.subtypeOf(new TypeToken<List<String>>() {}.getType()));
assertEquals(new TypeToken<Iterable<String>>() {}, type.getSupertype(Iterable.class));
}
public <T extends Iterable<String>> void testGetSupertype_fromTypeVariable() {
@SuppressWarnings("unchecked") // to construct TypeToken<T> from TypeToken.of()
TypeToken<T> typeVariableToken = (TypeToken<T>) TypeToken.of(new TypeCapture<T>() {}.capture());
assertEquals(
new TypeToken<Iterable<String>>() {}, typeVariableToken.getSupertype(Iterable.class));
}
@SuppressWarnings("rawtypes") // purpose is to test raw type
public void testGetSupertype_fromRawClass() {
assertEquals(
Types.newParameterizedType(Iterable.class, List.class.getTypeParameters()[0]),
new TypeToken<List>() {}.getSupertype(Iterable.class).getType());
}
@SuppressWarnings({"rawtypes", "unchecked"}) // purpose is to test raw type
public void testGetSupertype_notSupertype() {
assertThrows(
IllegalArgumentException.class,
() -> new TypeToken<List<String>>() {}.getSupertype((Class) String.class));
}
public void testGetSupertype_fromArray() {
assertEquals(
new TypeToken<Iterable<String>[]>() {},
new TypeToken<List<String>[]>() {}.getSupertype(Iterable[].class));
}
private
|
StringListArrayIterable
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java
|
{
"start": 1873,
"end": 6224
}
|
class ____ extends ESTestCase {
private ThreadPool threadPool;
private CapturingTransport transport;
private ClusterService clusterService;
private TransportService transportService;
private ShardStateAction shardStateAction;
public void setUp() throws Exception {
super.setUp();
threadPool = new TestThreadPool(getClass().getName());
transport = new CapturingTransport();
clusterService = createClusterService(threadPool);
transportService = transport.createTransportService(
clusterService.getSettings(),
threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR,
boundAddress -> clusterService.localNode(),
null,
Collections.emptySet()
);
transportService.start();
transportService.acceptIncomingRequests();
shardStateAction = new ShardStateAction(clusterService, transportService, null, null, threadPool);
}
public void tearDown() throws Exception {
try {
IOUtils.close(transportService, clusterService, transport);
} finally {
terminate(threadPool);
}
super.tearDown();
}
public void testTranslogSyncAfterGlobalCheckpointSync() throws Exception {
final IndicesService indicesService = mock(IndicesService.class);
final Index index = new Index("index", "uuid");
final IndexService indexService = mock(IndexService.class);
when(indicesService.indexServiceSafe(index)).thenReturn(indexService);
final int id = randomIntBetween(0, 4);
final IndexShard indexShard = mock(IndexShard.class);
when(indexService.getShard(id)).thenReturn(indexShard);
final ShardId shardId = new ShardId(index, id);
when(indexShard.shardId()).thenReturn(shardId);
final Translog.Durability durability = randomFrom(Translog.Durability.ASYNC, Translog.Durability.REQUEST);
when(indexShard.getTranslogDurability()).thenReturn(durability);
final long globalCheckpoint = randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Integer.MAX_VALUE);
final long lastSyncedGlobalCheckpoint;
if (randomBoolean() && globalCheckpoint != SequenceNumbers.NO_OPS_PERFORMED) {
lastSyncedGlobalCheckpoint = randomIntBetween(
Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED),
Math.toIntExact(globalCheckpoint) - 1
);
assert lastSyncedGlobalCheckpoint < globalCheckpoint;
} else {
lastSyncedGlobalCheckpoint = globalCheckpoint;
}
when(indexShard.getLastKnownGlobalCheckpoint()).thenReturn(globalCheckpoint);
when(indexShard.getLastSyncedGlobalCheckpoint()).thenReturn(lastSyncedGlobalCheckpoint);
doAnswer(invocation -> {
Consumer<Exception> argument = invocation.getArgument(1);
argument.accept(null);
return null;
}).when(indexShard).syncGlobalCheckpoint(anyLong(), any());
final GlobalCheckpointSyncAction action = new GlobalCheckpointSyncAction(
Settings.EMPTY,
transportService,
clusterService,
indicesService,
threadPool,
shardStateAction,
new ActionFilters(Collections.emptySet())
);
final GlobalCheckpointSyncAction.Request primaryRequest = new GlobalCheckpointSyncAction.Request(indexShard.shardId());
if (randomBoolean()) {
action.shardOperationOnPrimary(primaryRequest, indexShard, ActionTestUtils.assertNoFailureListener(r -> {}));
} else {
action.shardOperationOnReplica(
new GlobalCheckpointSyncAction.Request(indexShard.shardId()),
indexShard,
ActionTestUtils.assertNoFailureListener(r -> {})
);
}
if (durability == Translog.Durability.ASYNC || lastSyncedGlobalCheckpoint == globalCheckpoint) {
verify(indexShard, never()).sync();
verify(indexShard, never()).syncGlobalCheckpoint(anyLong(), any());
} else {
verify(indexShard, never()).sync();
verify(indexShard).syncGlobalCheckpoint(eq(globalCheckpoint), any());
}
}
}
|
GlobalCheckpointSyncActionTests
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 818732,
"end": 830332
}
|
class ____ extends YamlDeserializerBase<RecipientListDefinition> {
public RecipientListDefinitionDeserializer() {
super(RecipientListDefinition.class);
}
@Override
protected RecipientListDefinition newInstance() {
return new RecipientListDefinition();
}
@Override
protected boolean setProperty(RecipientListDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "aggregationStrategy": {
String val = asText(node);
target.setAggregationStrategy(val);
break;
}
case "aggregationStrategyMethodAllowNull": {
String val = asText(node);
target.setAggregationStrategyMethodAllowNull(val);
break;
}
case "aggregationStrategyMethodName": {
String val = asText(node);
target.setAggregationStrategyMethodName(val);
break;
}
case "cacheSize": {
String val = asText(node);
target.setCacheSize(val);
break;
}
case "delimiter": {
String val = asText(node);
target.setDelimiter(val);
break;
}
case "disabled": {
String val = asText(node);
target.setDisabled(val);
break;
}
case "executorService": {
String val = asText(node);
target.setExecutorService(val);
break;
}
case "expression": {
org.apache.camel.model.language.ExpressionDefinition val = asType(node, org.apache.camel.model.language.ExpressionDefinition.class);
target.setExpression(val);
break;
}
case "ignoreInvalidEndpoints": {
String val = asText(node);
target.setIgnoreInvalidEndpoints(val);
break;
}
case "onPrepare": {
String val = asText(node);
target.setOnPrepare(val);
break;
}
case "parallelAggregate": {
String val = asText(node);
target.setParallelAggregate(val);
break;
}
case "parallelProcessing": {
String val = asText(node);
target.setParallelProcessing(val);
break;
}
case "shareUnitOfWork": {
String val = asText(node);
target.setShareUnitOfWork(val);
break;
}
case "stopOnException": {
String val = asText(node);
target.setStopOnException(val);
break;
}
case "streaming": {
String val = asText(node);
target.setStreaming(val);
break;
}
case "synchronous": {
String val = asText(node);
target.setSynchronous(val);
break;
}
case "timeout": {
String val = asText(node);
target.setTimeout(val);
break;
}
case "id": {
String val = asText(node);
target.setId(val);
break;
}
case "description": {
String val = asText(node);
target.setDescription(val);
break;
}
case "note": {
String val = asText(node);
target.setNote(val);
break;
}
default: {
ExpressionDefinition ed = target.getExpressionType();
if (ed != null) {
throw new org.apache.camel.dsl.yaml.common.exception.DuplicateFieldException(node, propertyName, "as an expression");
}
ed = ExpressionDeserializers.constructExpressionType(propertyKey, node);
if (ed != null) {
target.setExpressionType(ed);
} else {
return false;
}
}
}
return true;
}
}
@YamlType(
nodes = "redeliveryPolicy",
types = org.apache.camel.model.RedeliveryPolicyDefinition.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Redelivery Policy",
description = "To configure re-delivery for error handling",
deprecated = false,
properties = {
@YamlProperty(name = "allowRedeliveryWhileStopping", type = "boolean", defaultValue = "true", description = "Controls whether to allow redelivery while stopping/shutting down a route that uses error handling.", displayName = "Allow Redelivery While Stopping"),
@YamlProperty(name = "asyncDelayedRedelivery", type = "boolean", defaultValue = "false", description = "Allow asynchronous delayed redelivery. The route, in particular the consumer's component, must support the Asynchronous Routing Engine (e.g. seda).", displayName = "Async Delayed Redelivery"),
@YamlProperty(name = "backOffMultiplier", type = "number", defaultValue = "2.0", description = "Sets the back off multiplier", displayName = "Back Off Multiplier"),
@YamlProperty(name = "collisionAvoidanceFactor", type = "number", defaultValue = "0.15", description = "Sets the collision avoidance factor", displayName = "Collision Avoidance Factor"),
@YamlProperty(name = "delayPattern", type = "string", description = "Sets the delay pattern with delay intervals.", displayName = "Delay Pattern"),
@YamlProperty(name = "disableRedelivery", type = "boolean", defaultValue = "false", description = "Disables redelivery (same as setting maximum redeliveries to 0)", displayName = "Disable Redelivery"),
@YamlProperty(name = "exchangeFormatterRef", type = "string", description = "Sets the reference of the instance of org.apache.camel.spi.ExchangeFormatter to generate the log message from exchange.", displayName = "Exchange Formatter Ref"),
@YamlProperty(name = "id", type = "string", description = "The id of this node", displayName = "Id"),
@YamlProperty(name = "logContinued", type = "boolean", defaultValue = "false", description = "Sets whether continued exceptions should be logged or not. Can be used to include or reduce verbose.", displayName = "Log Continued"),
@YamlProperty(name = "logExhausted", type = "boolean", defaultValue = "true", description = "Sets whether exhausted exceptions should be logged or not. Can be used to include or reduce verbose.", displayName = "Log Exhausted"),
@YamlProperty(name = "logExhaustedMessageBody", type = "boolean", defaultValue = "false", description = "Sets whether exhausted message body should be logged including message history or not (supports property placeholders). Can be used to include or reduce verbose. Requires logExhaustedMessageHistory to be enabled.", displayName = "Log Exhausted Message Body"),
@YamlProperty(name = "logExhaustedMessageHistory", type = "boolean", defaultValue = "false", description = "Sets whether exhausted exceptions should be logged including message history or not (supports property placeholders). Can be used to include or reduce verbose.", displayName = "Log Exhausted Message History"),
@YamlProperty(name = "logHandled", type = "boolean", defaultValue = "false", description = "Sets whether handled exceptions should be logged or not. Can be used to include or reduce verbose.", displayName = "Log Handled"),
@YamlProperty(name = "logNewException", type = "boolean", defaultValue = "true", description = "Sets whether new exceptions should be logged or not. Can be used to include or reduce verbose. A new exception is an exception that was thrown while handling a previous exception.", displayName = "Log New Exception"),
@YamlProperty(name = "logRetryAttempted", type = "boolean", defaultValue = "true", description = "Sets whether retry attempts should be logged or not. Can be used to include or reduce verbose.", displayName = "Log Retry Attempted"),
@YamlProperty(name = "logRetryStackTrace", type = "boolean", defaultValue = "false", description = "Sets whether stack traces should be logged when an retry attempt failed. Can be used to include or reduce verbose.", displayName = "Log Retry Stack Trace"),
@YamlProperty(name = "logStackTrace", type = "boolean", defaultValue = "true", description = "Sets whether stack traces should be logged. Can be used to include or reduce verbose.", displayName = "Log Stack Trace"),
@YamlProperty(name = "maximumRedeliveries", type = "number", description = "Sets the maximum redeliveries x = redeliver at most x times 0 = no redeliveries -1 = redeliver forever", displayName = "Maximum Redeliveries"),
@YamlProperty(name = "maximumRedeliveryDelay", type = "string", defaultValue = "60000", description = "Sets the maximum delay between redelivery", displayName = "Maximum Redelivery Delay"),
@YamlProperty(name = "redeliveryDelay", type = "string", defaultValue = "1000", description = "Sets the initial redelivery delay", displayName = "Redelivery Delay"),
@YamlProperty(name = "retriesExhaustedLogLevel", type = "enum:TRACE,DEBUG,INFO,WARN,ERROR,OFF", defaultValue = "ERROR", description = "Sets the logging level to use when retries have been exhausted", displayName = "Retries Exhausted Log Level"),
@YamlProperty(name = "retryAttemptedLogInterval", type = "number", defaultValue = "1", description = "Sets the interval to use for logging retry attempts", displayName = "Retry Attempted Log Interval"),
@YamlProperty(name = "retryAttemptedLogLevel", type = "enum:TRACE,DEBUG,INFO,WARN,ERROR,OFF", defaultValue = "DEBUG", description = "Sets the logging level to use for logging retry attempts", displayName = "Retry Attempted Log Level"),
@YamlProperty(name = "useCollisionAvoidance", type = "boolean", defaultValue = "false", description = "Turn on collision avoidance.", displayName = "Use Collision Avoidance"),
@YamlProperty(name = "useExponentialBackOff", type = "boolean", defaultValue = "false", description = "Turn on exponential back off", displayName = "Use Exponential Back Off")
}
)
public static
|
RecipientListDefinitionDeserializer
|
java
|
apache__maven
|
compat/maven-compat/src/main/java/org/apache/maven/artifact/manager/DefaultWagonManager.java
|
{
"start": 2264,
"end": 6616
}
|
class ____ IoC has no chance to inject the loggers
@Inject
private Logger log;
@Inject
private LegacySupport legacySupport;
@Inject
private SettingsDecrypter settingsDecrypter;
@Inject
private MirrorSelector mirrorSelector;
@Inject
private ArtifactRepositoryFactory artifactRepositoryFactory;
@Override
public AuthenticationInfo getAuthenticationInfo(String id) {
MavenSession session = legacySupport.getSession();
if (session != null && id != null) {
MavenExecutionRequest request = session.getRequest();
if (request != null) {
List<Server> servers = request.getServers();
if (servers != null) {
for (Server server : servers) {
if (id.equalsIgnoreCase(server.getId())) {
SettingsDecryptionResult result =
settingsDecrypter.decrypt(new DefaultSettingsDecryptionRequest(server));
server = result.getServer();
AuthenticationInfo authInfo = new AuthenticationInfo();
authInfo.setUserName(server.getUsername());
authInfo.setPassword(server.getPassword());
authInfo.setPrivateKey(server.getPrivateKey());
authInfo.setPassphrase(server.getPassphrase());
return authInfo;
}
}
}
}
}
// empty one to prevent NPE
return new AuthenticationInfo();
}
@Override
public ProxyInfo getProxy(String protocol) {
MavenSession session = legacySupport.getSession();
if (session != null && protocol != null) {
MavenExecutionRequest request = session.getRequest();
if (request != null) {
List<Proxy> proxies = request.getProxies();
if (proxies != null) {
for (Proxy proxy : proxies) {
if (proxy.isActive() && protocol.equalsIgnoreCase(proxy.getProtocol())) {
SettingsDecryptionResult result =
settingsDecrypter.decrypt(new DefaultSettingsDecryptionRequest(proxy));
proxy = result.getProxy();
ProxyInfo proxyInfo = new ProxyInfo();
proxyInfo.setHost(proxy.getHost());
proxyInfo.setType(proxy.getProtocol());
proxyInfo.setPort(proxy.getPort());
proxyInfo.setNonProxyHosts(proxy.getNonProxyHosts());
proxyInfo.setUserName(proxy.getUsername());
proxyInfo.setPassword(proxy.getPassword());
return proxyInfo;
}
}
}
}
}
return null;
}
@Override
public void getArtifact(Artifact artifact, ArtifactRepository repository)
throws TransferFailedException, ResourceDoesNotExistException {
getArtifact(artifact, repository, null, false);
}
@Override
public void getArtifact(Artifact artifact, List<ArtifactRepository> remoteRepositories)
throws TransferFailedException, ResourceDoesNotExistException {
getArtifact(artifact, remoteRepositories, null, false);
}
@Deprecated
@Override
public ArtifactRepository getMirrorRepository(ArtifactRepository repository) {
Mirror mirror = mirrorSelector.getMirror(
repository, legacySupport.getSession().getSettings().getMirrors());
if (mirror != null) {
String id = mirror.getId();
if (id == null) {
// TODO this should be illegal in settings.xml
id = repository.getId();
}
log.debug("Using mirror: " + mirror.getUrl() + " (id: " + id + ")");
repository = artifactRepositoryFactory.createArtifactRepository(
id, mirror.getUrl(), repository.getLayout(), repository.getSnapshots(), repository.getReleases());
}
return repository;
}
}
|
or
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/AppArtifact.java
|
{
"start": 646,
"end": 3995
}
|
class ____ extends AppArtifactCoords implements ResolvedDependency, Serializable {
private static final long serialVersionUID = -6226544163467103712L;
protected PathsCollection paths;
private final WorkspaceModule module;
private final String scope;
private final int flags;
public AppArtifact(AppArtifactCoords coords) {
this(coords, null);
}
public AppArtifact(AppArtifactCoords coords, WorkspaceModule module) {
this(coords.getGroupId(), coords.getArtifactId(), coords.getClassifier(), coords.getType(), coords.getVersion(),
module, "compile", 0);
}
public AppArtifact(String groupId, String artifactId, String version) {
super(groupId, artifactId, version);
module = null;
scope = "compile";
flags = 0;
}
public AppArtifact(String groupId, String artifactId, String classifier, String type, String version) {
super(groupId, artifactId, classifier, type, version);
module = null;
scope = "compile";
flags = 0;
}
public AppArtifact(String groupId, String artifactId, String classifier, String type, String version,
WorkspaceModule module, String scope, int flags) {
super(groupId, artifactId, classifier, type, version);
this.module = module;
this.scope = scope;
this.flags = flags;
}
/**
* @deprecated in favor of {@link #getResolvedPaths()}
*/
@Deprecated
public Path getPath() {
return paths.getSinglePath();
}
/**
* Associates the artifact with the given path
*
* @param path artifact location
*/
public void setPath(Path path) {
setPaths(PathsCollection.of(path));
}
/**
* Collection of the paths that collectively constitute the artifact's content.
* Normally, especially in the Maven world, an artifact is resolved to a single path,
* e.g. a JAR or a project's output directory. However, in Gradle, depending on the build/test phase,
* artifact's content may need to be represented as a collection of paths.
*
* @return collection of paths that constitute the artifact's content
*/
public PathsCollection getPaths() {
return paths;
}
/**
* Associates the artifact with a collection of paths that constitute its content.
*
* @param paths collection of paths that constitute the artifact's content.
*/
public void setPaths(PathsCollection paths) {
this.paths = paths;
}
/**
* Whether the artifact has been resolved, i.e. associated with paths
* that constitute its content.
*
* @return true if the artifact has been resolved, otherwise - false
*/
@Override
public boolean isResolved() {
return paths != null && !paths.isEmpty();
}
@Override
public PathCollection getResolvedPaths() {
return paths == null ? null : PathList.from(paths);
}
@Override
public WorkspaceModule getWorkspaceModule() {
return module;
}
@Override
public String getScope() {
return scope;
}
@Override
public int getFlags() {
return flags;
}
@Override
public Collection<ArtifactCoords> getDependencies() {
return List.of();
}
}
|
AppArtifact
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/MonoHandleFuseable.java
|
{
"start": 1174,
"end": 2065
}
|
class ____<T, R> extends InternalMonoOperator<T, R>
implements Fuseable {
final BiConsumer<? super T, SynchronousSink<R>> handler;
MonoHandleFuseable(Mono<? extends T> source, BiConsumer<? super T, SynchronousSink<R>> handler) {
super(source);
this.handler = Objects.requireNonNull(handler, "handler");
}
@Override
public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super R> actual) {
BiConsumer<? super T, SynchronousSink<R>> handler2 = ContextPropagationSupport.shouldRestoreThreadLocalsInSomeOperators() ?
ContextPropagation.contextRestoreForHandle(this.handler, actual::currentContext) : this.handler;
return new FluxHandleFuseable.HandleFuseableSubscriber<>(actual, handler2);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
}
|
MonoHandleFuseable
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/javadoc/InheritDocTest.java
|
{
"start": 2146,
"end": 2353
}
|
class ____ extends java.util.ArrayList {}
""")
.doTest();
}
@Test
public void variable() {
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
apache__flink
|
flink-end-to-end-tests/flink-local-recovery-and-allocation-test/src/main/java/org/apache/flink/streaming/tests/StickyAllocationAndLocalRecoveryTestJob.java
|
{
"start": 19166,
"end": 20731
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 1L;
/** True iff this task inflicts a test failure. */
final boolean failingTask;
/** True iff this task kills its JVM. */
final boolean killedJvm;
/** PID of the task JVM. */
final int jvmPid;
/** Name and subtask index of the task. */
final String taskNameWithSubtask;
/** The current allocation id of this task. */
final String allocationId;
MapperSchedulingAndFailureInfo(
boolean failingTask,
boolean killedJvm,
int jvmPid,
String taskNameWithSubtask,
String allocationId) {
this.failingTask = failingTask;
this.killedJvm = killedJvm;
this.jvmPid = jvmPid;
this.taskNameWithSubtask = taskNameWithSubtask;
this.allocationId = allocationId;
}
@Override
public String toString() {
return "MapperTestInfo{"
+ "failingTask="
+ failingTask
+ ", killedJvm="
+ killedJvm
+ ", jvmPid="
+ jvmPid
+ ", taskNameWithSubtask='"
+ taskNameWithSubtask
+ '\''
+ ", allocationId='"
+ allocationId
+ '\''
+ '}';
}
}
}
|
MapperSchedulingAndFailureInfo
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/GroovyTest.java
|
{
"start": 489,
"end": 1028
}
|
class ____ {\n" + //
" int id\n" + //
"}");
// A实例
GroovyObject a = (GroovyObject) AClass.newInstance();
a.setProperty("id", 33);
String textA = JSON.toJSONString(a);
GroovyObject aa = (GroovyObject) JSON.parseObject(textA, AClass);
Assert.assertEquals(a.getProperty("id"), aa.getProperty("id"));
System.out.println(a);
// B类,继承于A
Class BClass = loader.parseClass("
|
A
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/TrackingModifiedEntitiesRevisionMapping.java
|
{
"start": 860,
"end": 2261
}
|
class ____ extends RevisionMapping {
@ElementCollection(fetch = FetchType.EAGER)
@JoinTable(name = "REVCHANGES", joinColumns = @JoinColumn(name = "REV"))
@Column(name = "ENTITYNAME")
@Fetch(FetchMode.JOIN)
@ModifiedEntityNames
private Set<String> modifiedEntityNames = new HashSet<>();
public Set<String> getModifiedEntityNames() {
return modifiedEntityNames;
}
public void setModifiedEntityNames(Set<String> modifiedEntityNames) {
this.modifiedEntityNames = modifiedEntityNames;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof TrackingModifiedEntitiesRevisionMapping) ) {
return false;
}
if ( !super.equals( o ) ) {
return false;
}
final TrackingModifiedEntitiesRevisionMapping that = (TrackingModifiedEntitiesRevisionMapping) o;
if ( modifiedEntityNames != null ? !modifiedEntityNames.equals( that.modifiedEntityNames )
: that.modifiedEntityNames != null ) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + (modifiedEntityNames != null ? modifiedEntityNames.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "DefaultTrackingModifiedEntitiesRevisionEntity(" + super.toString() + ", modifiedEntityNames = " + modifiedEntityNames + ")";
}
}
|
TrackingModifiedEntitiesRevisionMapping
|
java
|
apache__camel
|
core/camel-main/src/generated/java/org/apache/camel/main/MetricsConfigurationPropertiesConfigurer.java
|
{
"start": 708,
"end": 7759
}
|
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("BaseEndpointURIExchangeEventNotifier", boolean.class);
map.put("Binders", java.lang.String.class);
map.put("ClearOnReload", boolean.class);
map.put("EnableExchangeEventNotifier", boolean.class);
map.put("EnableInstrumentedThreadPoolFactory", boolean.class);
map.put("EnableMessageHistory", boolean.class);
map.put("EnableRouteEventNotifier", boolean.class);
map.put("EnableRoutePolicy", boolean.class);
map.put("Enabled", boolean.class);
map.put("NamingStrategy", java.lang.String.class);
map.put("Path", java.lang.String.class);
map.put("RoutePolicyLevel", java.lang.String.class);
map.put("SkipCamelInfo", boolean.class);
map.put("TextFormatVersion", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.main.MetricsConfigurationProperties target = (org.apache.camel.main.MetricsConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "baseendpointuriexchangeeventnotifier":
case "baseEndpointURIExchangeEventNotifier": target.setBaseEndpointURIExchangeEventNotifier(property(camelContext, boolean.class, value)); return true;
case "binders": target.setBinders(property(camelContext, java.lang.String.class, value)); return true;
case "clearonreload":
case "clearOnReload": target.setClearOnReload(property(camelContext, boolean.class, value)); return true;
case "enableexchangeeventnotifier":
case "enableExchangeEventNotifier": target.setEnableExchangeEventNotifier(property(camelContext, boolean.class, value)); return true;
case "enableinstrumentedthreadpoolfactory":
case "enableInstrumentedThreadPoolFactory": target.setEnableInstrumentedThreadPoolFactory(property(camelContext, boolean.class, value)); return true;
case "enablemessagehistory":
case "enableMessageHistory": target.setEnableMessageHistory(property(camelContext, boolean.class, value)); return true;
case "enablerouteeventnotifier":
case "enableRouteEventNotifier": target.setEnableRouteEventNotifier(property(camelContext, boolean.class, value)); return true;
case "enableroutepolicy":
case "enableRoutePolicy": target.setEnableRoutePolicy(property(camelContext, boolean.class, value)); return true;
case "enabled": target.setEnabled(property(camelContext, boolean.class, value)); return true;
case "namingstrategy":
case "namingStrategy": target.setNamingStrategy(property(camelContext, java.lang.String.class, value)); return true;
case "path": target.setPath(property(camelContext, java.lang.String.class, value)); return true;
case "routepolicylevel":
case "routePolicyLevel": target.setRoutePolicyLevel(property(camelContext, java.lang.String.class, value)); return true;
case "skipcamelinfo":
case "skipCamelInfo": target.setSkipCamelInfo(property(camelContext, boolean.class, value)); return true;
case "textformatversion":
case "textFormatVersion": target.setTextFormatVersion(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "baseendpointuriexchangeeventnotifier":
case "baseEndpointURIExchangeEventNotifier": return boolean.class;
case "binders": return java.lang.String.class;
case "clearonreload":
case "clearOnReload": return boolean.class;
case "enableexchangeeventnotifier":
case "enableExchangeEventNotifier": return boolean.class;
case "enableinstrumentedthreadpoolfactory":
case "enableInstrumentedThreadPoolFactory": return boolean.class;
case "enablemessagehistory":
case "enableMessageHistory": return boolean.class;
case "enablerouteeventnotifier":
case "enableRouteEventNotifier": return boolean.class;
case "enableroutepolicy":
case "enableRoutePolicy": return boolean.class;
case "enabled": return boolean.class;
case "namingstrategy":
case "namingStrategy": return java.lang.String.class;
case "path": return java.lang.String.class;
case "routepolicylevel":
case "routePolicyLevel": return java.lang.String.class;
case "skipcamelinfo":
case "skipCamelInfo": return boolean.class;
case "textformatversion":
case "textFormatVersion": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.main.MetricsConfigurationProperties target = (org.apache.camel.main.MetricsConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "baseendpointuriexchangeeventnotifier":
case "baseEndpointURIExchangeEventNotifier": return target.isBaseEndpointURIExchangeEventNotifier();
case "binders": return target.getBinders();
case "clearonreload":
case "clearOnReload": return target.isClearOnReload();
case "enableexchangeeventnotifier":
case "enableExchangeEventNotifier": return target.isEnableExchangeEventNotifier();
case "enableinstrumentedthreadpoolfactory":
case "enableInstrumentedThreadPoolFactory": return target.isEnableInstrumentedThreadPoolFactory();
case "enablemessagehistory":
case "enableMessageHistory": return target.isEnableMessageHistory();
case "enablerouteeventnotifier":
case "enableRouteEventNotifier": return target.isEnableRouteEventNotifier();
case "enableroutepolicy":
case "enableRoutePolicy": return target.isEnableRoutePolicy();
case "enabled": return target.isEnabled();
case "namingstrategy":
case "namingStrategy": return target.getNamingStrategy();
case "path": return target.getPath();
case "routepolicylevel":
case "routePolicyLevel": return target.getRoutePolicyLevel();
case "skipcamelinfo":
case "skipCamelInfo": return target.isSkipCamelInfo();
case "textformatversion":
case "textFormatVersion": return target.getTextFormatVersion();
default: return null;
}
}
}
|
MetricsConfigurationPropertiesConfigurer
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.