language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__hadoop | hadoop-tools/hadoop-benchmark/src/main/java/org/apache/hadoop/benchmark/VectoredReadBenchmark.java | {
"start": 3100,
"end": 4017
} | class ____ {
@Param({"direct", "array"})
private String bufferKind;
private IntFunction<ByteBuffer> allocate;
@Setup(Level.Trial)
public void setup() {
allocate = "array".equals(bufferKind)
? ByteBuffer::allocate : ByteBuffer::allocateDirect;
}
}
@Benchmark
public void asyncRead(FileSystemChoice fsChoice,
BufferChoice bufferChoice,
Blackhole blackhole) throws Exception {
FSDataInputStream stream = fsChoice.fs.open(DATA_PATH);
List<FileRange> ranges = new ArrayList<>();
for(int m=0; m < 100; ++m) {
FileRange range = FileRange.createFileRange(m * SEEK_SIZE, READ_SIZE);
ranges.add(range);
}
stream.readVectored(ranges, bufferChoice.allocate);
for(FileRange range: ranges) {
blackhole.consume(range.getData().get());
}
stream.close();
}
static | BufferChoice |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 26755,
"end": 27113
} | class ____");
}
@Test
public void autoValueMustHaveNoArgConstructor() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"public abstract | Nested |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldBeInSameMinuteWindow.java | {
"start": 925,
"end": 1690
} | class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldBeInSameMinuteWindow}</code>.
*
* @param actual the actual value in the failed assertion.
* @param other the value used in the failed assertion to compare the actual value to.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeInSameMinuteWindow(Date actual, Date other) {
return new ShouldBeInSameMinuteWindow(actual, other);
}
private ShouldBeInSameMinuteWindow(Date actual, Date other) {
super("%nExpecting actual:%n %s%nto be close to:%n %s%nby less than one minute (strictly) but difference was: "
+ formatTimeDifference(actual, other), actual, other);
}
}
| ShouldBeInSameMinuteWindow |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/Group.java | {
"start": 204,
"end": 444
} | class ____ {
private Long id;
private String name;
/**
* for persistence
*/
Group() {
}
public Group(String name) {
this.name = name;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
}
| Group |
java | quarkusio__quarkus | extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/asynchronous/additional/BlockingNonBlockingOnMethodTest.java | {
"start": 449,
"end": 976
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(BlockingNonBlockingOnMethodService.class))
.assertException(e -> {
assertEquals(DefinitionException.class, e.getClass());
assertTrue(e.getMessage().contains("Both @Blocking and @NonBlocking present"));
});
@Test
public void test() {
fail();
}
}
| BlockingNonBlockingOnMethodTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AnnotationPositionTest.java | {
"start": 16441,
"end": 16657
} | class ____ {
Object foo(@TypeUse final Object a) {
return null;
}
}
""")
.addOutputLines(
"Test.java",
"""
| T |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ClassPathFactoryBeanDefinitionScannerTests.java | {
"start": 3860,
"end": 4074
} | class ____ {
@Autowired @Qualifier("public")
public TestBean testBean;
@Autowired
public DependencyBean dependencyBean;
@Autowired
AbstractApplicationContext applicationContext;
}
}
| QualifiedClientBean |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ComparableTypeTest.java | {
"start": 1274,
"end": 1375
} | class ____ {
// BUG: Diagnostic contains: [ComparableType]
public static | ComparableTypePositiveCases |
java | google__dagger | javatests/dagger/internal/codegen/LazyClassKeyMapBindingComponentProcessorTest.java | {
"start": 6116,
"end": 6536
} | interface ____ {} }");
Source mapKeyBindingsModule =
CompilerTests.javaSource(
"test.MapKeyBindingsModule",
"package test;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"import dagger.multibindings.LazyClassKey;",
"import dagger.multibindings.IntoMap;",
"",
"@Module",
"public | Bar |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/request/VoyageAIRerankRequest.java | {
"start": 846,
"end": 2583
} | class ____ extends VoyageAIRequest {
private final String query;
private final List<String> input;
private final Boolean returnDocuments;
private final Integer topN;
private final VoyageAIRerankModel model;
public VoyageAIRerankRequest(
String query,
List<String> input,
@Nullable Boolean returnDocuments,
@Nullable Integer topN,
VoyageAIRerankModel model
) {
this.model = Objects.requireNonNull(model);
this.input = Objects.requireNonNull(input);
this.query = Objects.requireNonNull(query);
this.returnDocuments = returnDocuments;
this.topN = topN;
}
@Override
public HttpRequest createHttpRequest() {
HttpPost httpPost = new HttpPost(model.uri());
ByteArrayEntity byteEntity = new ByteArrayEntity(
Strings.toString(
new VoyageAIRerankRequestEntity(
query,
input,
returnDocuments,
topN,
model.getTaskSettings(),
model.getServiceSettings().modelId()
)
).getBytes(StandardCharsets.UTF_8)
);
httpPost.setEntity(byteEntity);
decorateWithHeaders(httpPost, model);
return new HttpRequest(httpPost, getInferenceEntityId());
}
@Override
public String getInferenceEntityId() {
return model.getInferenceEntityId();
}
@Override
public URI getURI() {
return model.uri();
}
@Override
public Request truncate() {
return this;
}
@Override
public boolean[] getTruncationInfo() {
return null;
}
}
| VoyageAIRerankRequest |
java | elastic__elasticsearch | x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java | {
"start": 69325,
"end": 69766
} | interface ____ extends Releasable {
/**
* Create the input stream at the specified position.
* @param relativePos the relative position in the remote storage to read from.
* @param listener listener for the input stream ready to be read from.
*/
void create(int relativePos, ActionListener<InputStream> listener) throws IOException;
}
private abstract static | SourceInputStreamFactory |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/graal/MongoClientSubstitutions.java | {
"start": 2563,
"end": 3751
} | class ____ {
@Alias
private InetAddressResolver inetAddressResolver;
@Alias
private SocketSettings settings;
@Alias
private SslSettings sslSettings;
@Alias
private BufferProvider bufferProvider;
@Substitute
public Stream create(final ServerAddress serverAddress) {
Stream stream;
if (sslSettings.isEnabled()) {
stream = new SocketStream(serverAddress, inetAddressResolver, settings, sslSettings,
getSslContext().getSocketFactory(), bufferProvider);
} else {
stream = new SocketStream(serverAddress, inetAddressResolver, settings, sslSettings,
SocketFactory.getDefault(), bufferProvider);
}
return stream;
}
@Alias
private SSLContext getSslContext() {
try {
return (sslSettings.getContext() == null) ? SSLContext.getDefault() : sslSettings.getContext();
} catch (NoSuchAlgorithmException e) {
throw new MongoClientException("Unable to create default SSLContext", e);
}
}
}
@TargetClass(className = "com.mongodb.internal.connection.Compressor")
final | SocketStreamFactorySubstitution |
java | quarkusio__quarkus | extensions/mongodb-client/deployment/src/test/java/io/quarkus/mongodb/MongoNamedClientClientBuildItemConsumerTest.java | {
"start": 834,
"end": 2796
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(MongoTestBase.class))
.withConfigurationResource("named-mongoclient.properties")
.addBuildChainCustomizer(buildCustomizer());
@Test
public void testContainerHasBeans() {
assertThat(Arc.container().instance(MongoClient.class, Default.Literal.INSTANCE).get()).isNotNull();
assertThat(Arc.container().instance(MongoClient.class, NamedLiteral.of("second")).get()).isNotNull();
assertThat(Arc.container().instance(ReactiveMongoClient.class, Default.Literal.INSTANCE).get()).isNotNull();
assertThat(Arc.container().instance(ReactiveMongoClient.class, NamedLiteral.of("secondreactive")).get()).isNotNull();
}
protected static Consumer<BuildChainBuilder> buildCustomizer() {
return new Consumer<BuildChainBuilder>() {
// This represents the extension.
@Override
public void accept(BuildChainBuilder builder) {
builder.addBuildStep(context -> {
ApplicationArchivesBuildItem archive = context.consume(ApplicationArchivesBuildItem.class);
context.produce(Collections.singletonList(new MongoClientNameBuildItem("second")));
}).consumes(ApplicationArchivesBuildItem.class)
.produces(MongoClientNameBuildItem.class)
.build();
builder.addBuildStep(context -> {
List<MongoClientBuildItem> mongoClientBuildItems = context.consumeMulti(MongoClientBuildItem.class);
context.produce(new FeatureBuildItem("dummy"));
}).consumes(MongoClientBuildItem.class)
.produces(FeatureBuildItem.class)
.build();
}
};
}
}
| MongoNamedClientClientBuildItemConsumerTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/io/support/SpringFactoriesLoaderTests.java | {
"start": 7798,
"end": 9627
} | class ____ {
@Test
void throwingReturnsHandlerThatThrowsIllegalArgumentException() {
FailureHandler handler = FailureHandler.throwing();
RuntimeException cause = new RuntimeException();
assertThatIllegalArgumentException().isThrownBy(() -> handler.handleFailure(
DummyFactory.class, MyDummyFactory1.class.getName(),
cause)).withMessageStartingWith("Unable to instantiate factory class").withCause(cause);
}
@Test
void throwingWithFactoryReturnsHandlerThatThrows() {
FailureHandler handler = FailureHandler.throwing(IllegalStateException::new);
RuntimeException cause = new RuntimeException();
assertThatIllegalStateException().isThrownBy(() -> handler.handleFailure(
DummyFactory.class, MyDummyFactory1.class.getName(),
cause)).withMessageStartingWith("Unable to instantiate factory class").withCause(cause);
}
@Test
void loggingReturnsHandlerThatLogs() {
Log logger = mock();
FailureHandler handler = FailureHandler.logging(logger);
RuntimeException cause = new RuntimeException();
handler.handleFailure(DummyFactory.class, MyDummyFactory1.class.getName(), cause);
verify(logger).trace(isA(LogMessage.class), eq(cause));
}
@Test
void handleMessageReturnsHandlerThatAcceptsMessage() {
List<Throwable> failures = new ArrayList<>();
List<String> messages = new ArrayList<>();
FailureHandler handler = FailureHandler.handleMessage((message, failure) -> {
failures.add(failure);
messages.add(message.get());
});
RuntimeException cause = new RuntimeException();
handler.handleFailure(DummyFactory.class, MyDummyFactory1.class.getName(), cause);
assertThat(failures).containsExactly(cause);
assertThat(messages).singleElement().asString().startsWith("Unable to instantiate factory class");
}
}
@Nested
| FailureHandlerTests |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/ssl/SslBundleRegistrar.java | {
"start": 984,
"end": 1239
} | interface ____ {
/**
* Callback method for registering {@link SslBundle}s with an
* {@link SslBundleRegistry}.
* @param registry the registry that accepts {@code SslBundle}s
*/
void registerBundles(SslBundleRegistry registry);
}
| SslBundleRegistrar |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/PromqlBaseParser.java | {
"start": 66019,
"end": 67617
} | class ____ extends ParserRuleContext {
public ExpressionContext expression() {
return getRuleContext(ExpressionContext.class,0);
}
@SuppressWarnings("this-escape")
public DurationContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_duration; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof PromqlBaseParserListener ) ((PromqlBaseParserListener)listener).enterDuration(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof PromqlBaseParserListener ) ((PromqlBaseParserListener)listener).exitDuration(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof PromqlBaseParserVisitor ) return ((PromqlBaseParserVisitor<? extends T>)visitor).visitDuration(this);
else return visitor.visitChildren(this);
}
}
public final DurationContext duration() throws RecognitionException {
DurationContext _localctx = new DurationContext(_ctx, getState());
enterRule(_localctx, 34, RULE_duration);
try {
enterOuterAlt(_localctx, 1);
{
setState(249);
expression(0);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | DurationContext |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/spring/bind/MyBean.java | {
"start": 1073,
"end": 1657
} | class ____ {
private Map<?, ?> headers;
private String body;
@EndpointInject("mock:result")
private ProducerTemplate producer;
@Consume("activemq:Test.BindingQueue")
public void myMethod(@Headers Map<?, ?> headers, String body) {
// defensive copy of headers
this.headers = new HashMap<>(headers);
this.body = body;
// now lets notify we've completed
producer.sendBody("Completed");
}
public String getBody() {
return body;
}
public Map<?, ?> getHeaders() {
return headers;
}
}
| MyBean |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/DeprecatedQueueConfigurationParser.java | {
"start": 1397,
"end": 5125
} | class ____ extends QueueConfigurationParser {
private static final Logger LOG =
LoggerFactory.getLogger(DeprecatedQueueConfigurationParser.class);
static final String MAPRED_QUEUE_NAMES_KEY = "mapred.queue.names";
DeprecatedQueueConfigurationParser(Configuration conf) {
//If not configuration done return immediately.
if(!deprecatedConf(conf)) {
return;
}
List<Queue> listq = createQueues(conf);
this.setAclsEnabled(conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false));
root = new Queue();
root.setName("");
for (Queue q : listq) {
root.addChild(q);
}
}
private List<Queue> createQueues(Configuration conf) {
String[] queueNameValues = conf.getStrings(
MAPRED_QUEUE_NAMES_KEY);
List<Queue> list = new ArrayList<Queue>();
for (String name : queueNameValues) {
try {
Map<String, AccessControlList> acls = getQueueAcls(
name, conf);
QueueState state = getQueueState(name, conf);
Queue q = new Queue(name, acls, state);
list.add(q);
} catch (Throwable t) {
LOG.warn("Not able to initialize queue " + name);
}
}
return list;
}
/**
* Only applicable to leaf level queues
* Parse ACLs for the queue from the configuration.
*/
private QueueState getQueueState(String name, Configuration conf) {
String stateVal = conf.get(
toFullPropertyName(name, "state"),
QueueState.RUNNING.getStateName());
return QueueState.getState(stateVal);
}
/**
* Check if queue properties are configured in the passed in
* configuration. If yes, print out deprecation warning messages.
*/
private boolean deprecatedConf(Configuration conf) {
String[] queues = null;
String queueNameValues = getQueueNames(conf);
if (queueNameValues == null) {
return false;
} else {
LOG.warn(
"Configuring \"" + MAPRED_QUEUE_NAMES_KEY
+ "\" in mapred-site.xml or "
+ "hadoop-site.xml is deprecated and will overshadow "
+ QUEUE_CONF_FILE_NAME + ". Remove this property and configure "
+ "queue hierarchy in " + QUEUE_CONF_FILE_NAME);
// store queues so we can check if ACLs are also configured
// in the deprecated files.
queues = conf.getStrings(MAPRED_QUEUE_NAMES_KEY);
}
// check if acls are defined
if (queues != null) {
for (String queue : queues) {
for (QueueACL qAcl : QueueACL.values()) {
String key = toFullPropertyName(queue, qAcl.getAclName());
String aclString = conf.get(key);
if (aclString != null) {
LOG.warn(
"Configuring queue ACLs in mapred-site.xml or " +
"hadoop-site.xml is deprecated. Configure queue ACLs in " +
QUEUE_CONF_FILE_NAME);
// even if one string is configured, it is enough for printing
// the warning. so we can return from here.
return true;
}
}
}
}
return true;
}
private String getQueueNames(Configuration conf) {
String queueNameValues = conf.get(MAPRED_QUEUE_NAMES_KEY);
return queueNameValues;
}
/**
* Parse ACLs for the queue from the configuration.
*/
private Map<String, AccessControlList> getQueueAcls(
String name,
Configuration conf) {
HashMap<String, AccessControlList> map =
new HashMap<String, AccessControlList>();
for (QueueACL qAcl : QueueACL.values()) {
String aclKey = toFullPropertyName(name, qAcl.getAclName());
map.put(
aclKey, new AccessControlList(
conf.get(
aclKey, "*")));
}
return map;
}
}
| DeprecatedQueueConfigurationParser |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/jackson/Initializers.java | {
"start": 1857,
"end": 3468
} | class ____ {
void setupModule(
final SetupContext context, final boolean includeStacktrace, final boolean stacktraceAsString) {
// JRE classes: we cannot edit those with Jackson annotations
context.setMixInAnnotations(StackTraceElement.class, StackTraceElementMixIn.class);
// Log4j API classes: we do not want to edit those with Jackson annotations because the API module should
// not depend on Jackson.
context.setMixInAnnotations(Marker.class, MarkerMixIn.class);
context.setMixInAnnotations(Level.class, LevelMixIn.class);
context.setMixInAnnotations(Instant.class, InstantMixIn.class);
context.setMixInAnnotations(LogEvent.class, LogEventMixIn.class);
// Log4j Core classes: we do not want to bring in Jackson at runtime if we do not have to.
context.setMixInAnnotations(ExtendedStackTraceElement.class, ExtendedStackTraceElementMixIn.class);
context.setMixInAnnotations(
ThrowableProxy.class,
includeStacktrace
? (stacktraceAsString
? ThrowableProxyWithStacktraceAsStringMixIn.class
: ThrowableProxyMixIn.class)
: ThrowableProxyWithoutStacktraceMixIn.class);
}
}
/**
* Used to set up {@link SetupContext} from different {@link SimpleModule}s.
* <p>
* Serializes the context map as list of objects.
* </p>
*/
static | AbstractInitializer |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/PlannerCallProcedureOperation.java | {
"start": 15202,
"end": 19257
} | class ____ implements ResultProvider {
private final DataStructureConverter<Object, Object> converter;
private final RowDataToStringConverter toStringConverter;
// a converter to convert internal RowData to Row
private final @Nullable RowRowConverter rowConverter;
private final Object[] result;
public CallProcedureResultProvider(
DataStructureConverter<Object, Object> converter,
RowDataToStringConverter toStringConverter,
@Nullable RowRowConverter rowConverter,
Object result) {
this.converter = converter;
this.toStringConverter = toStringConverter;
this.result = toResultArray(result);
this.rowConverter = rowConverter;
}
@Override
public ResultProvider setJobClient(JobClient jobClient) {
return this;
}
@Override
public CloseableIterator<RowData> toInternalIterator() {
Iterator<Object> objectIterator = Arrays.stream(result).iterator();
return new CloseableIterator<RowData>() {
@Override
public boolean hasNext() {
return objectIterator.hasNext();
}
@Override
public RowData next() {
return toRowData(objectIterator.next());
}
@Override
public void close() {}
};
}
private RowData toRowData(Object externalValue) {
Object element = converter.toInternalOrNull(externalValue);
if (!(element instanceof RowData)) {
return GenericRowData.of(element);
}
return (RowData) element;
}
@Override
public CloseableIterator<Row> toExternalIterator() {
Iterator<Object> objectIterator = Arrays.stream(result).iterator();
return new CloseableIterator<Row>() {
@Override
public boolean hasNext() {
return objectIterator.hasNext();
}
@Override
public Row next() {
Object element = objectIterator.next();
if (!(element instanceof Row)) {
if (rowConverter != null) {
// first convert the extern value to internal RowData,
// then convert the RowData to Row
return rowConverter.toExternal(toRowData(element));
} else {
return Row.of(element);
}
}
return (Row) element;
}
@Override
public void close() {}
};
}
@Override
public RowDataToStringConverter getRowDataStringConverter() {
return toStringConverter;
}
@Override
public boolean isFirstRowReady() {
// always return true
return true;
}
private Object[] toResultArray(Object result) {
// the result may be primitive array,
// convert it to primitive wrapper array
if (isPrimitiveArray(result)) {
return toPrimitiveWrapperArray(result);
}
return (Object[]) result;
}
private boolean isPrimitiveArray(Object result) {
return result.getClass().isArray()
&& result.getClass().getComponentType().isPrimitive();
}
private Object[] toPrimitiveWrapperArray(Object primitiveArray) {
int length = Array.getLength(primitiveArray);
Object[] objArray = new Object[length];
for (int i = 0; i < length; i++) {
objArray[i] = Array.get(primitiveArray, i);
}
return objArray;
}
}
}
| CallProcedureResultProvider |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthSerializationTests.java | {
"start": 539,
"end": 769
} | class ____ extends AbstractUnaryScalarSerializationTests<ByteLength> {
@Override
protected ByteLength create(Source source, Expression child) {
return new ByteLength(source, child);
}
}
| ByteLengthSerializationTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/AndLobTest.java | {
"start": 1144,
"end": 1910
} | class ____ {
@Test
public void testMappingAttributeWithLobAndAttributeConverter(ServiceRegistryScope scope) {
final Metadata metadata = new MetadataSources( scope.getRegistry() )
.addAnnotatedClass( EntityImpl.class )
.buildMetadata();
final Type type = metadata.getEntityBinding( EntityImpl.class.getName() ).getProperty( "status" ).getType();
final ConvertedBasicTypeImpl typeAdapter = assertTyping( ConvertedBasicTypeImpl.class, type );
assertThat( typeAdapter.getJavaTypeDescriptor().getJavaTypeClass(), equalTo( String.class ) );
assertThat( typeAdapter.getJdbcJavaType().getJavaTypeClass(), equalTo( Integer.class ) );
assertThat( typeAdapter.getJdbcType().getJdbcTypeCode(), is( Types.INTEGER ) );
}
@Converter
public static | AndLobTest |
java | apache__camel | core/camel-base/src/main/java/org/apache/camel/impl/converter/CoreTypeConverterRegistry.java | {
"start": 2726,
"end": 5598
} | enum ____ for optional performance
protected final TypeConverter enumTypeConverter = new EnumTypeConverter();
private final ConverterStatistics statistics;
protected TypeConverterExists typeConverterExists = TypeConverterExists.Ignore;
protected LoggingLevel typeConverterExistsLoggingLevel = LoggingLevel.DEBUG;
// Why 256: as of Camel 4, we have about 230 type converters. Therefore, set the capacity to a few more to provide
// space for others added during runtime
private final Map<TypeConvertible<?, ?>, TypeConverter> converters = new ConcurrentHashMap<>(256);
protected CoreTypeConverterRegistry(boolean statisticsEnabled) {
if (statisticsEnabled) {
statistics = new TypeConverterStatistics();
} else {
statistics = new NoopTypeConverterStatistics();
}
}
@Override
public boolean allowNull() {
return false;
}
@Override
public void setInjector(Injector injector) {
throw new UnsupportedOperationException();
}
@Override
public Injector getInjector() {
throw new UnsupportedOperationException();
}
public <T> T convertTo(Class<T> type, Object value) {
return convertTo(type, null, value);
}
@SuppressWarnings("unchecked")
private <T> T fastConvertTo(Class<T> type, Exchange exchange, Object value) {
if (value == null) {
return null;
}
if (type.equals(value.getClass())) {
// same instance
return (T) value;
}
if (type == boolean.class) {
// primitive boolean which must return a value so throw exception if not possible
Object answer = ObjectConverter.toBoolean(value);
requireNonNullBoolean(type, value, answer);
return (T) answer;
} else if (type == Boolean.class && value instanceof String str) {
// String -> Boolean
Boolean parsedBoolean = customParseBoolean(str);
if (parsedBoolean != null) {
return (T) parsedBoolean;
}
} else if (type.isPrimitive()) {
// okay its a wrapper -> primitive then return as-is for some common types
Class<?> cls = value.getClass();
if (cls == Integer.class || cls == Long.class) {
return (T) value;
}
} else if (type == String.class) {
// okay its a primitive -> string then return as-is for some common types
Class<?> cls = value.getClass();
if (cls.isPrimitive()
|| cls == Boolean.class
|| cls == Integer.class
|| cls == Long.class) {
return (T) value.toString();
}
} else if (type.isEnum()) {
// okay its a conversion to | converter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/InjectOnMemberAndConstructorTest.java | {
"start": 3728,
"end": 3948
} | class ____ {
@Inject private String stringFieldWithInject;
@Inject
public MixedInject() {}
}
}\
""")
.doTest();
}
}
| MixedInject |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/JdbcOAuth2AuthorizationServiceTests.java | {
"start": 24633,
"end": 28243
} | class ____ extends JdbcOAuth2AuthorizationService {
// @formatter:off
private static final String COLUMN_NAMES = "id, "
+ "registeredClientId, "
+ "principalName, "
+ "authorizationGrantType, "
+ "authorizedScopes, "
+ "attributes, "
+ "state, "
+ "authorizationCodeValue, "
+ "authorizationCodeIssuedAt, "
+ "authorizationCodeExpiresAt,"
+ "authorizationCodeMetadata,"
+ "accessTokenValue,"
+ "accessTokenIssuedAt,"
+ "accessTokenExpiresAt,"
+ "accessTokenMetadata,"
+ "accessTokenType,"
+ "accessTokenScopes,"
+ "oidcIdTokenValue,"
+ "oidcIdTokenIssuedAt,"
+ "oidcIdTokenExpiresAt,"
+ "oidcIdTokenMetadata,"
+ "refreshTokenValue,"
+ "refreshTokenIssuedAt,"
+ "refreshTokenExpiresAt,"
+ "refreshTokenMetadata,"
+ "userCodeValue,"
+ "userCodeIssuedAt,"
+ "userCodeExpiresAt,"
+ "userCodeMetadata,"
+ "deviceCodeValue,"
+ "deviceCodeIssuedAt,"
+ "deviceCodeExpiresAt,"
+ "deviceCodeMetadata";
// @formatter:on
private static final String TABLE_NAME = "oauth2Authorization";
private static final String PK_FILTER = "id = ?";
private static final String UNKNOWN_TOKEN_TYPE_FILTER = "state = ? OR authorizationCodeValue = ? OR "
+ "accessTokenValue = ? OR oidcIdTokenValue = ? OR refreshTokenValue = ? OR userCodeValue = ? OR "
+ "deviceCodeValue = ?";
// @formatter:off
private static final String LOAD_AUTHORIZATION_SQL = "SELECT " + COLUMN_NAMES
+ " FROM " + TABLE_NAME
+ " WHERE ";
// @formatter:on
// @formatter:off
private static final String SAVE_AUTHORIZATION_SQL = "INSERT INTO " + TABLE_NAME
+ " (" + COLUMN_NAMES + ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
// @formatter:on
private static final String REMOVE_AUTHORIZATION_SQL = "DELETE FROM " + TABLE_NAME + " WHERE " + PK_FILTER;
private CustomJdbcOAuth2AuthorizationService(JdbcOperations jdbcOperations,
RegisteredClientRepository registeredClientRepository) {
super(jdbcOperations, registeredClientRepository);
setAuthorizationRowMapper(new CustomOAuth2AuthorizationRowMapper(registeredClientRepository));
setAuthorizationParametersMapper(new CustomOAuth2AuthorizationParametersMapper());
}
@Override
public void save(OAuth2Authorization authorization) {
List<SqlParameterValue> parameters = getAuthorizationParametersMapper().apply(authorization);
PreparedStatementSetter pss = new ArgumentPreparedStatementSetter(parameters.toArray());
getJdbcOperations().update(SAVE_AUTHORIZATION_SQL, pss);
}
@Override
public void remove(OAuth2Authorization authorization) {
SqlParameterValue[] parameters = new SqlParameterValue[] {
new SqlParameterValue(Types.VARCHAR, authorization.getId()) };
PreparedStatementSetter pss = new ArgumentPreparedStatementSetter(parameters);
getJdbcOperations().update(REMOVE_AUTHORIZATION_SQL, pss);
}
@Override
public OAuth2Authorization findById(String id) {
return findBy(PK_FILTER, id);
}
@Override
public OAuth2Authorization findByToken(String token, OAuth2TokenType tokenType) {
return findBy(UNKNOWN_TOKEN_TYPE_FILTER, token, token, token, token, token, token, token);
}
private OAuth2Authorization findBy(String filter, Object... args) {
List<OAuth2Authorization> result = getJdbcOperations().query(LOAD_AUTHORIZATION_SQL + filter,
getAuthorizationRowMapper(), args);
return !result.isEmpty() ? result.get(0) : null;
}
private static final | CustomJdbcOAuth2AuthorizationService |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/testutils/LogAggregationTestcase.java | {
"start": 2830,
"end": 17631
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(LogAggregationTestcase.class);
private final Configuration conf;
private final long now;
private PathWithFileStatus bucketDir;
private final long bucketDirModTime;
private PathWithFileStatus userDir;
private final String userDirName;
private final long userDirModTime;
private PathWithFileStatus suffixDir;
private final String suffix;
private final String suffixDirName;
private final long suffixDirModTime;
private final String bucketId;
private final Path remoteRootLogPath;
private final Map<Integer, Exception> injectedAppDirDeletionExceptions;
private final List<String> fileControllers;
private final List<Pair<String, Long>> additionalAppDirs;
private final List<ApplicationId> applicationIds = new ArrayList<>();
private final int[] runningAppIds;
private final int[] finishedAppIds;
private final List<List<PathWithFileStatus>> appFiles = new ArrayList<>();
private final FileSystem mockFs;
private List<PathWithFileStatus> appDirs;
private final List<AppDescriptor> appDescriptors;
private AggregatedLogDeletionServiceForTest deletionService;
private ApplicationClientProtocol rmClient;
public LogAggregationTestcase(LogAggregationTestcaseBuilder builder) throws IOException {
conf = builder.conf;
now = builder.now;
bucketDir = builder.bucketDir;
bucketDirModTime = builder.bucketDirModTime;
userDir = builder.userDir;
userDirName = builder.userDirName;
userDirModTime = builder.userDirModTime;
suffix = builder.suffix;
suffixDir = builder.suffixDir;
suffixDirName = builder.suffixDirName;
suffixDirModTime = builder.suffixDirModTime;
bucketId = builder.bucketId;
appDescriptors = builder.apps;
runningAppIds = builder.runningAppIds;
finishedAppIds = builder.finishedAppIds;
remoteRootLogPath = builder.remoteRootLogPath;
injectedAppDirDeletionExceptions = builder.injectedAppDirDeletionExceptions;
fileControllers = builder.fileControllers;
additionalAppDirs = builder.additionalAppDirs;
mockFs = ((FilterFileSystem) builder.rootFs).getRawFileSystem();
validateAppControllers();
setupMocks();
setupDeletionService();
}
private void validateAppControllers() {
Set<String> controllers = appDescriptors.stream()
.map(a -> a.fileController)
.filter(Objects::nonNull)
.collect(Collectors.toSet());
Set<String> availableControllers = fileControllers != null ?
new HashSet<>(this.fileControllers) : Sets.newHashSet();
Set<String> difference = Sets.difference(controllers, availableControllers);
if (!difference.isEmpty()) {
throw new IllegalStateException(String.format("Invalid controller defined!" +
" Available: %s, Actual: %s", availableControllers, controllers));
}
}
    /**
     * Creates the applications, then wires up the mocked directory hierarchy
     * and listStatus() stubs for every controller-specific root path.
     */
    private void setupMocks() throws IOException {
      createApplicationsByDescriptors();
      List<Path> rootPaths = determineRootPaths();
      for (Path rootPath : rootPaths) {
        String controllerName = rootPath.getName();
        // Any app id works for deriving the bucket dir name; the first is used.
        ApplicationId arbitraryAppIdForBucketDir = this.applicationIds.get(0);
        userDir = createDirLogPathWithFileStatus(rootPath, userDirName, userDirModTime);
        suffixDir = createDirLogPathWithFileStatus(userDir.path, suffixDirName, suffixDirModTime);
        if (bucketId != null) {
          bucketDir = createDirLogPathWithFileStatus(suffixDir.path, bucketId, bucketDirModTime);
        } else {
          // No explicit bucket id: derive the bucket dir from the application id.
          bucketDir = createDirBucketDirLogPathWithFileStatus(rootPath, userDirName, suffix,
              arbitraryAppIdForBucketDir, bucketDirModTime);
        }
        setupListStatusForPath(rootPath, userDir);
        initFileSystemListings(controllerName);
      }
    }
private List<Path> determineRootPaths() {
List<Path> rootPaths = new ArrayList<>();
if (fileControllers != null && !fileControllers.isEmpty()) {
for (String fileController : fileControllers) {
//Generic path: <remote-app-log-dir>/<user>/bucket-<suffix>/<bucket id>/
// <application id>/<NodeManager id>
//remoteRootLogPath: <remote-app-log-dir>/
//example: mockfs://foo/tmp/logs/
//userDir: <remote-app-log-dir>/<user>/
//example: mockfs://foo/tmp/logs/me/
//suffixDir: <remote-app-log-dir>/<user>/bucket-<suffix>/
//example: mockfs://foo/tmp/logs/me/bucket-logs/
//bucketDir: <remote-app-log-dir>/<user>/bucket-<suffix>/<bucket id>/
//example: mockfs://foo/tmp/logs/me/bucket-logs/0001/
//remoteRootLogPath with controller: <remote-app-log-dir>/<controllerName>
//example: mockfs://foo/tmp/logs/IFile
rootPaths.add(new Path(remoteRootLogPath, fileController));
}
} else {
rootPaths.add(remoteRootLogPath);
}
return rootPaths;
}
    /**
     * Stubs the directory listings user -> suffix -> bucket -> app dirs for one
     * controller, plus empty listings for any additional (childless) app dirs.
     */
    private void initFileSystemListings(String controllerName) throws IOException {
      setupListStatusForPath(userDir, suffixDir);
      setupListStatusForPath(suffixDir, bucketDir);
      // Only the app dirs created under this controller's root belong in the
      // bucket dir listing.
      setupListStatusForPath(bucketDir, appDirs.stream()
          .filter(app -> app.path.toString().contains(controllerName))
          .map(app -> app.fileStatus)
          .toArray(FileStatus[]::new));
      for (Pair<String, Long> appDirPair : additionalAppDirs) {
        PathWithFileStatus appDir = createDirLogPathWithFileStatus(bucketDir.path,
            appDirPair.getLeft(), appDirPair.getRight());
        // Additional app dirs have no child log files.
        setupListStatusForPath(appDir, new FileStatus[] {});
      }
    }
private void createApplicationsByDescriptors() throws IOException {
int len = appDescriptors.size();
appDirs = new ArrayList<>(len);
for (int i = 0; i < len; i++) {
AppDescriptor appDesc = appDescriptors.get(i);
ApplicationId applicationId = appDesc.createApplicationId(now, i + 1);
applicationIds.add(applicationId);
Path basePath = this.remoteRootLogPath;
if (appDesc.fileController != null) {
basePath = new Path(basePath, appDesc.fileController);
}
PathWithFileStatus appDir = createPathWithFileStatusForAppId(
basePath, applicationId, userDirName, suffix, appDesc.modTimeOfAppDir);
LOG.debug("Created application with ID '{}' to path '{}'", applicationId, appDir.path);
appDirs.add(appDir);
addAppChildrenFiles(appDesc, appDir);
}
setupFsMocksForAppsAndChildrenFiles();
for (Map.Entry<Integer, Exception> e : injectedAppDirDeletionExceptions.entrySet()) {
when(mockFs.delete(this.appDirs.get(e.getKey()).path, true)).thenThrow(e.getValue());
}
}
private void setupFsMocksForAppsAndChildrenFiles() throws IOException {
for (int i = 0; i < appDirs.size(); i++) {
List<PathWithFileStatus> appChildren = appFiles.get(i);
Path appPath = appDirs.get(i).path;
setupListStatusForPath(appPath,
appChildren.stream()
.map(child -> child.fileStatus)
.toArray(FileStatus[]::new));
}
}
private void setupListStatusForPath(Path dir, PathWithFileStatus pathWithFileStatus)
throws IOException {
setupListStatusForPath(dir, new FileStatus[]{pathWithFileStatus.fileStatus});
}
private void setupListStatusForPath(PathWithFileStatus dir, PathWithFileStatus pathWithFileStatus)
throws IOException {
setupListStatusForPath(dir, new FileStatus[]{pathWithFileStatus.fileStatus});
}
    /**
     * Core stub: makes the mock file system's listStatus(dir) return the given
     * file statuses. All other setupListStatusForPath overloads funnel here.
     */
    private void setupListStatusForPath(Path dir, FileStatus[] fileStatuses) throws IOException {
      LOG.debug("Setting up listStatus. Parent: {}, files: {}", dir, fileStatuses);
      when(mockFs.listStatus(dir)).thenReturn(fileStatuses);
    }
private void setupListStatusForPath(PathWithFileStatus dir, FileStatus[] fileStatuses)
throws IOException {
LOG.debug("Setting up listStatus. Parent: {}, files: {}", dir.path, fileStatuses);
when(mockFs.listStatus(dir.path)).thenReturn(fileStatuses);
}
private void setupDeletionService() {
List<ApplicationId> finishedApps = createFinishedAppsList();
List<ApplicationId> runningApps = createRunningAppsList();
deletionService = new AggregatedLogDeletionServiceForTest(runningApps, finishedApps, conf);
}
    /**
     * Initializes and starts the deletion service.
     *
     * @return this, for fluent chaining
     */
    public LogAggregationTestcase startDeletionService() {
      deletionService.init(conf);
      deletionService.start();
      return this;
    }
private List<ApplicationId> createRunningAppsList() {
List<ApplicationId> runningApps = new ArrayList<>();
for (int i : runningAppIds) {
ApplicationId appId = this.applicationIds.get(i - 1);
runningApps.add(appId);
}
return runningApps;
}
private List<ApplicationId> createFinishedAppsList() {
List<ApplicationId> finishedApps = new ArrayList<>();
for (int i : finishedAppIds) {
ApplicationId appId = this.applicationIds.get(i - 1);
finishedApps.add(appId);
}
return finishedApps;
}
    /**
     * Creates the log deletion tasks against a freshly created mock RM client
     * and runs each task synchronously once.
     *
     * @param retentionSeconds log retention period handed to the tasks
     * @return this, for fluent chaining
     */
    public LogAggregationTestcase runDeletionTask(long retentionSeconds) throws Exception {
      List<ApplicationId> finishedApps = createFinishedAppsList();
      List<ApplicationId> runningApps = createRunningAppsList();
      // Kept in a field so teardown can verify the client was closed.
      rmClient = createMockRMClient(finishedApps, runningApps);
      List<LogDeletionTask> tasks = deletionService.createLogDeletionTasks(conf, retentionSeconds,
          rmClient);
      for (LogDeletionTask deletionTask : tasks) {
        deletionTask.run();
      }
      return this;
    }
private void addAppChildrenFiles(AppDescriptor appDesc, PathWithFileStatus appDir) {
List<PathWithFileStatus> appChildren = new ArrayList<>();
for (Pair<String, Long> fileWithModDate : appDesc.filesWithModDate) {
PathWithFileStatus appChildFile = createFileLogPathWithFileStatus(appDir.path,
fileWithModDate.getLeft(),
fileWithModDate.getRight());
appChildren.add(appChildFile);
}
this.appFiles.add(appChildren);
}
public LogAggregationTestcase verifyAppDirsDeleted(long timeout, int... ids) throws IOException {
for (int id : ids) {
verifyAppDirDeleted(id, timeout);
}
return this;
}
public LogAggregationTestcase verifyAppDirsNotDeleted(long timeout, int... ids)
throws IOException {
for (int id : ids) {
verifyAppDirNotDeleted(id, timeout);
}
return this;
}
    /**
     * Asserts the app dir with the given 1-based id was deleted exactly once.
     */
    public LogAggregationTestcase verifyAppDirDeleted(int id, long timeout) throws IOException {
      verifyAppDirDeletion(id, 1, timeout);
      return this;
    }
    /**
     * Asserts the app dir with the given 1-based id was never deleted.
     */
    public LogAggregationTestcase verifyAppDirNotDeleted(int id, long timeout) throws IOException {
      verifyAppDirDeletion(id, 0, timeout);
      return this;
    }
    /**
     * Asserts each (appId, fileNo) pair — both 1-based — was deleted once.
     */
    public LogAggregationTestcase verifyAppFilesDeleted(long timeout,
                                                       List<Pair<Integer, Integer>> pairs)
        throws IOException {
      for (Pair<Integer, Integer> pair : pairs) {
        verifyAppFileDeleted(pair.getLeft(), pair.getRight(), timeout);
      }
      return this;
    }
    /**
     * Asserts each (appId, fileNo) pair — both 1-based — was never deleted.
     */
    public LogAggregationTestcase verifyAppFilesNotDeleted(long timeout,
                                                           List<Pair<Integer, Integer>> pairs)
        throws IOException {
      for (Pair<Integer, Integer> pair : pairs) {
        verifyAppFileNotDeleted(pair.getLeft(), pair.getRight(), timeout);
      }
      return this;
    }
    /**
     * Asserts the given child file of the given app (both 1-based) was deleted once.
     */
    public LogAggregationTestcase verifyAppFileDeleted(int id, int fileNo, long timeout)
        throws IOException {
      verifyAppFileDeletion(id, fileNo, 1, timeout);
      return this;
    }
    /**
     * Asserts the given child file of the given app (both 1-based) was never deleted.
     */
    public LogAggregationTestcase verifyAppFileNotDeleted(int id, int fileNo, long timeout)
        throws IOException {
      verifyAppFileDeletion(id, fileNo, 0, timeout);
      return this;
    }
    /**
     * Verifies via Mockito how many times the app dir (1-based id) was deleted.
     * NO_TIMEOUT selects an immediate verify; otherwise the verification polls
     * for up to {@code timeout} millis.
     */
    private void verifyAppDirDeletion(int id, int times, long timeout) throws IOException {
      if (timeout == NO_TIMEOUT) {
        verify(mockFs, times(times)).delete(this.appDirs.get(id - 1).path, true);
      } else {
        verify(mockFs, timeout(timeout).times(times)).delete(this.appDirs.get(id - 1).path, true);
      }
    }
private void verifyAppFileDeletion(int appId, int fileNo, int times, long timeout)
throws IOException {
List<PathWithFileStatus> childrenFiles = this.appFiles.get(appId - 1);
PathWithFileStatus file = childrenFiles.get(fileNo - 1);
verify(mockFs, timeout(timeout).times(times)).delete(file.path, true);
}
private void verifyMockRmClientWasClosedNTimes(int expectedRmClientCloses)
throws IOException {
ApplicationClientProtocol mockRMClient;
if (deletionService != null) {
mockRMClient = deletionService.getMockRMClient();
} else {
mockRMClient = rmClient;
}
verify((Closeable)mockRMClient, times(expectedRmClientCloses)).close();
}
    /**
     * Stops the deletion service and verifies the mock RM client was closed the
     * expected number of times.
     */
    public void teardown(int expectedRmClientCloses) throws IOException {
      deletionService.stop();
      verifyMockRmClientWasClosedNTimes(expectedRmClientCloses);
    }
    /**
     * Triggers a refresh of the service's log retention settings.
     *
     * @return this, for fluent chaining
     */
    public LogAggregationTestcase refreshLogRetentionSettings() throws IOException {
      deletionService.refreshLogRetentionSettings();
      return this;
    }
    /** Exposes the deletion service under test for direct assertions. */
    public AggregatedLogDeletionService getDeletionService() {
      return deletionService;
    }
    /**
     * Asserts the service's check interval equals the expected value (millis).
     */
    public LogAggregationTestcase verifyCheckIntervalMilliSecondsEqualTo(
        int checkIntervalMilliSeconds) {
      assertEquals(checkIntervalMilliSeconds, deletionService.getCheckIntervalMsecs());
      return this;
    }
    /**
     * Asserts the service's check interval differs from the given value (millis).
     */
    public LogAggregationTestcase verifyCheckIntervalMilliSecondsNotEqualTo(
        int checkIntervalMilliSeconds) {
      assertTrue(checkIntervalMilliSeconds != deletionService.getCheckIntervalMsecs());
      return this;
    }
    /**
     * Asserts that listStatus() was invoked on some path at least {@code atLeast}
     * times within {@code timeout} millis.
     */
    public LogAggregationTestcase verifyAnyPathListedAtLeast(int atLeast, long timeout)
        throws IOException {
      verify(mockFs, timeout(timeout).atLeast(atLeast)).listStatus(any(Path.class));
      return this;
    }
public LogAggregationTestcase changeModTimeOfApp(int appId, long modTime) {
PathWithFileStatus appDir = appDirs.get(appId - 1);
appDir.changeModificationTime(modTime);
return this;
}
public LogAggregationTestcase changeModTimeOfAppLogDir(int appId, int fileNo, long modTime) {
List<PathWithFileStatus> childrenFiles = this.appFiles.get(appId - 1);
PathWithFileStatus file = childrenFiles.get(fileNo - 1);
file.changeModificationTime(modTime);
return this;
}
    /** Adjusts the recorded modification time of the bucket directory. */
    public LogAggregationTestcase changeModTimeOfBucketDir(long modTime) {
      bucketDir.changeModificationTime(modTime);
      return this;
    }
    /**
     * Re-registers every listStatus() stub (directory hierarchy and app child
     * files), e.g. after modification times were changed.
     *
     * @return this, for fluent chaining
     */
    public LogAggregationTestcase reinitAllPaths() throws IOException {
      List<Path> rootPaths = determineRootPaths();
      for (Path rootPath : rootPaths) {
        String controllerName = rootPath.getName();
        initFileSystemListings(controllerName);
      }
      setupFsMocksForAppsAndChildrenFiles();
      return this;
    }
}
| LogAggregationTestcase |
java | google__auto | value/src/test/java/com/google/auto/value/extension/memoized/MemoizedTest.java | {
"start": 1626,
"end": 2011
} | class ____ {
abstract boolean getNative();
abstract boolean getNative0();
abstract String getNotKeyword();
@Memoized
boolean getMemoizedNative() {
return getNative();
}
@Memoized
boolean getMemoizedNative0() {
return getNative0();
}
}
@AutoValue
@CopyAnnotations
@javax.annotation.Nullable
abstract static | ValueWithKeywordName |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/TimeSeriesCounter.java | {
"start": 5513,
"end": 23704
} | class ____ {
/*
* In the following diagrams, we take a duration of 100 and resolution of 20.
*
* |___________________________________________________________|
* duration = 100
*
* |___________|___________|___________|___________|___________|
* buckets = 5
*
* |___________|
* resolution = 20
*
* Action: inc(235) - Increment the counter at time 235 seconds.
*
* While there is only one `buckets` array, it's useful to view the array as overlapping three
* epoch (time at bucket[0]), the last epoch, the present epoch and the future epoch.
*
* Past
* [_] [_] [2] [3] [4]
* |___________|___________|___________|___________|___________|
* 140[e]-> 160-> 180-> 199
*
* Present
* [0] [1][b] [2][g] [3] [4]
* |___________|_____1_____|___________|___________|___________|
* 200[a]-> 220-> 240[d]-> 260-> 280-> 299
*
* Future
* [0] [_] [_] [_] [_]
* |___________|___________|___________|___________|___________|
* 300[c]-> 320[f]
*
* [a] Beginning of the current epoch
* startOfCurrentEpoch = 200 = (t / duration) * duration = (235 / 100) * 100
* Start time of bucket zero, this is used to anchor the bucket ring in time. Without `startOfCurrentEpoch`,
* it would be impossible to distinguish between two times that are `duration` away from each other.
* In this example, the first inc used time 235, since startOfCurrentEpoch is rounded down to the nearest
* duration (100), it is 200.
*
* [b] The current bucket
* curBucket = 1 = (t / resolution) % buckets.length = (235 / 20) % 5
* The latest active bucket in the bucket ring. The bucket of a timestamp is determined by the `resolution`.
* In this case the `resolution` is 20, so each bucket covers 20 seconds, the first covers 200-219, the
* second covers 220->239, the third 240->259 and so on. 235 is in the second bucket, at index 1.
*
* [c] Beginning of the next epoch
* nextEpoch() = 300 = startOfCurrentEpoch + duration = 200 + 100
* The first time of the next epoch, this indicates when `startOfCurrentEpoch` should be updated. When `curBucket`
* advances to or past zero, `startOfCurrentEpoch` must be updated to `nextEpoch()`
*
* [d] Beginning of the next bucket
* nextBucketStartTime() = 240 = startOfCurrentEpoch + ((curBucket + 1) * resolution) = 200 + ((1 + 1) * 20
* The first time of the next bucket, when a timestamp is greater than or equal to this time, we must update
* the `curBucket` and potentially the `startOfCurrentEpoch`.
*
* [e] The earliest time to sum
* earliestTimeInCounter() = 140 = nextBucketStartTime() - duration = 240 - 100
* `curBucket` covers the latest timestamp seen by the `Counter`. Since the counter keeps a history, when a
* caller calls `sum(t)`, the `Counter` must clamp the range to the earliest time covered by its current state.
* The times proceed backwards for `buckets.length - 1`.
* **Important** this is likely _before_ the `startOfCurrentEpoch`. `startOfCurrentEpoch` is the timestamp of bucket[0].
*
* [f] The counter is no longer valid at this time
* counterExpired() = 320 = startOfCurrentEpoch + (curBucket * resolution) + duration = 200 + (1 * 20) + 100
* Where `earliestTimeInCounter()` is looking backwards, to the history covered by the counter, `counterExpired()`
* looks forward to when current counter has expired. Since `curBucket` represents the latest time in this counter,
* `counterExpired()` is `duration` from the start of the time covered from `curBucket`
*
* [g] The next bucket in the bucket ring
* nextBucket(curBucket) = 2 = (i + 1) % buckets.length = (1 + 1) % 5
* Since `buckets` is a ring, the next bucket may wrap around.
*
* ------------------------------------------------------------------------------------------------------------------
*
* Action: inc(238) - since this inc is within the current bucket, it is incremented and nothing else changes
*
* Present
* [0] [1][b] [2][g] [3] [4]
* |___________|_____2_____|___________|___________|___________|
* 200[a]-> 220-> 240[d]-> 260-> 280-> 299
*
* ------------------------------------------------------------------------------------------------------------------
*
* Action: inc(165) - only the current bucket is incremented, so increments from a timestamp in the past are
* clamped to the current bucket. This makes `inc(long)` dependent on the ordering of timestamps,
* but it avoids revising a history that may have already been exposed via `sum(long)`.
*
* Present
* [0] [1][b] [2][g] [3] [4]
* |___________|_____3_____|___________|___________|___________|
* 200[a]-> 220-> 240[d]-> 260-> 280-> 299
*
* ------------------------------------------------------------------------------------------------------------------
*
* Action: inc(267) - 267 is in bucket 3, so bucket 2 is zeroed and skipped. Bucket 2 is zeroed because it may have
* had contents that were relevant for timestamps 140 - 159.
*
* The `startOfCurrentEpoch`[a], does not change while `curBucket`[b] is now bucket 3.
*
* `nextEpoch()`[c] does not change as there hasn't been a rollover.
*
* `nextBucketStartTime()`[d] is now 280, the start time of bucket 4.
*
* `earliestTimeInCounter()`[e] is now 180, bucket 2 was zeroed, erasing the history from 140-159 and
* bucket 3 was set to 1, now representing 260-279 rather than 160-179.
*
* `counterExpired()`[f] is now 360. Bucket 3 in the current epoch represents 260->279, an
* `inc(long)` any of time (260 + `duration`) or beyond would require clearing all `buckets` in the
* `Counter` and any `sum(long)` that starts at 360 or later does not cover the valid time range for
* this state of the counter.
*
* `nextBucket(curBucket)`[g] is now 4, the bucket after 3.
*
*
* Past
* [_] [_] [_] [_] [4]
* |___________|___________|___________|___________|___________|
* 180[e]-> 199
*
* Present
* [0] [1] [2] [3][b] [4][g]
* |___________|_____3_____|___________|______1____|___________|
* 200[a]-> 220-> 240-> 260-> 280[d]-> 299
*
* Future
* [0] [1] [2] [_] [_]
* |___________|___________|___________|___________|___________|
* 300[c]-> 320-> 340-> 360[f]->
*
* ------------------------------------------------------------------------------------------------------------------
*
* Action: inc(310) - 310 is in bucket 0, so bucket 4 is zeroed and skipped, as it may have had contents
* for timestamps 180-199.
*
* The `startOfCurrentEpoch`[a], is now 300 as the `Counter` has rolled through bucket 0.
*
* `curBucket`[b] is now bucket 0.
*
* `nextEpoch()`[c] is now 400 because `startOfCurrentEpoch`[a] has changed.
*
* `nextBucketStartTime()`[d] is now 320, the start time of bucket 1 in this new epoch.
*
* `earliestTimeInCounter()`[e] is now 220, bucket 4 was zeroed, erasing the history from 180-199 and
* bucket 0 was set to 1, now representing 300-319 due to the epoch change, rather than 200-219, so
* 220 is the earliest time available in the `Counter`.
*
* `counterExpired()`[f] is now 400. Bucket 0 in the current epoch represents 300-319, an
* `inc(long)` any of time (300 + `duration`) or beyond would require clearing all `buckets` in the
* `Counter` and any `sum(long)` that starts at 400 or later does not cover the valid time range for
* this state of the counter.
*
* `nextBucket(curBucket)`[g] is now 1, the bucket after 0.
*
*
* Past
* [_] [1] [2] [3] [4]
* |___________|_____3_____|___________|______1____|___________|
* 220[e]-> 240-> 260-> 280-> 299
*
* Present
* [0][b] [1][g] [2] [3] [4]
* |_____1_____|___________|___________|___________|___________|
* 300[a]-> 320[d]-> 340-> 360-> 380-> 399
*
* Future
* [_] [_] [_] [_] [_]
* |___________|___________|___________|___________|___________|
* 400[c][f]->
*
* ------------------------------------------------------------------------------------------------------------------
*
* Action: inc(321) - 321 is in bucket 1, so the previous contents of bucket 1 is replaced with the value 1.
*
* The `startOfCurrentEpoch`[a] remains 300.
*
* `curBucket`[b] is now bucket 1.
*
* `nextEpoch()`[c] remains 400.
*
* `nextBucketStartTime()`[d] is now 340, the start time of bucket 2.
*
* `earliestTimeInCounter()`[e] is now 240 as bucket 1 now represents 320-339 rather than 220-239.
*
* `counterExpired()`[f] is now 420. Bucket 1 in the current epoch represents 320-339, an
* `inc(long)` any of time (320 + `duration`) or beyond would require clearing all `buckets` in the
* `Counter` and any `sum(long)` that starts at 420 or later does not cover the valid time range for
* this state of the counter.
*
* `nextBucket(curBucket)`[g] is now 2, the bucket after 1.
*
* Past
* [_] [_] [2] [3] [4]
* |___________|___________|___________|______1____|___________|
* 240[e]-> 260-> 280-> 299
*
* Present
* [0] [1][b] [2][g] [3] [4]
* |_____1_____|_____1_____|___________|___________|___________|
* 300[a]-> 320-> 340[d]-> 360-> 380-> 399
*
* Future
* [0] [_] [_] [_] [_]
* |_____0_____|___________|___________|___________|___________|
* 400[c]-> 420[f]->
*
*
* ------------------------------------------------------------------------------------------------------------------
*
* Action: sum(321) - This is a sum at the exact time of the last update, because of the `earliestTimeInCounter` check,
* it starts at bucket 2, which is after the current bucket index, 1, but the earliest time covered by
* the counter, 240 to 259.
* 1) calculate start = 321 - duration = 321 - 100 = 221
* 2) start is before the nextBucketStartTime (340), so sum does not terminate early
* 3) start is before the earliestTimeInCounter (240) -> start = 240
* 3) Iterate from bucket(start) = bucket(240) = 2 until curBucket 1, summing the following
* bucket 2 = 0, bucket 3 = 1, bucket 4 = 0, bucket 0 = 1 -> 1 + 1 = 2
* 4) return that with the context of bucket 1 = 1 -> 2 + 1 = 3
*
* Action: sum(465) - This sum is so far in the future, it does not overlap with any bucket in range
* 1) calculate start = 465 - duration = 465 - 100 = 365
* 2) start is greater than or equal to the nextBucketStartTime (340), so we know the counter has no contexts
* -> return 0
*
* Action: sum(439) - This sum starts _after_ the last update, which is at 321, but because of the bucket resolution
* sum still catches the value bucket 1, times 320 to 339.
* 1) calculate start = 439 - duration = 439 - 100 = 339
* 2) start is before nextBucketStartTime(340), so sum does not terminate early
* 3) start is after earliestTimeInCounter (240), so it is now updated
* 4) bucket(start) = 1 which is curBucket, so the for loop falls through
* 5) return total = 0 + buckets[curBucket] = 0 + 1 = 1
*/
protected final long resolution;
protected final long duration;
protected final long[] buckets;
// The start time of buckets[0]. bucket(t + (i * duration)) is the same for all i. startOfCurrentEpoch allows the counter
// to differentiate between those times.
protected long startOfCurrentEpoch;
protected int curBucket = 0;
/**
* Create a Counter that covers duration seconds at the given resolution. Duration must be divisible by resolution.
*/
public Counter(long resolution, long duration) {
if (resolution <= 0) {
throw new IllegalArgumentException("resolution [" + resolution + "] must be greater than zero");
} else if (duration <= 0) {
throw new IllegalArgumentException("duration [" + duration + "] must be greater than zero");
} else if (duration % resolution != 0) {
throw new IllegalArgumentException("duration [" + duration + "] must divisible by resolution [" + resolution + "]");
}
this.resolution = resolution;
this.duration = duration;
this.buckets = new long[(int) (duration / resolution)];
this.startOfCurrentEpoch = 0;
assert buckets.length > 0;
}
/**
* Increment the counter at time {@code now}, expressed in seconds.
*/
public void inc(long now) {
if (now < nextBucketStartTime()) {
buckets[curBucket]++;
} else if (now >= counterExpired()) {
reset(now);
} else {
int dstBucket = bucket(now);
for (int i = nextBucket(curBucket); i != dstBucket; i = nextBucket(i)) {
buckets[i] = 0;
}
curBucket = dstBucket;
buckets[curBucket] = 1;
if (now >= nextEpoch()) {
startOfCurrentEpoch = epoch(now);
}
}
}
/**
* sum for the duration of the counter until {@code now}.
*/
public long sum(long now) {
long start = now - duration;
if (start >= nextBucketStartTime()) {
return 0;
}
if (start < earliestTimeInCounter()) {
start = earliestTimeInCounter();
}
long total = 0;
for (int i = bucket(start); i != curBucket; i = nextBucket(i)) {
total += buckets[i];
}
return total + buckets[curBucket];
}
/**
* Reset the counter. Next counter begins at now.
*/
void reset(long now) {
Arrays.fill(buckets, 0);
startOfCurrentEpoch = epoch(now);
curBucket = bucket(now);
buckets[curBucket] = 1;
}
// The time at bucket[0] for the given timestamp.
long epoch(long t) {
return (t / duration) * duration;
}
// What is the start time of the next epoch?
long nextEpoch() {
return startOfCurrentEpoch + duration;
}
// What is the earliest time covered by this counter? Counters do not extend before zero.
long earliestTimeInCounter() {
long time = nextBucketStartTime() - duration;
return time <= 0 ? 0 : time;
}
// When does this entire counter expire?
long counterExpired() {
return startOfCurrentEpoch + (curBucket * resolution) + duration;
}
// bucket for the given time
int bucket(long t) {
return (int) (t / resolution) % buckets.length;
}
// the next bucket in the circular bucket array
int nextBucket(int i) {
return (i + 1) % buckets.length;
}
// When does the next bucket start?
long nextBucketStartTime() {
return startOfCurrentEpoch + ((curBucket + 1) * resolution);
}
}
}
| Counter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/inlinedirtychecking/LoadUninitializedCollectionTest.java | {
"start": 4414,
"end": 4890
} | class ____ {
@Id
private Long id;
@ManyToOne(fetch = FetchType.LAZY)
private Bank bank;
private String serialNumber;
public BankAccount() {
}
public BankAccount(Long id, Bank bank, String serialNumber) {
this.id = id;
this.bank = bank;
this.serialNumber = serialNumber;
}
public Bank getBank() {
return bank;
}
public String getSerialNumber() {
return serialNumber;
}
}
@Entity(name = "BankDepartment")
public static | BankAccount |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/crypto/RsaKeyConversionServicePostProcessorTests.java | {
"start": 2078,
"end": 6672
} | class ____ {
// @formatter:off
private static final String PKCS8_PRIVATE_KEY = "-----BEGIN PRIVATE KEY-----\n"
+ "MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCMk7CKSTfu3QoV\n"
+ "HoPVXxwZO+qweztd36cVWYqGOZinrOR2crWFu50AgR2CsdIH0+cqo7F4Vx7/3O8i\n"
+ "RpYYZPe2VoO5sumzJt8P6fS80/TAKjhJDAqgZKRJTgGN8KxCM6p/aJli1ZeDBqiV\n"
+ "v7vJJe+ZgJuPGRS+HMNa/wPxEkqqXsglcJcQV1ZEtfKXSHB7jizKpRL38185SyAC\n"
+ "pwyjvBu6Cmm1URfhQo88mf239ONh4dZ2HoDfzN1q6Ssu4F4hgutxr9B0DVLDP5u+\n"
+ "WFrm3nsJ76zf99uJ+ntMUHJ+bY+gOjSlVWIVBIZeAaEGKCNWRk/knjvjbijpvm3U\n"
+ "acGlgdL3AgMBAAECggEACxxxS7zVyu91qI2s5eSKmAQAXMqgup6+2hUluc47nqUv\n"
+ "uZz/c/6MPkn2Ryo+65d4IgqmMFjSfm68B/2ER5FTcvoLl1Xo2twrrVpUmcg3BClS\n"
+ "IZPuExdhVNnxjYKEWwcyZrehyAoR261fDdcFxLRW588efIUC+rPTTRHzAc7sT+Ln\n"
+ "t/uFeYNWJm3LaegOLoOmlMAhJ5puAWSN1F0FxtRf/RVgzbLA9QC975SKHJsfWCSr\n"
+ "IZyPsdeaqomKaF65l8nfqlE0Ua2L35gIOGKjUwb7uUE8nI362RWMtYdoi3zDDyoY\n"
+ "hSFbgjylCHDM0u6iSh6KfqOHtkYyJ8tUYgVWl787wQKBgQDYO3wL7xuDdD101Lyl\n"
+ "AnaDdFB9fxp83FG1cWr+t7LYm9YxGfEUsKHAJXN6TIayDkOOoVwIl+Gz0T3Z06Bm\n"
+ "eBGLrB9mrVA7+C7NJwu5gTMlzP6HxUR9zKJIQ/VB1NUGM77LSmvOFbHc9Q0+z8EH\n"
+ "X5WO516a3Z7lNtZJcCoPOtu2rwKBgQCmbj41Fh+SSEUApCEKms5ETRpe7LXQlJgx\n"
+ "yW7zcJNNuIb1C3vBLPxjiOTMgYKOeMg5rtHTGLT43URHLh9ArjawasjSAr4AM3J4\n"
+ "xpoi/sKGDdiKOsuDWIGfzdYL8qyTHSdpZLQsCTMRiRYgAHZFPgNa7SLZRfZicGlr\n"
+ "GHN1rJW6OQKBgEjiM/upyrJSWeypUDSmUeAZMpA6aWkwsfHgmtnkfUn5rQa74cDB\n"
+ "kKO9e+D7LmOR3z+SL/1NhGwh2SE07dncGr3jdGodfO/ZxZyszozmeaECKcEFwwJM\n"
+ "GV8WWPKplGwUwPiwywmZ0mvRxXcoe73KgBS88+xrSwWjqDL0tZiQlEJNAoGATkei\n"
+ "GMQMG3jEg9Wu+NbxV6zQT3+U0MNjhl9RQU1c63x0dcNt9OFc4NAdlZcAulRTENaK\n"
+ "OHjxffBM0hH+fySx8m53gFfr2BpaqDX5f6ZGBlly1SlsWZ4CchCVsc71nshipi7I\n"
+ "k8HL9F5/OpQdDNprJ5RMBNfkWE65Nrcsb1e6oPkCgYAxwgdiSOtNg8PjDVDmAhwT\n"
+ "Mxj0Dtwi2fAqQ76RVrrXpNp3uCOIAu4CfruIb5llcJ3uak0ZbnWri32AxSgk80y3\n"
+ "EWiRX/WEDu5znejF+5O3pI02atWWcnxifEKGGlxwkcMbQdA67MlrJLFaSnnGpNXo\n"
+ "yPfcul058SOqhafIZQMEKQ==\n"
+ "-----END PRIVATE KEY-----";
// @formatter:on
private static final String X509_PUBLIC_KEY_LOCATION = "classpath:org/springframework/security/config/annotation/web/configuration/simple.pub";
private final RsaKeyConversionServicePostProcessor postProcessor = new RsaKeyConversionServicePostProcessor();
private ConversionService service;
@Value("classpath:org/springframework/security/config/annotation/web/configuration/simple.pub")
RSAPublicKey publicKey;
@Value("classpath:org/springframework/security/config/annotation/web/configuration/simple.priv")
RSAPrivateKey privateKey;
@Value("custom:simple.pub")
RSAPublicKey samePublicKey;
public final SpringTestContext spring = new SpringTestContext(this);
@BeforeEach
public void setUp() {
ConfigurableListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.setConversionService(new GenericConversionService());
this.postProcessor.postProcessBeanFactory(beanFactory);
this.service = beanFactory.getConversionService();
}
@Test
public void convertWhenUsingConversionServiceForRawKeyThenOk() {
RSAPrivateKey key = this.service.convert(PKCS8_PRIVATE_KEY, RSAPrivateKey.class);
assertThat(key.getModulus().bitLength()).isEqualTo(2048);
}
@Test
public void convertWhenUsingConversionServiceForClasspathThenOk() {
RSAPublicKey key = this.service.convert(X509_PUBLIC_KEY_LOCATION, RSAPublicKey.class);
assertThat(key.getModulus().bitLength()).isEqualTo(1024);
}
@Test
public void valueWhenReferringToClasspathPublicKeyThenConverts() {
this.spring.register(CustomResourceLoaderConfig.class, DefaultConfig.class).autowire();
assertThat(this.publicKey.getModulus().bitLength()).isEqualTo(1024);
}
@Test
public void valueWhenReferringToClasspathPrivateKeyThenConverts() {
this.spring.register(CustomResourceLoaderConfig.class, DefaultConfig.class).autowire();
assertThat(this.privateKey.getModulus().bitLength()).isEqualTo(2048);
}
@Test
public void valueWhenReferringToCustomResourceLoadedPublicKeyThenConverts() {
this.spring.register(CustomResourceLoaderConfig.class, DefaultConfig.class).autowire();
assertThat(this.samePublicKey.getModulus().bitLength()).isEqualTo(1024);
}
@Test
public void valueWhenOverridingConversionServiceThenUsed() {
assertThatExceptionOfType(Exception.class)
.isThrownBy(
() -> this.spring.register(OverrideConversionServiceConfig.class, DefaultConfig.class).autowire())
.withRootCauseInstanceOf(IllegalArgumentException.class);
}
@Configuration
@EnableWebSecurity
static | RsaKeyConversionServicePostProcessorTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/repositories/Repository.java | {
"start": 1777,
"end": 2228
} | interface ____ interacting with a repository in snapshot and restore.
* <p>
* Implementations are responsible for reading and writing both metadata and shard data to and from
* a repository backend.
* <p>
* To perform a snapshot:
* <ul>
* <li>Data nodes call {@link Repository#snapshotShard}
* for each shard</li>
* <li>When all shard calls return master calls {@link #finalizeSnapshot} with possible list of failures</li>
* </ul>
*/
public | for |
java | playframework__playframework | web/play-java-forms/src/test/scala/play/data/format/FormattersTest.java | {
"start": 1387,
"end": 1596
} | class ____ {
private Integer plainIntegerField;
@CustomInteger
private Integer annotatedIntegerField;
}
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
@ | Bean |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/configuration/StaticInitConfigBuilder.java | {
"start": 213,
"end": 691
} | class ____ implements SmallRyeConfigBuilderCustomizer {
@Override
public void configBuilder(final SmallRyeConfigBuilder builder) {
new QuarkusConfigBuilderCustomizer().configBuilder(builder);
builder.forClassLoader(Thread.currentThread().getContextClassLoader())
.addDefaultInterceptors()
.addDefaultSources();
}
@Override
public int priority() {
return Integer.MIN_VALUE;
}
}
| StaticInitConfigBuilder |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/cluster/models/partitions/ClusterPartitionParserUnitTests.java | {
"start": 455,
"end": 7269
} | class ____ {
private static String nodes = "c37ab8396be428403d4e55c0d317348be27ed973 127.0.0.1:7381 master - 111 1401258245007 222 connected 7000 12000 12002-16383\n"
+ "3d005a179da7d8dc1adae6409d47b39c369e992b 127.0.0.1:7380 master - 0 1401258245007 2 disconnected 8000-11999 [8000->-4213a8dabb94f92eb6a860f4d0729e6a25d43e0c] [5461-<-c37ab8396be428403d4e55c0d317348be27ed973]\n"
+ "4213a8dabb94f92eb6a860f4d0729e6a25d43e0c 127.0.0.1:7379 myself,slave 4213a8dabb94f92eb6a860f4d0729e6a25d43e0c 0 0 1 connected 0-6999 7001-7999 12001\n"
+ "5f4a2236d00008fba7ac0dd24b95762b446767bd :0 myself,master - 0 0 1 connected [5460->-5f4a2236d00008fba7ac0dd24b95762b446767bd] [5461-<-5f4a2236d00008fba7ac0dd24b95762b446767bd]";
private static String nodesWithIPv6Addresses = "c37ab8396be428403d4e55c0d317348be27ed973 affe:affe:123:34::1:7381 master - 111 1401258245007 222 connected 7000 12000 12002-16383\n"
+ "3d005a179da7d8dc1adae6409d47b39c369e992b [dead:beef:dead:beef::1]:7380 master - 0 1401258245007 2 disconnected 8000-11999 [8000->-4213a8dabb94f92eb6a860f4d0729e6a25d43e0c] [5461-<-c37ab8396be428403d4e55c0d317348be27ed973]\n"
+ "4213a8dabb94f92eb6a860f4d0729e6a25d43e0c 127.0.0.1:7379 myself,slave 4213a8dabb94f92eb6a860f4d0729e6a25d43e0c 0 0 1 connected 0-6999 7001-7999 12001\n"
+ "5f4a2236d00008fba7ac0dd24b95762b446767bd :0 myself,master - 0 0 1 connected [5460->-5f4a2236d00008fba7ac0dd24b95762b446767bd] [5461-<-5f4a2236d00008fba7ac0dd24b95762b446767bd]";
private static String nodesWithBusPort = "c37ab8396be428403d4e55c0d317348be27ed973 127.0.0.1:7381@17381 slave 4213a8dabb94f92eb6a860f4d0729e6a25d43e0c 0 1454482721690 3 connected\n"
+ "3d005a179da7d8dc1adae6409d47b39c369e992b 127.0.0.1:7380@17380 master - 0 1454482721690 0 connected 12000-16383\n"
+ "4213a8dabb94f92eb6a860f4d0729e6a25d43e0c 127.0.0.1:7379@17379 myself,master - 0 0 1 connected 0-11999\n"
+ "5f4a2236d00008fba7ac0dd24b95762b446767bd 127.0.0.1:7382@17382 slave 3d005a179da7d8dc1adae6409d47b39c369e992b 0 1454482721690 2 connected";
private static String nodesWithHostname = "c37ab8396be428403d4e55c0d317348be27ed973 127.0.0.1:7381@17381,my-host.name.com slave 4213a8dabb94f92eb6a860f4d0729e6a25d43e0c 0 1454482721690 3 connected\n"
+ "3d005a179da7d8dc1adae6409d47b39c369e992b 127.0.0.1:7380@17380, master - 0 1454482721690 0 connected 12000-16383";
@Test
void shouldParseNodesCorrectly() {
Partitions result = ClusterPartitionParser.parse(nodes);
assertThat(result.getPartitions()).hasSize(4);
RedisClusterNode p1 = result.getPartitions().get(0);
assertThat(p1.getNodeId()).isEqualTo("c37ab8396be428403d4e55c0d317348be27ed973");
assertThat(p1.getUri().getHost()).isEqualTo("127.0.0.1");
assertThat(p1.getUri().getPort()).isEqualTo(7381);
assertThat(p1.getSlaveOf()).isNull();
assertThat(p1.getFlags()).isEqualTo(Collections.singleton(RedisClusterNode.NodeFlag.MASTER));
assertThat(p1.getPingSentTimestamp()).isEqualTo(111);
assertThat(p1.getPongReceivedTimestamp()).isEqualTo(1401258245007L);
assertThat(p1.getConfigEpoch()).isEqualTo(222);
assertThat(p1.isConnected()).isTrue();
assertThat(p1.getSlots()).contains(7000, 12000, 12002, 12003, 16383);
RedisClusterNode p3 = result.getPartitions().get(2);
assertThat(p3.getSlaveOf()).isEqualTo("4213a8dabb94f92eb6a860f4d0729e6a25d43e0c");
assertThat(p3.toString()).contains(RedisClusterNode.class.getSimpleName());
assertThat(result.toString()).contains(Partitions.class.getSimpleName());
}
@Test
void shouldParseNodesWithBusPort() {
Partitions result = ClusterPartitionParser.parse(nodesWithBusPort);
assertThat(result.getPartitions()).hasSize(4);
RedisClusterNode p1 = result.getPartitions().get(0);
assertThat(p1.getNodeId()).isEqualTo("c37ab8396be428403d4e55c0d317348be27ed973");
assertThat(p1.getUri().getHost()).isEqualTo("127.0.0.1");
assertThat(p1.getUri().getPort()).isEqualTo(7381);
}
@Test
void shouldParseNodesWithHostname() {
Partitions result = ClusterPartitionParser.parse(nodesWithHostname);
assertThat(result.getPartitions()).hasSize(2);
RedisClusterNode p1 = result.getPartitions().get(0);
assertThat(p1.getNodeId()).isEqualTo("c37ab8396be428403d4e55c0d317348be27ed973");
assertThat(p1.getUri().getHost()).isEqualTo("my-host.name.com");
assertThat(p1.getUri().getPort()).isEqualTo(7381);
RedisClusterNode p2 = result.getPartitions().get(1);
assertThat(p2.getUri().getHost()).isEqualTo("127.0.0.1");
}
@Test
void shouldParseNodesIPv6Address() {
Partitions result = ClusterPartitionParser.parse(nodesWithIPv6Addresses);
assertThat(result.getPartitions()).hasSize(4);
RedisClusterNode p1 = result.getPartitions().get(0);
assertThat(p1.getUri().getHost()).isEqualTo("affe:affe:123:34::1");
assertThat(p1.getUri().getPort()).isEqualTo(7381);
RedisClusterNode p2 = result.getPartitions().get(1);
assertThat(p2.getUri().getHost()).isEqualTo("dead:beef:dead:beef::1");
assertThat(p2.getUri().getPort()).isEqualTo(7380);
}
@Test
void getNodeByHashShouldReturnCorrectNode() {
Partitions partitions = ClusterPartitionParser.parse(nodes);
assertThat(partitions.getPartitionBySlot(7000).getNodeId()).isEqualTo("c37ab8396be428403d4e55c0d317348be27ed973");
assertThat(partitions.getPartitionBySlot(5460).getNodeId()).isEqualTo("4213a8dabb94f92eb6a860f4d0729e6a25d43e0c");
}
@Test
void testModel() {
RedisClusterNode node = mockRedisClusterNode();
assertThat(node.toString()).contains(RedisClusterNode.class.getSimpleName());
assertThat(node.hasSlot(1)).isTrue();
assertThat(node.hasSlot(9)).isFalse();
}
RedisClusterNode mockRedisClusterNode() {
RedisClusterNode node = new RedisClusterNode();
node.setConfigEpoch(1);
node.setConnected(true);
node.setFlags(new HashSet<>());
node.setNodeId("abcd");
node.setPingSentTimestamp(2);
node.setPongReceivedTimestamp(3);
node.setSlaveOf("me");
node.setSlots(LettuceLists.unmodifiableList(1, 2, 3));
node.setUri(new RedisURI("localhost", 1, Duration.ofDays(1)));
return node;
}
@Test
void createNode() {
RedisClusterNode original = mockRedisClusterNode();
RedisClusterNode created = RedisClusterNode.of(original.getNodeId());
assertThat(original).isEqualTo(created);
}
}
| ClusterPartitionParserUnitTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryActionTests.java | {
"start": 745,
"end": 1776
} | class ____ extends ESSingleNodeTestCase {
/*
* This test covers a fallthrough bug that we had, where if the index we were validating against did not exist, we would invoke the
* failure listener, and then fallthrough and invoke the success listener too. This would cause problems when the listener was
* ultimately wrapping sending a response on the channel, as it could lead to us sending both a failure or success responses, and having
* them garbled together, or trying to write one after the channel had closed, etc.
*/
public void testListenerOnlyInvokedOnceWhenIndexDoesNotExist() {
assertThat(
safeAwaitFailure(
ValidateQueryResponse.class,
listener -> client().admin()
.indices()
.validateQuery(new ValidateQueryRequest("non-existent-index"), ActionListener.assertOnce(listener))
),
instanceOf(IndexNotFoundException.class)
);
}
}
| TransportValidateQueryActionTests |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/load/resource/gif/ByteBufferGifDecoderTest.java | {
"start": 1425,
"end": 5139
} | class ____ {
private static final byte[] GIF_HEADER = new byte[] {0x47, 0x49, 0x46};
private static final int ARRAY_POOL_SIZE_BYTES = 4 * 1024 * 1024;
private ByteBufferGifDecoder decoder;
private GifHeader gifHeader;
private Options options;
@Mock private BitmapPool bitmapPool;
@Mock private GifHeaderParser parser;
@Mock private GifDecoder gifDecoder;
@Mock private ByteBufferGifDecoder.GifHeaderParserPool parserPool;
@Mock private ByteBufferGifDecoder.GifDecoderFactory decoderFactory;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
gifHeader = Mockito.spy(new GifHeader());
when(parser.parseHeader()).thenReturn(gifHeader);
when(parserPool.obtain(isA(ByteBuffer.class))).thenReturn(parser);
when(decoderFactory.build(
isA(GifDecoder.BitmapProvider.class), eq(gifHeader), isA(ByteBuffer.class), anyInt()))
.thenReturn(gifDecoder);
List<ImageHeaderParser> parsers = new ArrayList<>();
parsers.add(new DefaultImageHeaderParser());
options = new Options();
decoder =
new ByteBufferGifDecoder(
ApplicationProvider.getApplicationContext(),
parsers,
bitmapPool,
new LruArrayPool(ARRAY_POOL_SIZE_BYTES),
parserPool,
decoderFactory);
}
@Test
public void testDoesNotHandleStreamIfEnabledButNotAGif() throws IOException {
assertThat(decoder.handles(ByteBuffer.allocate(0), options)).isFalse();
}
@Test
public void testHandlesStreamIfContainsGifHeaderAndDisabledIsNotSet() throws IOException {
assertThat(decoder.handles(ByteBuffer.wrap(GIF_HEADER), options)).isTrue();
}
@Test
public void testHandlesStreamIfContainsGifHeaderAndDisabledIsFalse() throws IOException {
options.set(GifOptions.DISABLE_ANIMATION, false);
assertThat(decoder.handles(ByteBuffer.wrap(GIF_HEADER), options)).isTrue();
}
@Test
public void testDoesNotHandleStreamIfDisabled() throws IOException {
options.set(GifOptions.DISABLE_ANIMATION, true);
assertThat(decoder.handles(ByteBuffer.wrap(GIF_HEADER), options)).isFalse();
}
@Test
public void testReturnsNullIfParsedHeaderHasZeroFrames() throws IOException {
when(gifHeader.getNumFrames()).thenReturn(0);
assertNull(decoder.decode(ByteBuffer.allocate(10), 100, 100, options));
}
@Test
public void testReturnsNullIfParsedHeaderHasFormatError() {
when(gifHeader.getStatus()).thenReturn(GifDecoder.STATUS_FORMAT_ERROR);
assertNull(decoder.decode(ByteBuffer.allocate(10), 100, 100, options));
}
@Test
public void testReturnsNullIfParsedHeaderHasOpenError() {
when(gifHeader.getStatus()).thenReturn(GifDecoder.STATUS_OPEN_ERROR);
assertNull(decoder.decode(ByteBuffer.allocate(10), 100, 100, options));
}
@Test
public void testReturnsParserToPool() throws IOException {
decoder.decode(ByteBuffer.allocate(10), 100, 100, options);
verify(parserPool).release(eq(parser));
}
@Test
public void testReturnsParserToPoolWhenParserThrows() {
when(parser.parseHeader()).thenThrow(new RuntimeException("Test"));
try {
decoder.decode(ByteBuffer.allocate(10), 100, 100, options);
fail("Failed to receive expected exception");
} catch (RuntimeException e) {
// Expected.
}
verify(parserPool).release(eq(parser));
}
@Test
public void testReturnsNullIfGifDecoderFailsToDecodeFirstFrame() {
when(gifHeader.getNumFrames()).thenReturn(1);
when(gifHeader.getStatus()).thenReturn(GifDecoder.STATUS_OK);
when(gifDecoder.getNextFrame()).thenReturn(null);
assertNull(decoder.decode(ByteBuffer.allocate(10), 100, 100, options));
}
}
| ByteBufferGifDecoderTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/ProduceResponse.java | {
"start": 2330,
"end": 6690
} | class ____ extends AbstractResponse {
public static final long INVALID_OFFSET = -1L;
private final ProduceResponseData data;
public ProduceResponse(ProduceResponseData produceResponseData) {
super(ApiKeys.PRODUCE);
this.data = produceResponseData;
}
/**
* Constructor for Version 0
* This is deprecated in favor of using the ProduceResponseData constructor, KafkaApis should switch to that
* in KAFKA-10730
* @param responses Produced data grouped by topic-partition
*/
@Deprecated
public ProduceResponse(Map<TopicIdPartition, PartitionResponse> responses) {
this(responses, DEFAULT_THROTTLE_TIME, Collections.emptyList());
}
/**
* This is deprecated in favor of using the ProduceResponseData constructor, KafkaApis should switch to that
* in KAFKA-10730
* @param responses Produced data grouped by topic-partition
* @param throttleTimeMs Time in milliseconds the response was throttled
*/
@Deprecated
public ProduceResponse(Map<TopicIdPartition, PartitionResponse> responses, int throttleTimeMs) {
this(toData(responses, throttleTimeMs, Collections.emptyList()));
}
/**
* Constructor for the latest version
* This is deprecated in favor of using the ProduceResponseData constructor, KafkaApis should switch to that
* in KAFKA-10730
* @param responses Produced data grouped by topic-partition
* @param throttleTimeMs Time in milliseconds the response was throttled
* @param nodeEndpoints List of node endpoints
*/
@Deprecated
public ProduceResponse(Map<TopicIdPartition, PartitionResponse> responses, int throttleTimeMs, List<Node> nodeEndpoints) {
this(toData(responses, throttleTimeMs, nodeEndpoints));
}
private static ProduceResponseData toData(Map<TopicIdPartition, PartitionResponse> responses, int throttleTimeMs, List<Node> nodeEndpoints) {
ProduceResponseData data = new ProduceResponseData().setThrottleTimeMs(throttleTimeMs);
responses.forEach((tp, response) -> {
ProduceResponseData.TopicProduceResponse tpr = data.responses().find(tp.topic(), tp.topicId());
if (tpr == null) {
tpr = new ProduceResponseData.TopicProduceResponse().setName(tp.topic()).setTopicId(tp.topicId());
data.responses().add(tpr);
}
tpr.partitionResponses()
.add(new ProduceResponseData.PartitionProduceResponse()
.setIndex(tp.partition())
.setBaseOffset(response.baseOffset)
.setLogStartOffset(response.logStartOffset)
.setLogAppendTimeMs(response.logAppendTime)
.setErrorMessage(response.errorMessage)
.setErrorCode(response.error.code())
.setCurrentLeader(response.currentLeader != null ? response.currentLeader : new LeaderIdAndEpoch())
.setRecordErrors(response.recordErrors
.stream()
.map(e -> new ProduceResponseData.BatchIndexAndErrorMessage()
.setBatchIndex(e.batchIndex)
.setBatchIndexErrorMessage(e.message))
.collect(Collectors.toList())));
});
nodeEndpoints.forEach(endpoint -> data.nodeEndpoints()
.add(new ProduceResponseData.NodeEndpoint()
.setNodeId(endpoint.id())
.setHost(endpoint.host())
.setPort(endpoint.port())
.setRack(endpoint.rack())));
return data;
}
@Override
public ProduceResponseData data() {
return this.data;
}
@Override
public int throttleTimeMs() {
return this.data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.responses().forEach(t -> t.partitionResponses().forEach(p -> updateErrorCounts(errorCounts, Errors.forCode(p.errorCode()))));
return errorCounts;
}
public static final | ProduceResponse |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/launcher/testservices/FailingStopInStartService.java | {
"start": 930,
"end": 1480
} | class ____ extends FailureTestService {
public static final String NAME =
"org.apache.hadoop.service.launcher.testservices.FailingStopInStartService";
public static final int EXIT_CODE = -4;
public FailingStopInStartService() {
super(false, false, true, 0);
}
@Override
protected void serviceStart() throws Exception {
super.serviceStart();
try {
stop();
} catch (Exception e) {
//this is secretly swallowed
}
}
@Override
int getExitCode() {
return EXIT_CODE;
}
}
| FailingStopInStartService |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_generic_huansi.java | {
"start": 155,
"end": 605
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
String jsonStr = "{\"id\": 1234}";
SimpleGenericObject jsonObj = JSON.parseObject(jsonStr, SimpleGenericObject.class);
try {
Long id = jsonObj.getId();
assertTrue(id.equals(1234L));
} catch (Exception e) {
fail("parse error:" + e.getMessage());
}
}
public static | Bug_for_generic_huansi |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/search/index/NumericIndex.java | {
"start": 730,
"end": 1440
} | interface ____ extends FieldIndex {
/**
* Defines the attribute associated to the field name
*
* @param as the associated attribute
* @return options object
*/
NumericIndex as(String as);
/**
* Defines sort mode applied to the value of this attribute
*
* @param sortMode sort mode
* @return options object
*/
NumericIndex sortMode(SortMode sortMode);
/**
* Defines to not index this attribute
*
* @return options object
*/
NumericIndex noIndex();
/**
* Defines to index documents that don't contain this attribute
*
* @return options object
*/
NumericIndex indexMissing();
}
| NumericIndex |
java | spring-projects__spring-framework | spring-orm/src/test/java/org/springframework/orm/jpa/hibernate/HibernateNativeEntityManagerFactorySpringBeanContainerIntegrationTests.java | {
"start": 1949,
"end": 11797
} | class ____
extends AbstractEntityManagerFactoryIntegrationTests {
@Autowired
private ApplicationContext applicationContext;
@Override
protected String[] getConfigLocations() {
return new String[] {"/org/springframework/orm/jpa/hibernate/hibernate-manager-native.xml",
"/org/springframework/orm/jpa/memdb.xml", "/org/springframework/orm/jpa/inject.xml",
"/org/springframework/orm/jpa/hibernate/inject-hibernate-spring-bean-container-tests.xml"};
}
@SuppressWarnings("deprecation")
private ManagedBeanRegistry getManagedBeanRegistry() {
SessionFactory sessionFactory = entityManagerFactory.unwrap(SessionFactory.class);
ServiceRegistry serviceRegistry = sessionFactory.getSessionFactoryOptions().getServiceRegistry();
return serviceRegistry.requireService(ManagedBeanRegistry.class);
}
private BeanContainer getBeanContainer() {
return getManagedBeanRegistry().getBeanContainer();
}
@Test
void testCanRetrieveBeanByTypeWithJpaCompliantOptions() {
BeanContainer beanContainer = getBeanContainer();
assertThat(beanContainer).isNotNull();
ContainedBean<SinglePrototypeInSpringContextTestBean> bean = beanContainer.getBean(
SinglePrototypeInSpringContextTestBean.class,
JpaLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
);
assertThat(bean).isNotNull();
SinglePrototypeInSpringContextTestBean instance = bean.getBeanInstance();
assertThat(instance).isNotNull();
assertThat(instance.getApplicationContext()).isSameAs(applicationContext);
}
@Test
void testCanRetrieveBeanByNameWithJpaCompliantOptions() {
BeanContainer beanContainer = getBeanContainer();
assertThat(beanContainer).isNotNull();
ContainedBean<MultiplePrototypesInSpringContextTestBean> bean = beanContainer.getBean(
"multiple-1", MultiplePrototypesInSpringContextTestBean.class,
JpaLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
);
assertThat(bean).isNotNull();
MultiplePrototypesInSpringContextTestBean instance = bean.getBeanInstance();
assertThat(instance).isNotNull();
assertThat(instance.getName()).isEqualTo("multiple-1");
assertThat(instance.getApplicationContext()).isSameAs(applicationContext);
}
@Test
void testCanRetrieveBeanByTypeWithNativeOptions() {
BeanContainer beanContainer = getBeanContainer();
assertThat(beanContainer).isNotNull();
ContainedBean<SinglePrototypeInSpringContextTestBean> bean = beanContainer.getBean(
SinglePrototypeInSpringContextTestBean.class,
NativeLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
);
assertThat(bean).isNotNull();
SinglePrototypeInSpringContextTestBean instance = bean.getBeanInstance();
assertThat(instance).isNotNull();
assertThat(instance.getName()).isEqualTo("single");
assertThat(instance.getApplicationContext()).isSameAs(applicationContext);
ContainedBean<SinglePrototypeInSpringContextTestBean> bean2 = beanContainer.getBean(
SinglePrototypeInSpringContextTestBean.class,
NativeLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
);
assertThat(bean2).isNotNull();
SinglePrototypeInSpringContextTestBean instance2 = bean2.getBeanInstance();
assertThat(instance2).isNotNull();
// Due to the lifecycle options, and because the bean has the "prototype" scope, we should not return the same instance
assertThat(instance2).isNotSameAs(instance);
}
@Test
void testCanRetrieveBeanByNameWithNativeOptions() {
BeanContainer beanContainer = getBeanContainer();
assertThat(beanContainer).isNotNull();
ContainedBean<MultiplePrototypesInSpringContextTestBean> bean = beanContainer.getBean(
"multiple-1", MultiplePrototypesInSpringContextTestBean.class,
NativeLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
);
assertThat(bean).isNotNull();
MultiplePrototypesInSpringContextTestBean instance = bean.getBeanInstance();
assertThat(instance).isNotNull();
assertThat(instance.getName()).isEqualTo("multiple-1");
assertThat(instance.getApplicationContext()).isSameAs(applicationContext);
ContainedBean<MultiplePrototypesInSpringContextTestBean> bean2 = beanContainer.getBean(
"multiple-1", MultiplePrototypesInSpringContextTestBean.class,
NativeLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
);
assertThat(bean2).isNotNull();
MultiplePrototypesInSpringContextTestBean instance2 = bean2.getBeanInstance();
assertThat(instance2).isNotNull();
// Due to the lifecycle options, and because the bean has the "prototype" scope, we should not return the same instance
assertThat(instance2).isNotSameAs(instance);
}
@Test
void testCanRetrieveFallbackBeanByTypeWithJpaCompliantOptions() {
BeanContainer beanContainer = getBeanContainer();
assertThat(beanContainer).isNotNull();
NoDefinitionInSpringContextTestBeanInstanceProducer fallbackProducer = new NoDefinitionInSpringContextTestBeanInstanceProducer();
ContainedBean<NoDefinitionInSpringContextTestBean> bean = beanContainer.getBean(
NoDefinitionInSpringContextTestBean.class,
JpaLifecycleOptions.INSTANCE, fallbackProducer
);
assertThat(fallbackProducer.currentUnnamedInstantiationCount()).isEqualTo(1);
assertThat(fallbackProducer.currentNamedInstantiationCount()).isEqualTo(0);
assertThat(bean).isNotNull();
NoDefinitionInSpringContextTestBean instance = bean.getBeanInstance();
assertThat(instance).isNotNull();
assertThat(instance.getSource()).isEqualTo(BeanSource.FALLBACK);
assertThat(instance.getApplicationContext()).isNull();
}
@Test
void testCanRetrieveFallbackBeanByNameWithJpaCompliantOptions() {
BeanContainer beanContainer = getBeanContainer();
assertThat(beanContainer).isNotNull();
NoDefinitionInSpringContextTestBeanInstanceProducer fallbackProducer = new NoDefinitionInSpringContextTestBeanInstanceProducer();
ContainedBean<NoDefinitionInSpringContextTestBean> bean = beanContainer.getBean(
"some name", NoDefinitionInSpringContextTestBean.class,
JpaLifecycleOptions.INSTANCE, fallbackProducer
);
assertThat(fallbackProducer.currentUnnamedInstantiationCount()).isEqualTo(0);
assertThat(fallbackProducer.currentNamedInstantiationCount()).isEqualTo(1);
assertThat(bean).isNotNull();
NoDefinitionInSpringContextTestBean instance = bean.getBeanInstance();
assertThat(instance).isNotNull();
assertThat(instance.getSource()).isEqualTo(BeanSource.FALLBACK);
assertThat(instance.getName()).isEqualTo("some name");
assertThat(instance.getApplicationContext()).isNull();
}
@Test
void testCanRetrieveFallbackBeanByTypeWithNativeOptions() {
BeanContainer beanContainer = getBeanContainer();
assertThat(beanContainer).isNotNull();
NoDefinitionInSpringContextTestBeanInstanceProducer fallbackProducer = new NoDefinitionInSpringContextTestBeanInstanceProducer();
ContainedBean<NoDefinitionInSpringContextTestBean> bean = beanContainer.getBean(
NoDefinitionInSpringContextTestBean.class,
NativeLifecycleOptions.INSTANCE, fallbackProducer
);
assertThat(fallbackProducer.currentUnnamedInstantiationCount()).isEqualTo(1);
assertThat(fallbackProducer.currentNamedInstantiationCount()).isEqualTo(0);
assertThat(bean).isNotNull();
NoDefinitionInSpringContextTestBean instance = bean.getBeanInstance();
assertThat(instance).isNotNull();
assertThat(instance.getSource()).isEqualTo(BeanSource.FALLBACK);
assertThat(instance.getApplicationContext()).isNull();
}
@Test
void testCanRetrieveFallbackBeanByNameWithNativeOptions() {
BeanContainer beanContainer = getBeanContainer();
assertThat(beanContainer).isNotNull();
NoDefinitionInSpringContextTestBeanInstanceProducer fallbackProducer = new NoDefinitionInSpringContextTestBeanInstanceProducer();
ContainedBean<NoDefinitionInSpringContextTestBean> bean = beanContainer.getBean(
"some name", NoDefinitionInSpringContextTestBean.class,
NativeLifecycleOptions.INSTANCE, fallbackProducer
);
assertThat(fallbackProducer.currentUnnamedInstantiationCount()).isEqualTo(0);
assertThat(fallbackProducer.currentNamedInstantiationCount()).isEqualTo(1);
assertThat(bean).isNotNull();
NoDefinitionInSpringContextTestBean instance = bean.getBeanInstance();
assertThat(instance).isNotNull();
assertThat(instance.getSource()).isEqualTo(BeanSource.FALLBACK);
assertThat(instance.getName()).isEqualTo("some name");
assertThat(instance.getApplicationContext()).isNull();
}
@Test
void testFallbackExceptionInCaseOfNoSpringBeanFound() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
getBeanContainer().getBean(NoDefinitionInSpringContextTestBean.class,
NativeLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
));
}
@Test
void testOriginalExceptionInCaseOfFallbackProducerFailure() {
assertThatExceptionOfType(BeanCreationException.class).isThrownBy(() ->
getBeanContainer().getBean(AttributeConverter.class,
NativeLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
));
}
@Test
void testFallbackExceptionInCaseOfNoSpringBeanFoundByName() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
getBeanContainer().getBean("some name", NoDefinitionInSpringContextTestBean.class,
NativeLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
));
}
@Test
void testOriginalExceptionInCaseOfFallbackProducerFailureByName() {
assertThatExceptionOfType(BeanCreationException.class).isThrownBy(() ->
getBeanContainer().getBean("invalid", AttributeConverter.class,
NativeLifecycleOptions.INSTANCE, IneffectiveBeanInstanceProducer.INSTANCE
));
}
/**
* The lifecycle options mandated by the JPA spec and used as a default in Hibernate ORM.
*/
private static | HibernateNativeEntityManagerFactorySpringBeanContainerIntegrationTests |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/proxy/fakedns/DnameRecordEncoder.java | {
"start": 295,
"end": 561
} | class ____ extends ResourceRecordEncoder {
protected void putResourceRecordData(IoBuffer byteBuffer, ResourceRecord record) {
String domainName = record.get(DnsAttribute.DOMAIN_NAME);
putDomainName(byteBuffer, domainName);
}
}
| DnameRecordEncoder |
java | netty__netty | buffer/src/main/java/io/netty/buffer/search/AhoCorasicSearchProcessorFactory.java | {
"start": 1323,
"end": 1608
} | class ____ extends AbstractMultiSearchProcessorFactory {
private final int[] jumpTable;
private final int[] matchForNeedleId;
static final int BITS_PER_SYMBOL = 8;
static final int ALPHABET_SIZE = 1 << BITS_PER_SYMBOL;
private static | AhoCorasicSearchProcessorFactory |
java | apache__flink | flink-clients/src/test/java/org/apache/flink/client/program/rest/RestClusterClientTest.java | {
"start": 67855,
"end": 68625
} | class ____<
R extends RequestBody, P extends ResponseBody, M extends MessageParameters>
extends AbstractRestHandler<DispatcherGateway, R, P, M> {
private TestHandler(MessageHeaders<R, P, M> headers) {
super(mockGatewayRetriever, RpcUtils.INF_TIMEOUT, Collections.emptyMap(), headers);
}
}
private TestRestServerEndpoint createRestServerEndpoint(
final AbstractRestHandler<?, ?, ?, ?>... abstractRestHandlers) throws Exception {
TestRestServerEndpoint.Builder builder = TestRestServerEndpoint.builder(restConfig);
Arrays.stream(abstractRestHandlers).forEach(builder::withHandler);
return builder.buildAndStart();
}
@FunctionalInterface
private | TestHandler |
java | google__gson | gson/src/test/java/com/google/gson/functional/ObjectTest.java | {
"start": 20079,
"end": 21575
} | class ____ {
String a = "";
String b = "";
String c = "";
}
@Test
public void testJsonObjectSerialization() {
Gson gson = new GsonBuilder().serializeNulls().create();
JsonObject obj = new JsonObject();
String json = gson.toJson(obj);
assertThat(json).isEqualTo("{}");
}
/** Test for issue 215. */
@Test
public void testSingletonLists() {
Gson gson = new Gson();
Product product = new Product();
assertThat(gson.toJson(product)).isEqualTo("{\"attributes\":[],\"departments\":[]}");
Product deserialized = gson.fromJson(gson.toJson(product), Product.class);
assertThat(deserialized.attributes).isEmpty();
assertThat(deserialized.departments).isEmpty();
product.departments.add(new Department());
assertThat(gson.toJson(product))
.isEqualTo("{\"attributes\":[],\"departments\":[{\"name\":\"abc\",\"code\":\"123\"}]}");
deserialized = gson.fromJson(gson.toJson(product), Product.class);
assertThat(deserialized.attributes).isEmpty();
assertThat(deserialized.departments).hasSize(1);
product.attributes.add("456");
assertThat(gson.toJson(product))
.isEqualTo(
"{\"attributes\":[\"456\"],\"departments\":[{\"name\":\"abc\",\"code\":\"123\"}]}");
deserialized = gson.fromJson(gson.toJson(product), Product.class);
assertThat(deserialized.attributes).containsExactly("456");
assertThat(deserialized.departments).hasSize(1);
}
static final | ClassWithEmptyStringFields |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isNotIn_Test.java | {
"start": 1311,
"end": 2720
} | class ____ extends LocalDateAssertBaseTest {
@Test
void should_pass_if_actual_is_not_in_dates_as_string_array_parameter() {
assertThat(REFERENCE).isNotIn(AFTER.toString(), BEFORE.toString());
}
@Test
void should_fail_if_actual_is_in_dates_as_string_array_parameter() {
// WHEN
ThrowingCallable code = () -> assertThat(REFERENCE).isNotIn(REFERENCE.toString(), AFTER.toString());
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessage(shouldNotBeIn(REFERENCE, asList(REFERENCE, AFTER)).create());
}
@Test
void should_fail_if_dates_as_string_array_parameter_is_null() {
// GIVEN
String[] otherLocalDatesAsString = null;
// WHEN
ThrowingCallable code = () -> assertThat(LocalDate.now()).isNotIn(otherLocalDatesAsString);
// THEN
assertThatIllegalArgumentException().isThrownBy(code)
.withMessage("The given LocalDate array should not be null");
}
@Test
void should_fail_if_dates_as_string_array_parameter_is_empty() {
// GIVEN
String[] otherLocalDatesAsString = new String[0];
// WHEN
ThrowingCallable code = () -> assertThat(LocalDate.now()).isNotIn(otherLocalDatesAsString);
// THEN
assertThatIllegalArgumentException().isThrownBy(code)
.withMessage("The given LocalDate array should not be empty");
}
}
| LocalDateAssert_isNotIn_Test |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanDuplicateTypeReplacementIntegrationTests.java | {
"start": 3287,
"end": 3382
} | class ____ {
@Bean
ExampleService exampleService() {
return () -> "@Bean";
}
}
}
| Config |
java | apache__flink | flink-queryable-state/flink-queryable-state-runtime/src/test/java/org/apache/flink/queryablestate/network/ClientTest.java | {
"start": 34480,
"end": 35651
} | class ____ extends ChannelInboundHandlerAdapter {
private final MessageSerializer<KvStateInternalRequest, KvStateResponse> serializer;
private final byte[] serializedResult;
private RespondingChannelHandler(
MessageSerializer<KvStateInternalRequest, KvStateResponse> serializer,
byte[] serializedResult) {
this.serializer = serializer;
this.serializedResult = serializedResult;
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
ByteBuf buf = (ByteBuf) msg;
assertThat(MessageSerializer.deserializeHeader(buf)).isEqualTo(MessageType.REQUEST);
long requestId = MessageSerializer.getRequestId(buf);
KvStateInternalRequest request = serializer.deserializeRequest(buf);
buf.release();
KvStateResponse response = new KvStateResponse(serializedResult);
ByteBuf serResponse =
MessageSerializer.serializeResponse(ctx.alloc(), requestId, response);
ctx.channel().writeAndFlush(serResponse);
}
}
}
| RespondingChannelHandler |
java | apache__camel | components/camel-http/src/test/java/org/apache/camel/component/http/interceptor/BasicAuthTokenExtractor.java | {
"start": 1161,
"end": 2175
} | class ____ {
public String extract(final HttpRequest request) throws HttpException {
String auth = null;
Header h = request.getFirstHeader(HttpHeaders.AUTHORIZATION);
if (h != null) {
final String s = h.getValue();
if (s != null) {
auth = s.trim();
}
}
if (auth != null) {
final int i = auth.indexOf(' ');
if (i == -1) {
throw new ProtocolException("Invalid Authorization header: " + auth);
}
final String authscheme = auth.substring(0, i);
if (authscheme.equalsIgnoreCase("basic")) {
final String s = auth.substring(i + 1).trim();
byte[] credsRaw = s.getBytes(StandardCharsets.US_ASCII);
Base64.Decoder codec = Base64.getDecoder();
auth = new String(codec.decode(credsRaw), StandardCharsets.US_ASCII);
}
}
return auth;
}
}
| BasicAuthTokenExtractor |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/typeutils/SerializerTestInstance.java | {
"start": 1049,
"end": 3220
} | class ____<T> extends SerializerTestBase<T> {
private final TypeSerializer<T> serializer;
private final Class<T> typeClass;
private final int length;
private final T[] testData;
// --------------------------------------------------------------------------------------------
@SafeVarargs
public SerializerTestInstance(
TypeSerializer<T> serializer, Class<T> typeClass, int length, T... testData) {
this(new DeeplyEqualsChecker(), serializer, typeClass, length, testData);
}
@SafeVarargs
public SerializerTestInstance(
DeeplyEqualsChecker checker,
TypeSerializer<T> serializer,
Class<T> typeClass,
int length,
T... testData) {
super(checker);
this.serializer = serializer;
this.typeClass = typeClass;
this.length = length;
this.testData = testData;
}
// --------------------------------------------------------------------------------------------
@Override
protected TypeSerializer<T> createSerializer() {
return this.serializer;
}
@Override
protected int getLength() {
return this.length;
}
@Override
protected Class<T> getTypeClass() {
return this.typeClass;
}
@Override
protected T[] getTestData() {
return this.testData;
}
// --------------------------------------------------------------------------------------------
public void testAll() {
for (Method method : SerializerTestBase.class.getMethods()) {
if (method.getAnnotation(Test.class) == null) {
continue;
}
try {
method.invoke(this);
} catch (IllegalAccessException e) {
throw new RuntimeException("Unable to invoke test " + method.getName(), e);
} catch (InvocationTargetException e) {
sneakyThrow(e.getCause());
}
}
}
@SuppressWarnings("unchecked")
private static <E extends Throwable> void sneakyThrow(Throwable e) throws E {
throw (E) e;
}
}
| SerializerTestInstance |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArrays_assertHasSameSizeAs_with_Iterable_Test.java | {
"start": 1374,
"end": 2387
} | class ____ extends ShortArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
ThrowingCallable code = () -> arrays.assertHasSameSizeAs(someInfo(), actual, list("Solo", "Leia"));
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessage(actualIsNull());
}
@Test
void should_fail_if_size_of_actual_is_not_equal_to_expected_size() {
// GIVEN
AssertionInfo info = someInfo();
List<String> other = list("Solo", "Leia");
// WHEN
ThrowingCallable code = () -> arrays.assertHasSameSizeAs(info, actual, other);
// THEN
String error = shouldHaveSameSizeAs(actual, other, actual.length, other.size()).create(null, info.representation());
assertThatAssertionErrorIsThrownBy(code).withMessage(error);
}
@Test
void should_pass_if_size_of_actual_is_equal_to_expected_size() {
arrays.assertHasSameSizeAs(someInfo(), actual, list("Solo", "Leia", "Luke"));
}
}
| ShortArrays_assertHasSameSizeAs_with_Iterable_Test |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/buffer/DataBufferOutputStream.java | {
"start": 926,
"end": 1787
} | class ____ extends OutputStream {
private final DataBuffer dataBuffer;
private boolean closed;
public DataBufferOutputStream(DataBuffer dataBuffer) {
Assert.notNull(dataBuffer, "DataBuffer must not be null");
this.dataBuffer = dataBuffer;
}
@Override
public void write(int b) throws IOException {
checkClosed();
this.dataBuffer.ensureWritable(1);
this.dataBuffer.write((byte) b);
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
checkClosed();
if (len > 0) {
this.dataBuffer.ensureWritable(len);
this.dataBuffer.write(b, off, len);
}
}
@Override
public void close() {
if (this.closed) {
return;
}
this.closed = true;
}
private void checkClosed() throws IOException {
if (this.closed) {
throw new IOException("DataBufferOutputStream is closed");
}
}
}
| DataBufferOutputStream |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/module/Module.java | {
"start": 1519,
"end": 5235
} | interface ____ {
/**
* List names of all functions in this module. It excludes internal functions.
*
* @return a set of function names
*/
default Set<String> listFunctions() {
return Collections.emptySet();
}
/**
* List names of all functions in this module.
*
* <p>A module can decide to hide certain functions. For example, internal functions that can be
* resolved via {@link #getFunctionDefinition(String)} but should not be listed by default.
*
* @param includeHiddenFunctions whether to list hidden functions or not
* @return a set of function names
*/
default Set<String> listFunctions(boolean includeHiddenFunctions) {
return listFunctions();
}
/**
* Get an optional of {@link FunctionDefinition} by a given name.
*
* <p>It includes hidden functions even though not listed in {@link #listFunctions()}.
*
* @param name name of the {@link FunctionDefinition}.
* @return an optional function definition
*/
default Optional<FunctionDefinition> getFunctionDefinition(String name) {
return Optional.empty();
}
/**
* Returns a {@link DynamicTableSourceFactory} for creating source tables.
*
* <p>A factory is determined with the following precedence rule:
*
* <ul>
* <li>1. Factory provided by the corresponding catalog of a persisted table.
* <li>2. Factory provided by a module.
* <li>3. Factory discovered using Java SPI.
* </ul>
*
* <p>This will be called on loaded modules in the order in which they have been loaded. The
* first factory returned will be used.
*
* <p>This method can be useful to disable Java SPI completely or influence how temporary table
* sources should be created without a corresponding catalog.
*/
default Optional<DynamicTableSourceFactory> getTableSourceFactory() {
return Optional.empty();
}
/**
* Returns a {@link DynamicTableSinkFactory} for creating sink tables.
*
* <p>A factory is determined with the following precedence rule:
*
* <ul>
* <li>1. Factory provided by the corresponding catalog of a persisted table.
* <li>2. Factory provided by a module.
* <li>3. Factory discovered using Java SPI.
* </ul>
*
* <p>This will be called on loaded modules in the order in which they have been loaded. The
* first factory returned will be used.
*
* <p>This method can be useful to disable Java SPI completely or influence how temporary table
* sinks should be created without a corresponding catalog.
*/
default Optional<DynamicTableSinkFactory> getTableSinkFactory() {
return Optional.empty();
}
/**
* Returns a {@link ModelProviderFactory} for creating model providers.
*
* <p>A factory is determined with the following precedence rule:
*
* <ul>
* <li>1. Factory provided by the corresponding catalog of a persisted model. See {@link
* Catalog#getFactory()}
* <li>2. Factory provided by a module.
* <li>3. Factory discovered using Java SPI.
* </ul>
*
* <p>This will be called on loaded modules in the order in which they have been loaded. The
* first factory returned will be used.
*
* <p>This method can be useful to disable Java SPI completely or influence how temporary model
* providers should be created without a corresponding catalog.
*/
default Optional<ModelProviderFactory> getModelProviderFactory() {
return Optional.empty();
}
// user defined types, operators, rules, etc
}
| Module |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_filteredOnAssertions_ThrowingConsumer_Test.java | {
"start": 1369,
"end": 3969
} | class ____ extends ObjectArrayAssert_filtered_baseTest {
@Test
void should_rethrow_throwables_as_runtime_exceptions() {
// GIVEN
Throwable exception = new Throwable("boom!");
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(employees).filteredOnAssertions(throwingConsumer(exception)));
// THEN
then(throwable).isInstanceOf(RuntimeException.class)
.cause().isSameAs(exception);
}
@Test
void should_propagate_RuntimeException_as_is() {
// GIVEN
RuntimeException runtimeException = new RuntimeException("boom!");
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(employees).filteredOnAssertions(throwingConsumer(runtimeException)));
// THEN
then(throwable).isSameAs(runtimeException);
}
@Test
void should_filter_iterable_under_test_verifying_given_assertions() {
// GIVEN
ThrowingConsumer<? super Employee> old = employee -> assertThat(employee.getAge()).isGreaterThan(100);
// WHEN/THEN
then(employees).filteredOnAssertions(old)
.containsOnly(yoda, obiwan);
}
@Test
void should_fail_if_given_consumer_is_null() {
// GIVEN
ThrowingConsumer<? super Employee> consumer = null;
// WHEN/THEN
thenIllegalArgumentException().isThrownBy(() -> assertThat(employees).filteredOnAssertions(consumer))
.withMessage("The element assertions should not be null");
}
@Test
void should_keep_assertion_state() {
// GIVEN
String[] names = array("John", "Doe", "Jane", "Doe");
ThrowingConsumer<? super String> fourCharsWord = string -> assertThat(string.length()).isEqualTo(4);
// WHEN
ObjectArrayAssert<String> assertion = assertThat(names).as("test description")
.withFailMessage("error message")
.withRepresentation(UNICODE_REPRESENTATION)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOnAssertions(fourCharsWord)
.containsExactly("JOHN", "JANE");
// THEN
assertThat(assertion.descriptionText()).isEqualTo("test description");
assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION);
assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message");
}
}
| ObjectArrayAssert_filteredOnAssertions_ThrowingConsumer_Test |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/NotifyBrokerRoleChangedRequestHeader.java | {
"start": 1336,
"end": 3136
} | class ____ implements CommandCustomHeader {
private String masterAddress;
private Integer masterEpoch;
private Integer syncStateSetEpoch;
private Long masterBrokerId;
public NotifyBrokerRoleChangedRequestHeader() {
}
public NotifyBrokerRoleChangedRequestHeader(String masterAddress, Long masterBrokerId, Integer masterEpoch, Integer syncStateSetEpoch) {
this.masterAddress = masterAddress;
this.masterEpoch = masterEpoch;
this.syncStateSetEpoch = syncStateSetEpoch;
this.masterBrokerId = masterBrokerId;
}
public String getMasterAddress() {
return masterAddress;
}
public void setMasterAddress(String masterAddress) {
this.masterAddress = masterAddress;
}
public Integer getMasterEpoch() {
return masterEpoch;
}
public void setMasterEpoch(Integer masterEpoch) {
this.masterEpoch = masterEpoch;
}
public Integer getSyncStateSetEpoch() {
return syncStateSetEpoch;
}
public void setSyncStateSetEpoch(Integer syncStateSetEpoch) {
this.syncStateSetEpoch = syncStateSetEpoch;
}
public Long getMasterBrokerId() {
return masterBrokerId;
}
public void setMasterBrokerId(Long masterBrokerId) {
this.masterBrokerId = masterBrokerId;
}
@Override
public String toString() {
return "NotifyBrokerRoleChangedRequestHeader{" +
"masterAddress='" + masterAddress + '\'' +
", masterEpoch=" + masterEpoch +
", syncStateSetEpoch=" + syncStateSetEpoch +
", masterBrokerId=" + masterBrokerId +
'}';
}
@Override
public void checkFields() throws RemotingCommandException {
}
}
| NotifyBrokerRoleChangedRequestHeader |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/SimpleExecutionSlotAllocatorTest.java | {
"start": 2127,
"end": 11816
} | class ____ {
private static final ResourceProfile RESOURCE_PROFILE = ResourceProfile.fromResources(3, 5);
private static final ExecutionAttemptID EXECUTION_ATTEMPT_ID = createExecutionAttemptId();
@Test
void testSlotAllocation() {
final AllocationContext context = new AllocationContext();
final CompletableFuture<LogicalSlot> slotFuture =
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
assertThat(slotFuture).isCompleted();
assertThat(context.getSlotProvider().getRequests()).hasSize(1);
final PhysicalSlotRequest slotRequest =
context.getSlotProvider().getRequests().values().iterator().next();
assertThat(slotRequest.getSlotProfile().getPhysicalSlotResourceProfile())
.isEqualTo(RESOURCE_PROFILE);
}
@Test
void testDuplicateAllocationDoesNotRecreateLogicalSlotFuture() throws Exception {
final AllocationContext context = new AllocationContext();
final CompletableFuture<LogicalSlot> slotFuture1 =
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
final CompletableFuture<LogicalSlot> slotFuture2 =
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
assertThat(slotFuture1.get()).isSameAs(slotFuture2.get());
}
@Test
void testFailedPhysicalSlotRequestFailsLogicalSlotFuture() {
final AllocationContext context =
new AllocationContext(
TestingPhysicalSlotProvider.createWithoutImmediatePhysicalSlotCreation(),
false);
final CompletableFuture<LogicalSlot> slotFuture =
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
final SlotRequestId slotRequestId =
context.getSlotProvider().getFirstRequestOrFail().getSlotRequestId();
assertThat(slotFuture).isNotDone();
context.getSlotProvider()
.getResponses()
.get(slotRequestId)
.completeExceptionally(new Throwable());
assertThat(slotFuture).isCompletedExceptionally();
// next allocation allocates new slot
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
assertThat(context.getSlotProvider().getRequests()).hasSize(2);
}
@Test
void testSlotWillBeOccupiedIndefinitelyFalse() throws Exception {
testSlotWillBeOccupiedIndefinitely(false);
}
@Test
void testSlotWillBeOccupiedIndefinitelyTrue() throws Exception {
testSlotWillBeOccupiedIndefinitely(true);
}
private static void testSlotWillBeOccupiedIndefinitely(boolean slotWillBeOccupiedIndefinitely)
throws Exception {
final AllocationContext context =
new AllocationContext(
TestingPhysicalSlotProvider.createWithInfiniteSlotCreation(),
slotWillBeOccupiedIndefinitely);
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
final PhysicalSlotRequest slotRequest = context.getSlotProvider().getFirstRequestOrFail();
assertThat(slotRequest.willSlotBeOccupiedIndefinitely())
.isEqualTo(slotWillBeOccupiedIndefinitely);
final TestingPhysicalSlot physicalSlot =
context.getSlotProvider().getResponses().get(slotRequest.getSlotRequestId()).get();
assertThat(physicalSlot.getPayload()).isNotNull();
assertThat(physicalSlot.getPayload().willOccupySlotIndefinitely())
.isEqualTo(slotWillBeOccupiedIndefinitely);
}
@Test
void testLogicalSlotReleasingCancelsPhysicalSlotRequest() throws Exception {
testLogicalSlotRequestCancellationOrRelease(
true, true, (context, slotFuture) -> slotFuture.get().releaseSlot(null));
}
@Test
void testLogicalSlotCancellationCancelsPhysicalSlotRequest() throws Exception {
testLogicalSlotRequestCancellationOrRelease(
false,
true,
(context, slotFuture) -> {
assertThatThrownBy(
() -> {
context.getAllocator().cancel(EXECUTION_ATTEMPT_ID);
slotFuture.get();
})
.as("The logical future must finish with a cancellation exception.")
.isInstanceOf(CancellationException.class);
});
}
@Test
void testCompletedLogicalSlotCancellationDoesNotCancelPhysicalSlotRequest() throws Exception {
testLogicalSlotRequestCancellationOrRelease(
true,
false,
(context, slotFuture) -> {
context.getAllocator().cancel(EXECUTION_ATTEMPT_ID);
slotFuture.get();
});
}
private static void testLogicalSlotRequestCancellationOrRelease(
final boolean autoCompletePhysicalSlotFuture,
final boolean expectPhysicalSlotRequestCanceled,
final BiConsumerWithException<
AllocationContext, CompletableFuture<LogicalSlot>, Exception>
cancelOrReleaseAction)
throws Exception {
final TestingPhysicalSlotProvider physicalSlotProvider;
if (!autoCompletePhysicalSlotFuture) {
physicalSlotProvider =
TestingPhysicalSlotProvider.createWithoutImmediatePhysicalSlotCreation();
} else {
physicalSlotProvider = TestingPhysicalSlotProvider.createWithInfiniteSlotCreation();
}
final AllocationContext context = new AllocationContext(physicalSlotProvider, false);
final CompletableFuture<LogicalSlot> slotFuture1 =
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
cancelOrReleaseAction.accept(context, slotFuture1);
final SlotRequestId slotRequestId =
context.getSlotProvider().getFirstRequestOrFail().getSlotRequestId();
assertThat(context.getSlotProvider().getCancellations().containsKey(slotRequestId))
.isEqualTo(expectPhysicalSlotRequestCanceled);
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
final int expectedNumberOfRequests = expectPhysicalSlotRequestCanceled ? 2 : 1;
assertThat(context.getSlotProvider().getRequests()).hasSize(expectedNumberOfRequests);
}
@Test
void testPhysicalSlotReleasesLogicalSlots() throws Exception {
final AllocationContext context = new AllocationContext();
final CompletableFuture<LogicalSlot> slotFuture =
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
final TestingPayload payload = new TestingPayload();
slotFuture.thenAccept(logicalSlot -> logicalSlot.tryAssignPayload(payload));
final SlotRequestId slotRequestId =
context.getSlotProvider().getFirstRequestOrFail().getSlotRequestId();
final TestingPhysicalSlot physicalSlot =
context.getSlotProvider().getFirstResponseOrFail().get();
assertThat(payload.getTerminalStateFuture()).isNotDone();
assertThat(physicalSlot.getPayload()).isNotNull();
physicalSlot.getPayload().release(new Throwable());
assertThat(payload.getTerminalStateFuture()).isDone();
assertThat(context.getSlotProvider().getCancellations()).containsKey(slotRequestId);
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
// there should be one more physical slot allocation, as the first allocation should be
// removed after releasing all logical slots
assertThat(context.getSlotProvider().getRequests()).hasSize(2);
}
@Test
void testFailLogicalSlotIfPhysicalSlotIsFails() {
final AllocationContext context =
new AllocationContext(
TestingPhysicalSlotProvider.createWithFailingPhysicalSlotCreation(
new FlinkException("test failure")),
false);
final CompletableFuture<LogicalSlot> slotFuture =
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
assertThat(slotFuture).isCompletedExceptionally();
assertThat(context.getSlotProvider().getCancellations().keySet())
.isEqualTo(context.getSlotProvider().getRequests().keySet());
}
@Test
void testSlotProviderBatchSlotRequestTimeoutCheckIsEnabled() {
final AllocationContext context = new AllocationContext();
assertThat(context.getSlotProvider().isBatchSlotRequestTimeoutCheckEnabled()).isTrue();
}
@Test
void testPreferredLocationsOfSlotProfile() {
final AllocationContext context = new AllocationContext();
List<TaskManagerLocation> taskManagerLocations =
Collections.singletonList(
new TaskManagerLocation(
ResourceID.generate(), InetAddress.getLoopbackAddress(), 41));
context.getLocations()
.put(EXECUTION_ATTEMPT_ID.getExecutionVertexId(), taskManagerLocations);
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
assertThat(context.getSlotProvider().getRequests()).hasSize(1);
final PhysicalSlotRequest slotRequest =
context.getSlotProvider().getRequests().values().iterator().next();
assertThat(slotRequest.getSlotProfile().getPreferredLocations()).hasSize(1);
assertThat(slotRequest.getSlotProfile().getPreferredLocations())
.isEqualTo(taskManagerLocations);
}
private static | SimpleExecutionSlotAllocatorTest |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 88840,
"end": 89742
} | interface ____ {",
" Builder blim(int x);",
" Builder blam(String x);",
" Baz build();",
" }",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor())
.compile(javaFileObject);
assertThat(compilation)
.hadErrorContaining("Method blim does not correspond to a property method of foo.bar.Baz")
.inFile(javaFileObject)
.onLineContaining("Builder blim(int x)");
}
@Test
public void autoValueBuilderSetPrefixAndNoSetPrefix() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"public abstract | Builder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StringConcatToTextBlockTest.java | {
"start": 5903,
"end": 6281
} | class ____ {
String s =
\"""
string
literals\\
\""";
}
""")
.doTest(TEXT_MATCH);
}
// b/396965922
@Test
public void trailingSpacesInMultilineString() {
refactoringHelper
.addInputLines(
"Test.java",
"""
| Test |
java | elastic__elasticsearch | x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TextStructureOverrides.java | {
"start": 5291,
"end": 7755
} | class ____ {
private String charset;
private TextStructure.Format format;
private List<String> columnNames;
private Boolean hasHeaderRow;
private Character delimiter;
private Character quote;
private Boolean shouldTrimFields;
private String grokPattern;
private String timestampFormat;
private String timestampField;
private String ecsCompatibility;
public Builder setCharset(String charset) {
this.charset = charset;
return this;
}
public Builder setFormat(TextStructure.Format format) {
this.format = format;
return this;
}
public Builder setColumnNames(List<String> columnNames) {
this.columnNames = columnNames;
return this;
}
public Builder setHasHeaderRow(Boolean hasHeaderRow) {
this.hasHeaderRow = hasHeaderRow;
return this;
}
public Builder setDelimiter(Character delimiter) {
this.delimiter = delimiter;
return this;
}
public Builder setQuote(Character quote) {
this.quote = quote;
return this;
}
public Builder setShouldTrimFields(Boolean shouldTrimFields) {
this.shouldTrimFields = shouldTrimFields;
return this;
}
public Builder setGrokPattern(String grokPattern) {
this.grokPattern = grokPattern;
return this;
}
public Builder setTimestampFormat(String timestampFormat) {
this.timestampFormat = timestampFormat;
return this;
}
public Builder setTimestampField(String timestampField) {
this.timestampField = timestampField;
return this;
}
public Builder setEcsCompatibility(String ecsCompatibility) {
this.ecsCompatibility = ecsCompatibility;
return this;
}
public TextStructureOverrides build() {
return new TextStructureOverrides(
charset,
format,
columnNames,
hasHeaderRow,
delimiter,
quote,
shouldTrimFields,
grokPattern,
timestampFormat,
timestampField,
ecsCompatibility
);
}
}
}
| Builder |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/naming/NamingFactoryTest.java | {
"start": 960,
"end": 1927
} | class ____ {
@Test
void testCreateNamingServiceByPropertiesSuccess() throws NacosException {
NacosNamingService.IS_THROW_EXCEPTION.set(false);
assertNotNull(NamingFactory.createNamingService(new Properties()));
}
@Test
void testCreateNamingServiceByPropertiesFailure() {
NacosNamingService.IS_THROW_EXCEPTION.set(true);
assertThrows(NacosException.class, () -> NamingFactory.createNamingService(new Properties()));
}
@Test
void testCreateNamingServiceByServerAddrSuccess() throws NacosException {
NacosNamingService.IS_THROW_EXCEPTION.set(false);
assertNotNull(NamingFactory.createNamingService("localhost:8848"));
}
@Test
void testCreateNamingServiceByServerAddrFailure() {
NacosNamingService.IS_THROW_EXCEPTION.set(true);
assertThrows(NacosException.class, () -> NamingFactory.createNamingService("localhost:8848"));
}
} | NamingFactoryTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java | {
"start": 4543,
"end": 5408
} | class
____.setFilterClass(job,
SequenceFileInputFilter.PercentFilter.class);
SequenceFileInputFilter.PercentFilter.setFrequency(job, 1000);
// clean input dir
fs.delete(inDir, true);
// for a variety of lengths
for (int length = 0; length < MAX_LENGTH;
length+= random.nextInt(MAX_LENGTH/10)+1) {
LOG.info("******Number of records: "+length);
createSequenceFile(length);
int count = countRecords(1);
LOG.info("Accepted "+count+" records");
int expectedCount = length/1000;
if (expectedCount*1000!=length)
expectedCount++;
assertThat(count).isEqualTo(expectedCount);
}
// clean up
fs.delete(inDir, true);
}
@Test
public void testMD5Filter() throws Exception {
// set the filter | SequenceFileInputFilter |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/filter/stat/StatFilterContextListener.java | {
"start": 663,
"end": 1225
} | interface ____ {
void addUpdateCount(int updateCount);
void addFetchRowCount(int fetchRowCount);
void executeBefore(String sql, boolean inTransaction);
void executeAfter(String sql, long nanoSpan, Throwable error);
void commit();
void rollback();
void pool_connect();
void pool_close(long nanos);
void physical_connection_connect();
void physical_connection_close(long nanos);
void resultSet_open();
void resultSet_close(long nanos);
void clob_open();
void blob_open();
}
| StatFilterContextListener |
java | apache__camel | components/camel-ignite/src/test/java/org/apache/camel/component/ignite/IgniteCreationTest.java | {
"start": 1130,
"end": 1730
} | class ____ extends AbstractIgniteTest {
private Ignite ignite;
@Override
protected String getScheme() {
return "ignite-cache";
}
@Override
protected AbstractIgniteComponent createComponent() {
ignite = Ignition.start(createConfiguration());
return IgniteCacheComponent.fromIgnite(ignite);
}
@Test
public void testCAMEL11382() {
assertNotNull(ignite());
}
@AfterEach
public void stopUserManagedIgnite() {
if (ignite != null) {
Ignition.stop(ignite.name(), true);
}
}
}
| IgniteCreationTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/io/support/SpringFactoriesLoaderTests.java | {
"start": 14338,
"end": 14419
} | class ____ {
SingleConstructor(String arg) {
}
}
static | SingleConstructor |
java | apache__kafka | group-coordinator/src/main/java/org/apache/kafka/coordinator/group/modern/share/ShareGroupAssignmentBuilder.java | {
"start": 1290,
"end": 1521
} | class ____ the reconciliation engine of the
* share group protocol. Given the current state of a member and a desired or target
* assignment state, the state machine takes the necessary steps to converge them.
*/
public | encapsulates |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/admin/internals/RemoveMembersFromConsumerGroupHandlerTest.java | {
"start": 1913,
"end": 8078
} | class ____ {
private final LogContext logContext = new LogContext();
private final String groupId = "group-id";
private final MemberIdentity m1 = new MemberIdentity()
.setMemberId("m1")
.setGroupInstanceId("m1-gii");
private final MemberIdentity m2 = new MemberIdentity()
.setMemberId("m2")
.setGroupInstanceId("m2-gii");
private final List<MemberIdentity> members = Arrays.asList(m1, m2);
@Test
public void testBuildRequest() {
RemoveMembersFromConsumerGroupHandler handler = new RemoveMembersFromConsumerGroupHandler(groupId, members, logContext);
LeaveGroupRequest request = handler.buildBatchedRequest(1, singleton(CoordinatorKey.byGroupId(groupId))).build();
assertEquals(groupId, request.data().groupId());
assertEquals(2, request.data().members().size());
}
@Test
public void testSuccessfulHandleResponse() {
Map<MemberIdentity, Errors> responseData = Collections.singletonMap(m1, Errors.NONE);
assertCompleted(handleWithGroupError(Errors.NONE), responseData);
}
@Test
public void testUnmappedHandleResponse() {
assertUnmapped(handleWithGroupError(Errors.COORDINATOR_NOT_AVAILABLE));
assertUnmapped(handleWithGroupError(Errors.NOT_COORDINATOR));
}
@Test
public void testRetriableHandleResponse() {
assertRetriable(handleWithGroupError(Errors.COORDINATOR_LOAD_IN_PROGRESS));
}
@Test
public void testFailedHandleResponse() {
assertFailed(GroupAuthorizationException.class, handleWithGroupError(Errors.GROUP_AUTHORIZATION_FAILED));
assertFailed(UnknownServerException.class, handleWithGroupError(Errors.UNKNOWN_SERVER_ERROR));
}
@Test
public void testFailedHandleResponseInMemberLevel() {
assertMemberFailed(Errors.FENCED_INSTANCE_ID, handleWithMemberError(Errors.FENCED_INSTANCE_ID));
assertMemberFailed(Errors.UNKNOWN_MEMBER_ID, handleWithMemberError(Errors.UNKNOWN_MEMBER_ID));
}
private LeaveGroupResponse buildResponse(Errors error) {
return new LeaveGroupResponse(
new LeaveGroupResponseData()
.setErrorCode(error.code())
.setMembers(singletonList(
new MemberResponse()
.setErrorCode(Errors.NONE.code())
.setMemberId("m1")
.setGroupInstanceId("m1-gii"))));
}
private LeaveGroupResponse buildResponseWithMemberError(Errors error) {
return new LeaveGroupResponse(
new LeaveGroupResponseData()
.setErrorCode(Errors.NONE.code())
.setMembers(singletonList(
new MemberResponse()
.setErrorCode(error.code())
.setMemberId("m1")
.setGroupInstanceId("m1-gii"))));
}
private AdminApiHandler.ApiResult<CoordinatorKey, Map<MemberIdentity, Errors>> handleWithGroupError(
Errors error
) {
RemoveMembersFromConsumerGroupHandler handler = new RemoveMembersFromConsumerGroupHandler(groupId, members, logContext);
LeaveGroupResponse response = buildResponse(error);
return handler.handleResponse(new Node(1, "host", 1234), singleton(CoordinatorKey.byGroupId(groupId)), response);
}
private AdminApiHandler.ApiResult<CoordinatorKey, Map<MemberIdentity, Errors>> handleWithMemberError(
Errors error
) {
RemoveMembersFromConsumerGroupHandler handler = new RemoveMembersFromConsumerGroupHandler(groupId, members, logContext);
LeaveGroupResponse response = buildResponseWithMemberError(error);
return handler.handleResponse(new Node(1, "host", 1234), singleton(CoordinatorKey.byGroupId(groupId)), response);
}
private void assertUnmapped(
AdminApiHandler.ApiResult<CoordinatorKey, Map<MemberIdentity, Errors>> result
) {
assertEquals(emptySet(), result.completedKeys.keySet());
assertEquals(emptySet(), result.failedKeys.keySet());
assertEquals(singletonList(CoordinatorKey.byGroupId(groupId)), result.unmappedKeys);
}
private void assertRetriable(
AdminApiHandler.ApiResult<CoordinatorKey, Map<MemberIdentity, Errors>> result
) {
assertEquals(emptySet(), result.completedKeys.keySet());
assertEquals(emptySet(), result.failedKeys.keySet());
assertEquals(emptyList(), result.unmappedKeys);
}
private void assertCompleted(
AdminApiHandler.ApiResult<CoordinatorKey, Map<MemberIdentity, Errors>> result,
Map<MemberIdentity, Errors> expected
) {
CoordinatorKey key = CoordinatorKey.byGroupId(groupId);
assertEquals(emptySet(), result.failedKeys.keySet());
assertEquals(emptyList(), result.unmappedKeys);
assertEquals(singleton(key), result.completedKeys.keySet());
assertEquals(expected, result.completedKeys.get(key));
}
private void assertFailed(
Class<? extends Throwable> expectedExceptionType,
AdminApiHandler.ApiResult<CoordinatorKey, Map<MemberIdentity, Errors>> result
) {
CoordinatorKey key = CoordinatorKey.byGroupId(groupId);
assertEquals(emptySet(), result.completedKeys.keySet());
assertEquals(emptyList(), result.unmappedKeys);
assertEquals(singleton(key), result.failedKeys.keySet());
assertInstanceOf(expectedExceptionType, result.failedKeys.get(key));
}
private void assertMemberFailed(
Errors expectedError,
AdminApiHandler.ApiResult<CoordinatorKey, Map<MemberIdentity, Errors>> result
) {
Map<MemberIdentity, Errors> expectedResponseData = Collections.singletonMap(m1, expectedError);
CoordinatorKey key = CoordinatorKey.byGroupId(groupId);
assertEquals(emptySet(), result.failedKeys.keySet());
assertEquals(emptyList(), result.unmappedKeys);
assertEquals(singleton(key), result.completedKeys.keySet());
assertEquals(expectedResponseData, result.completedKeys.get(key));
}
}
| RemoveMembersFromConsumerGroupHandlerTest |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java | {
"start": 1444,
"end": 2039
} | class ____ generated. Edit `X-AllValueByTimestampAggregator.java.st` instead.
*/
@Aggregator(
{
@IntermediateState(name = "timestamps", type = "LONG"),
@IntermediateState(name = "values", type = "INT"),
@IntermediateState(name = "seen", type = "BOOLEAN"),
@IntermediateState(name = "hasValue", type = "BOOLEAN") }
)
@GroupingAggregator(
{
@IntermediateState(name = "timestamps", type = "LONG_BLOCK"),
@IntermediateState(name = "values", type = "INT_BLOCK"),
@IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") }
)
public | is |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/records/RecordDeserialization3897Test.java | {
"start": 308,
"end": 438
} | class ____ with single field annotated with
// `JsonProperty.Access.WRITE_ONLY`. Regression from Jackson 2.14.2
public | deserialization |
java | junit-team__junit5 | documentation/src/main/java/example/util/ListWriter.java | {
"start": 452,
"end": 673
} | class ____ {
private final Path file;
public ListWriter(Path file) {
this.file = file;
}
public void write(String... items) throws IOException {
Files.write(file, List.of(String.join(",", items)));
}
}
| ListWriter |
java | apache__camel | components/camel-aws/camel-aws2-s3/src/main/java/org/apache/camel/component/aws2/s3/client/AWS2S3ClientFactory.java | {
"start": 1338,
"end": 2249
} | class ____ {
private AWS2S3ClientFactory() {
}
/**
* Return the correct aws s3 client (based on remote vs local).
*
* @param configuration configuration
* @return AWSS3Client
*/
public static AWS2CamelS3InternalClient getAWSS3Client(AWS2S3Configuration configuration) {
if (Boolean.TRUE.equals(configuration.isUseDefaultCredentialsProvider())) {
return new AWS2S3ClientIAMOptimizedImpl(configuration);
} else if (Boolean.TRUE.equals(configuration.isUseProfileCredentialsProvider())) {
return new AWS2S3ClientIAMProfileOptimizedImpl(configuration);
} else if (Boolean.TRUE.equals(configuration.isUseSessionCredentials())) {
return new AWS2S3ClientSessionTokenImpl(configuration);
} else {
return new AWS2S3ClientStandardImpl(configuration);
}
}
}
| AWS2S3ClientFactory |
java | google__guice | core/test/com/google/inject/errors/MissingImplementationErrorTest.java | {
"start": 4923,
"end": 6614
} | class ____ {}
}
@Test
public void missingImplementationWithHints_memoizesSuggestion() throws Exception {
Injector injector = Guice.createInjector();
ConfigurationException ex =
assertThrows(ConfigurationException.class, () -> injector.getInstance(CustomType.class));
// Ensure that the message doesn't contain a "Did you mean?" by default,
// because there's no other type that fits.
assertThat(ex).hasMessageThat().doesNotContain("Did you mean?");
// And even after we insert another type that fits, we don't redo the suggestions.
injector.getInstance(CustomType.InnerType.class);
assertThat(ex).hasMessageThat().doesNotContain("Did you mean?");
}
@Test
public void missingImplementationWithHints_lazyInjectorUsage() throws Exception {
// Note: this test is extremely contrived. This scenario is unlikely to happen for real, but
// it's a very convenient way to assert that usage of the injector is lazy.
// By adding a type into the injector after the exception is thrown but before we
// call getMessage, we're validating that the suggestions are populated only on getMessage
// usage.
// This test works in tandem with the above one which asserts that by default,
// the message *will not* have suggestions.
Injector injector = Guice.createInjector();
ConfigurationException ex =
assertThrows(ConfigurationException.class, () -> injector.getInstance(CustomType.class));
injector.getInstance(CustomType.InnerType.class);
assertThat(ex).hasMessageThat().containsMatch("Did you mean?");
assertThat(ex).hasMessageThat().containsMatch("InnerType");
}
private static final | InnerType |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MasterEndpointBuilderFactory.java | {
"start": 1950,
"end": 7608
} | interface ____
extends
EndpointConsumerBuilder {
default MasterEndpointBuilder basic() {
return (MasterEndpointBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedMasterEndpointBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedMasterEndpointBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedMasterEndpointBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedMasterEndpointBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedMasterEndpointBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedMasterEndpointBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
}
public | AdvancedMasterEndpointBuilder |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/PathMatcher.java | {
"start": 1214,
"end": 5459
} | interface ____ {
/**
* Does the given {@code path} represent a pattern that can be matched
* by an implementation of this interface?
* <p>If the return value is {@code false}, then the {@link #match}
* method does not have to be used because direct equality comparisons
* on the static path Strings will lead to the same result.
* @param path the path to check
* @return {@code true} if the given {@code path} represents a pattern
*/
boolean isPattern(String path);
/**
* Match the given {@code path} against the given {@code pattern},
* according to this PathMatcher's matching strategy.
* @param pattern the pattern to match against
* @param path the path to test
* @return {@code true} if the supplied {@code path} matched,
* {@code false} if it didn't
*/
boolean match(String pattern, String path);
/**
* Match the given {@code path} against the corresponding part of the given
* {@code pattern}, according to this PathMatcher's matching strategy.
* <p>Determines whether the pattern at least matches as far as the given base
* path goes, assuming that a full path may then match as well.
* @param pattern the pattern to match against
* @param path the path to test
* @return {@code true} if the supplied {@code path} matched,
* {@code false} if it didn't
*/
boolean matchStart(String pattern, String path);
/**
* Given a pattern and a full path, determine the pattern-mapped part.
* <p>This method is supposed to find out which part of the path is matched
* dynamically through an actual pattern, that is, it strips off a statically
* defined leading path from the given full path, returning only the actually
* pattern-matched part of the path.
* <p>For example: For "myroot/*.html" as pattern and "myroot/myfile.html"
* as full path, this method should return "myfile.html". The detailed
* determination rules are specified to this PathMatcher's matching strategy.
* <p>A simple implementation may return the given full path as-is in case
* of an actual pattern, and the empty String in case of the pattern not
* containing any dynamic parts (i.e. the {@code pattern} parameter being
* a static path that wouldn't qualify as an actual {@link #isPattern pattern}).
* A sophisticated implementation will differentiate between the static parts
* and the dynamic parts of the given path pattern.
* @param pattern the path pattern
* @param path the full path to introspect
* @return the pattern-mapped part of the given {@code path}
* (never {@code null})
*/
String extractPathWithinPattern(String pattern, String path);
/**
* Given a pattern and a full path, extract the URI template variables. URI template
* variables are expressed through curly brackets ('{' and '}').
* <p>For example: For pattern "/hotels/{hotel}" and path "/hotels/1", this method will
* return a map containing "hotel" → "1".
* @param pattern the path pattern, possibly containing URI templates
* @param path the full path to extract template variables from
* @return a map, containing variable names as keys; variables values as values
*/
Map<String, String> extractUriTemplateVariables(String pattern, String path);
/**
* Given a full path, returns a {@link Comparator} suitable for sorting patterns
* in order of explicitness for that path.
* <p>The full algorithm used depends on the underlying implementation,
* but generally, the returned {@code Comparator} will
* {@linkplain java.util.List#sort(java.util.Comparator) sort}
* a list so that more specific patterns come before generic patterns.
* @param path the full path to use for comparison
* @return a comparator capable of sorting patterns in order of explicitness
*/
Comparator<String> getPatternComparator(String path);
/**
* Combines two patterns into a new pattern that is returned.
* <p>The full algorithm used for combining the two pattern depends on the underlying implementation.
* @param pattern1 the first pattern
* @param pattern2 the second pattern
* @return the combination of the two patterns
* @throws IllegalArgumentException when the two patterns cannot be combined
*/
String combine(String pattern1, String pattern2);
}
| PathMatcher |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/condition/NestableConditionFixtures.java | {
"start": 3741,
"end": 3832
} | class ____ {
final String name;
Country(String name) {
this.name = name;
}
}
| Country |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/BadAnnotationImplementationTest.java | {
"start": 5881,
"end": 6596
} | class ____ implements Annotation {
@Override
public Class<? extends Annotation> annotationType() {
return TestAnnotation.class;
}
@Override
public boolean equals(Object other) {
return false;
}
@Override
public int hashCode() {
return 0;
}
}
""")
.doTest();
}
@Test
public void declareInterfaceThatExtendsAnnotation() {
compilationHelper
.addSourceLines(
"TestAnnotation.java",
"""
import java.lang.annotation.Annotation;
public | TestAnnotation |
java | spring-projects__spring-boot | module/spring-boot-security/src/main/java/org/springframework/boot/security/autoconfigure/actuate/web/servlet/EndpointRequest.java | {
"start": 2635,
"end": 6181
} | class ____ {
private static final RequestMatcher EMPTY_MATCHER = (request) -> false;
private EndpointRequest() {
}
/**
* Returns a matcher that includes all {@link Endpoint actuator endpoints}. It also
* includes the links endpoint which is present at the base path of the actuator
* endpoints. The {@link EndpointRequestMatcher#excluding(Class...) excluding} method
* can be used to further remove specific endpoints if required. For example:
* <pre class="code">
* EndpointRequest.toAnyEndpoint().excluding(ShutdownEndpoint.class)
* </pre>
* @return the configured {@link RequestMatcher}
*/
public static EndpointRequestMatcher toAnyEndpoint() {
return new EndpointRequestMatcher(true);
}
/**
* Returns a matcher that includes the specified {@link Endpoint actuator endpoints}.
* For example: <pre class="code">
* EndpointRequest.to(ShutdownEndpoint.class, HealthEndpoint.class)
* </pre>
* @param endpoints the endpoints to include
* @return the configured {@link RequestMatcher}
*/
public static EndpointRequestMatcher to(Class<?>... endpoints) {
return new EndpointRequestMatcher(endpoints, false);
}
/**
* Returns a matcher that includes the specified {@link Endpoint actuator endpoints}.
* For example: <pre class="code">
* EndpointRequest.to("shutdown", "health")
* </pre>
* @param endpoints the endpoints to include
* @return the configured {@link RequestMatcher}
*/
public static EndpointRequestMatcher to(String... endpoints) {
return new EndpointRequestMatcher(endpoints, false);
}
/**
* Returns a matcher that matches only on the links endpoint. It can be used when
* security configuration for the links endpoint is different from the other
* {@link Endpoint actuator endpoints}. The
* {@link EndpointRequestMatcher#excludingLinks() excludingLinks} method can be used
* in combination with this to remove the links endpoint from
* {@link EndpointRequest#toAnyEndpoint() toAnyEndpoint}. For example:
* <pre class="code">
* EndpointRequest.toLinks()
* </pre>
* @return the configured {@link RequestMatcher}
*/
public static LinksRequestMatcher toLinks() {
return new LinksRequestMatcher();
}
/**
* Returns a matcher that includes additional paths under a {@link WebServerNamespace}
* for the specified {@link Endpoint actuator endpoints}. For example:
* <pre class="code">
* EndpointRequest.toAdditionalPaths(WebServerNamespace.SERVER, "health")
* </pre>
* @param webServerNamespace the web server namespace
* @param endpoints the endpoints to include
* @return the configured {@link RequestMatcher}
*/
public static AdditionalPathsEndpointRequestMatcher toAdditionalPaths(WebServerNamespace webServerNamespace,
Class<?>... endpoints) {
return new AdditionalPathsEndpointRequestMatcher(webServerNamespace, endpoints);
}
/**
* Returns a matcher that includes additional paths under a {@link WebServerNamespace}
* for the specified {@link Endpoint actuator endpoints}. For example:
* <pre class="code">
* EndpointRequest.toAdditionalPaths(WebServerNamespace.SERVER, HealthEndpoint.class)
* </pre>
* @param webServerNamespace the web server namespace
* @param endpoints the endpoints to include
* @return the configured {@link RequestMatcher}
*/
public static AdditionalPathsEndpointRequestMatcher toAdditionalPaths(WebServerNamespace webServerNamespace,
String... endpoints) {
return new AdditionalPathsEndpointRequestMatcher(webServerNamespace, endpoints);
}
/**
* Base | EndpointRequest |
java | google__guava | android/guava-tests/test/com/google/common/collect/PackageSanityTests.java | {
"start": 851,
"end": 1104
} | class ____ extends AbstractPackageSanityTests {
public PackageSanityTests() {
publicApiOnly(); // Many package-private classes are tested through the public API.
setDefault(DiscreteDomain.class, DiscreteDomain.integers());
}
}
| PackageSanityTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1300/Issue1341.java | {
"start": 1002,
"end": 2250
} | class ____ {
private int bookId;
private String bookName;
private String publisher;
private String isbn;
private Date publishTime;
private Object hello;
public int getBookId() {
return bookId;
}
public void setBookId(int bookId) {
this.bookId = bookId;
}
public String getBookName() {
return bookName;
}
public void setBookName(String bookName) {
this.bookName = bookName;
}
public String getPublisher() {
return publisher;
}
public void setPublisher(String publisher) {
this.publisher = publisher;
}
public String getIsbn() {
return isbn;
}
public void setIsbn(String isbn) {
this.isbn = isbn;
}
public Date getPublishTime() {
return publishTime;
}
public void setPublishTime(Date publishTime) {
this.publishTime = publishTime;
}
public Object getHello() {
return hello;
}
public void setHello(Object hello) {
this.hello = hello;
}
}
static | Book |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/AssistedInjectionParameters.java | {
"start": 1924,
"end": 4888
} | class ____ {
/**
* Returns the list of assisted factory parameters as {@link XParameterSpec}s.
*
* <p>The type of each parameter will be the resolved type given by the binding key, and the name
* of each parameter will be the name given in the {@link
* dagger.assisted.AssistedInject}-annotated constructor.
*/
public static ImmutableList<XParameterSpec> assistedFactoryParameterSpecs(
Binding binding, ShardImplementation shardImplementation) {
checkArgument(binding.kind() == BindingKind.ASSISTED_FACTORY);
XTypeElement factory = asTypeElement(binding.bindingElement().get());
AssistedFactoryMetadata metadata = AssistedFactoryMetadata.create(factory.getType());
XMethodType factoryMethodType =
metadata.factoryMethod().asMemberOf(binding.key().type().xprocessing());
return assistedParameterSpecs(
// Use the order of the parameters from the @AssistedFactory method but use the parameter
// names of the @AssistedInject constructor.
metadata.assistedFactoryAssistedParameters().stream()
.map(metadata.assistedInjectAssistedParametersMap()::get)
.collect(toImmutableList()),
factoryMethodType.getParameterTypes(),
shardImplementation);
}
/**
* Returns the list of assisted parameters as {@link XParameterSpec}s.
*
* <p>The type of each parameter will be the resolved type given by the binding key, and the name
* of each parameter will be the name given in the {@link
* dagger.assisted.AssistedInject}-annotated constructor.
*/
public static ImmutableList<XParameterSpec> assistedParameterSpecs(
Binding binding, ShardImplementation shardImplementation) {
checkArgument(binding.kind() == BindingKind.ASSISTED_INJECTION);
XConstructorElement constructor = asConstructor(binding.bindingElement().get());
XConstructorType constructorType = constructor.asMemberOf(binding.key().type().xprocessing());
return assistedParameterSpecs(
constructor.getParameters(), constructorType.getParameterTypes(), shardImplementation);
}
private static ImmutableList<XParameterSpec> assistedParameterSpecs(
List<XExecutableParameterElement> paramElements,
List<XType> paramTypes,
ShardImplementation shardImplementation) {
ImmutableList.Builder<XParameterSpec> assistedParameterSpecs = ImmutableList.builder();
for (int i = 0; i < paramElements.size(); i++) {
XExecutableParameterElement paramElement = paramElements.get(i);
XType paramType = paramTypes.get(i);
if (AssistedInjectionAnnotations.isAssistedParameter(paramElement)) {
assistedParameterSpecs.add(
XParameterSpecs.of(
shardImplementation.getUniqueFieldNameForAssistedParam(paramElement),
paramType.asTypeName()));
}
}
return assistedParameterSpecs.build();
}
private AssistedInjectionParameters() {}
}
| AssistedInjectionParameters |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/builder/mappingTarget/simple/SimpleBuilderMapper.java | {
"start": 358,
"end": 1246
} | interface ____ {
SimpleBuilderMapper INSTANCE = Mappers.getMapper( SimpleBuilderMapper.class );
@Mapping(target = "builder.name", source = "source.fullName")
SimpleImmutableTarget toImmutable(SimpleMutableSource source, @MappingTarget SimpleImmutableTarget.Builder builder);
@Mapping(target = "builder.name", source = "source.fullName")
void updateImmutable(SimpleMutableSource source, @MappingTarget SimpleImmutableTarget.Builder builder);
@Mapping(target = "name", source = "fullName")
SimpleImmutableTarget toImmutable(SimpleMutableSource source);
@Mapping(target = "name", source = "fullName")
MutableTarget toMutableTarget(SimpleMutableSource simpleMutableSource);
@Mapping(target = "name", source = "fullName")
void updateMutableTarget(SimpleMutableSource simpleMutableSource, @MappingTarget MutableTarget target);
}
| SimpleBuilderMapper |
java | quarkusio__quarkus | extensions/security-webauthn/runtime/src/main/java/io/quarkus/security/webauthn/WebAuthnController.java | {
"start": 400,
"end": 5287
} | class ____ {
private WebAuthnSecurity security;
public WebAuthnController(WebAuthnSecurity security) {
this.security = security;
}
/**
* Endpoint for getting a list of allowed origins
*
* @param ctx the current request
*/
public void wellKnown(RoutingContext ctx) {
try {
ctx.response()
.putHeader(HttpHeaders.CONTENT_TYPE, "application/json")
.end(new JsonObject()
.put("origins", security.getAllowedOrigins(ctx))
.encode());
} catch (IllegalArgumentException e) {
ctx.fail(400, e);
} catch (RuntimeException e) {
ctx.fail(e);
}
}
/**
* Endpoint for getting a register challenge and options
*
* @param ctx the current request
*/
public void registerOptionsChallenge(RoutingContext ctx) {
try {
String username = ctx.queryParams().get("username");
String displayName = ctx.queryParams().get("displayName");
withContext(() -> security.getRegisterChallenge(username, displayName, ctx))
.map(challenge -> security.toJsonString(challenge))
.subscribe().with(challenge -> ok(ctx, challenge), ctx::fail);
} catch (IllegalArgumentException e) {
ctx.fail(400, e);
} catch (RuntimeException e) {
ctx.fail(e);
}
}
private <T> Uni<T> withContext(Supplier<Uni<T>> uni) {
ManagedContext requestContext = Arc.container().requestContext();
requestContext.activate();
ContextState contextState = requestContext.getState();
return uni.get().eventually(() -> requestContext.destroy(contextState));
}
/**
* Endpoint for getting a login challenge and options
*
* @param ctx the current request
*/
public void loginOptionsChallenge(RoutingContext ctx) {
try {
String username = ctx.queryParams().get("username");
withContext(() -> security.getLoginChallenge(username, ctx))
.map(challenge -> security.toJsonString(challenge))
.subscribe().with(challenge -> ok(ctx, challenge), ctx::fail);
} catch (IllegalArgumentException e) {
ctx.fail(400, e);
} catch (RuntimeException e) {
ctx.fail(e);
}
}
/**
* Endpoint for login. This will call {@link}
*
* @param ctx the current request
*/
public void login(RoutingContext ctx) {
try {
// might throw runtime exception if there's no json or is bad formed
final JsonObject webauthnResp = ctx.getBodyAsJson();
withContext(() -> security.login(webauthnResp, ctx))
.onItem().call(record -> security.storage().update(record.getCredentialID(), record.getCounter()))
.subscribe().with(record -> {
security.rememberUser(record.getUsername(), ctx);
ok(ctx);
}, x -> ctx.fail(400, x));
} catch (IllegalArgumentException e) {
ctx.fail(400, e);
} catch (RuntimeException e) {
ctx.fail(e);
}
}
/**
* Endpoint for registration
*
* @param ctx the current request
*/
public void register(RoutingContext ctx) {
try {
final String username = ctx.queryParams().get("username");
// might throw runtime exception if there's no json or is bad formed
final JsonObject webauthnResp = ctx.getBodyAsJson();
withContext(() -> security.register(username, webauthnResp, ctx))
.onItem().call(record -> security.storage().create(record))
.subscribe().with(record -> {
security.rememberUser(record.getUsername(), ctx);
ok(ctx);
}, x -> ctx.fail(400, x));
} catch (IllegalArgumentException e) {
ctx.fail(400, e);
} catch (RuntimeException e) {
ctx.fail(e);
}
}
/**
* Endpoint for logout, redirects to the root URI
*
* @param ctx the current request
*/
public void logout(RoutingContext ctx) {
security.logout(ctx);
ctx.redirect("/");
}
private static void ok(RoutingContext ctx, String json) {
ctx.response()
.putHeader(HttpHeaders.CONTENT_TYPE, "application/json")
.end(json);
}
private static void ok(RoutingContext ctx) {
ctx.response()
.setStatusCode(204)
.end();
}
public void javascript(RoutingContext ctx) {
ctx.response().sendFile("webauthn.js");
}
}
| WebAuthnController |
java | quarkusio__quarkus | extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/AccessTokenAnnotationTest.java | {
"start": 8585,
"end": 11942
} | class ____ {
@Inject
@RestClient
DefaultClientDefaultExchange defaultClientDefaultExchange;
@Inject
@RestClient
DefaultClientEnabledExchange defaultClientEnabledExchange;
@Inject
@RestClient
NamedClientDefaultExchange namedClientDefaultExchange;
@Inject
@RestClient
DefaultClientDefaultExchange_OnMethod defaultClientDefaultExchange_OnMethod;
@Inject
@RestClient
DefaultClientEnabledExchange_OnMethod defaultClientEnabledExchange_OnMethod;
@Inject
@RestClient
NamedClientDefaultExchange_OnMethod namedClientDefaultExchange_OnMethod;
@Inject
@RestClient
MultipleClientsAndMultipleMethods multipleClientsAndMultipleMethods;
@Inject
JsonWebToken jwt;
@GET
@Path("token-propagation")
@RolesAllowed("admin")
public String userNameTokenPropagation(@QueryParam("client-key") String clientKey) {
return getResponseWithExchangedUsername(clientKey);
}
@GET
@Path("token-propagation-with-augmentor")
@RolesAllowed("tester") // tester role is granted by SecurityIdentityAugmentor
public String userNameTokenPropagationWithSecIdentityAugmentor(@QueryParam("client-key") String clientKey) {
return getResponseWithExchangedUsername(clientKey);
}
private String getResponseWithExchangedUsername(String clientKey) {
if ("alice".equals(jwt.getName())) {
return "original token username: " + jwt.getName() + " new token username: " + getUserName(clientKey);
} else {
throw new RuntimeException();
}
}
private String getUserName(String clientKey) {
return switch (clientKey) {
case "defaultClientDefaultExchange" -> defaultClientDefaultExchange.getUserName();
case "defaultClientEnabledExchange" -> defaultClientEnabledExchange.getUserName();
case "namedClientDefaultExchange" -> namedClientDefaultExchange.getUserName();
case "defaultClientDefaultExchange_OnMethod" -> defaultClientDefaultExchange_OnMethod.getUserName();
case "defaultClientEnabledExchange_OnMethod" -> defaultClientEnabledExchange_OnMethod.getUserName();
case "namedClientDefaultExchange_OnMethod" -> namedClientDefaultExchange_OnMethod.getUserName();
case "multipleClientsAndMultipleMethods_DefaultClientDefaultExchange" ->
multipleClientsAndMultipleMethods.getUserName_DefaultClientDefaultExchange();
case "multipleClientsAndMultipleMethods_DefaultClientEnabledExchange" ->
multipleClientsAndMultipleMethods.getUserName_DefaultClientEnabledExchange();
case "multipleClientsAndMultipleMethods_NamedClientDefaultExchange" ->
multipleClientsAndMultipleMethods.getUserName_NamedClientDefaultExchange();
case "multipleClientsAndMultipleMethods_NoAccessToken" ->
multipleClientsAndMultipleMethods.getUserName_NoAccessToken();
default -> throw new IllegalArgumentException("Unknown client key");
};
}
}
}
| MultiProviderFrontendResource |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PreferredInterfaceTypeTest.java | {
"start": 8177,
"end": 8632
} | class ____ {
List<String> foo() {
return ImmutableList.of();
}
}
""")
.doTest();
}
@Test
public void providesAnnotatedMethod_doesNotSuggestFix() {
testHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.collect.ImmutableList;
import dagger.Provides;
import java.util.List;
| Test |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/tools/Generate.java | {
"start": 10240,
"end": 10747
} | class ____ be used as%n"
+ " * the logger name.%n"
+ " * @return The custom Logger.%n"
+ " */%n"
+ " public static CLASSNAME create(final Object value) {%n"
+ " final Logger wrapped = LogManager.getLogger(value);%n"
+ " return new CLASSNAME(wrapped);%n"
+ " }%n"
+ "%n"
+ " /**%n"
+ " * Returns a custom Logger using the fully qualified | will |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/columnar/vector/heap/HeapBytesVector.java | {
"start": 1884,
"end": 4486
} | class ____ extends AbstractHeapVector implements WritableBytesVector {
private static final long serialVersionUID = -8529155738773478597L;
/** start offset of each field. */
public int[] start;
/** The length of each field. */
public int[] length;
/** buffer to use when actually copying in data. */
public byte[] buffer;
/** Hang onto a byte array for holding smaller byte values. */
private int elementsAppended = 0;
private int capacity;
/**
* Don't call this constructor except for testing purposes.
*
* @param size number of elements in the column vector
*/
public HeapBytesVector(int size) {
super(size);
capacity = size;
buffer = new byte[capacity];
start = new int[size];
length = new int[size];
}
@Override
public void reset() {
super.reset();
elementsAppended = 0;
}
@Override
public void appendBytes(int elementNum, byte[] sourceBuf, int start, int length) {
reserve(elementsAppended + length);
System.arraycopy(sourceBuf, start, buffer, elementsAppended, length);
this.start[elementNum] = elementsAppended;
this.length[elementNum] = length;
elementsAppended += length;
}
@Override
public void fill(byte[] value) {
reserve(start.length * value.length);
for (int i = 0; i < start.length; i++) {
System.arraycopy(value, 0, buffer, i * value.length, value.length);
}
for (int i = 0; i < start.length; i++) {
this.start[i] = i * value.length;
}
Arrays.fill(this.length, value.length);
}
private void reserve(int requiredCapacity) {
if (requiredCapacity > capacity) {
int newCapacity = requiredCapacity * 2;
try {
byte[] newData = new byte[newCapacity];
System.arraycopy(buffer, 0, newData, 0, elementsAppended);
buffer = newData;
capacity = newCapacity;
} catch (OutOfMemoryError outOfMemoryError) {
throw new UnsupportedOperationException(
requiredCapacity + " cannot be satisfied.", outOfMemoryError);
}
}
}
@Override
public Bytes getBytes(int i) {
if (dictionary == null) {
return new Bytes(buffer, start[i], length[i]);
} else {
byte[] bytes = dictionary.decodeToBinary(dictionaryIds.vector[i]);
return new Bytes(bytes, 0, bytes.length);
}
}
}
| HeapBytesVector |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/path/JSONPath_list_field.java | {
"start": 194,
"end": 1197
} | class ____ extends TestCase {
public void test_list_field() throws Exception {
JSONPath path = new JSONPath("$.name");
List<Entity> entities = new ArrayList<Entity>();
entities.add(new Entity("wenshao"));
entities.add(new Entity("ljw2083"));
List<String> names = (List<String>)path.eval(entities);
Assert.assertSame(entities.get(0).getName(), names.get(0));
Assert.assertSame(entities.get(1).getName(), names.get(1));
}
public void test_list_field_simple() throws Exception {
JSONPath path = new JSONPath("name");
List<Entity> entities = new ArrayList<Entity>();
entities.add(new Entity("wenshao"));
entities.add(new Entity("ljw2083"));
List<String> names = (List<String>) path.eval(entities);
Assert.assertSame(entities.get(0).getName(), names.get(0));
Assert.assertSame(entities.get(1).getName(), names.get(1));
}
public static | JSONPath_list_field |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecGlobalGroupAggregate.java | {
"start": 4257,
"end": 16016
} | class ____ extends StreamExecAggregateBase {
private static final Logger LOG = LoggerFactory.getLogger(StreamExecGlobalGroupAggregate.class);
public static final String GLOBAL_GROUP_AGGREGATE_TRANSFORMATION = "global-group-aggregate";
public static final String FIELD_NAME_LOCAL_AGG_INPUT_ROW_TYPE = "localAggInputRowType";
public static final String FIELD_NAME_INDEX_OF_COUNT_STAR = "indexOfCountStar";
public static final String STATE_NAME = "globalGroupAggregateState";
@JsonProperty(FIELD_NAME_GROUPING)
private final int[] grouping;
@JsonProperty(FIELD_NAME_AGG_CALLS)
private final AggregateCall[] aggCalls;
/** Each element indicates whether the corresponding agg call needs `retract` method. */
@JsonProperty(FIELD_NAME_AGG_CALL_NEED_RETRACTIONS)
private final boolean[] aggCallNeedRetractions;
/** The input row type of this node's local agg. */
@JsonProperty(FIELD_NAME_LOCAL_AGG_INPUT_ROW_TYPE)
private final RowType localAggInputRowType;
/** Whether this node will generate UPDATE_BEFORE messages. */
@JsonProperty(FIELD_NAME_GENERATE_UPDATE_BEFORE)
private final boolean generateUpdateBefore;
/** Whether this node consumes retraction messages. */
@JsonProperty(FIELD_NAME_NEED_RETRACTION)
private final boolean needRetraction;
/** The position for the existing count star. */
@JsonProperty(FIELD_NAME_INDEX_OF_COUNT_STAR)
@JsonInclude(JsonInclude.Include.NON_NULL)
protected final Integer indexOfCountStar;
@Nullable
@JsonProperty(FIELD_NAME_STATE)
@JsonInclude(JsonInclude.Include.NON_NULL)
private final List<StateMetadata> stateMetadataList;
public StreamExecGlobalGroupAggregate(
ReadableConfig tableConfig,
int[] grouping,
AggregateCall[] aggCalls,
boolean[] aggCallNeedRetractions,
RowType localAggInputRowType,
boolean generateUpdateBefore,
boolean needRetraction,
@Nullable Integer indexOfCountStar,
@Nullable Long stateTtlFromHint,
InputProperty inputProperty,
RowType outputType,
String description) {
this(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecGlobalGroupAggregate.class),
ExecNodeContext.newPersistedConfig(
StreamExecGlobalGroupAggregate.class, tableConfig),
grouping,
aggCalls,
aggCallNeedRetractions,
localAggInputRowType,
generateUpdateBefore,
needRetraction,
indexOfCountStar,
StateMetadata.getOneInputOperatorDefaultMeta(
stateTtlFromHint, tableConfig, STATE_NAME),
Collections.singletonList(inputProperty),
outputType,
description);
}
@JsonCreator
public StreamExecGlobalGroupAggregate(
@JsonProperty(FIELD_NAME_ID) int id,
@JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context,
@JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig,
@JsonProperty(FIELD_NAME_GROUPING) int[] grouping,
@JsonProperty(FIELD_NAME_AGG_CALLS) AggregateCall[] aggCalls,
@JsonProperty(FIELD_NAME_AGG_CALL_NEED_RETRACTIONS) boolean[] aggCallNeedRetractions,
@JsonProperty(FIELD_NAME_LOCAL_AGG_INPUT_ROW_TYPE) RowType localAggInputRowType,
@JsonProperty(FIELD_NAME_GENERATE_UPDATE_BEFORE) boolean generateUpdateBefore,
@JsonProperty(FIELD_NAME_NEED_RETRACTION) boolean needRetraction,
@JsonProperty(FIELD_NAME_INDEX_OF_COUNT_STAR) @Nullable Integer indexOfCountStar,
@Nullable @JsonProperty(FIELD_NAME_STATE) List<StateMetadata> stateMetadataList,
@JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
@JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType,
@JsonProperty(FIELD_NAME_DESCRIPTION) String description) {
super(id, context, persistedConfig, inputProperties, outputType, description);
this.grouping = checkNotNull(grouping);
this.aggCalls = checkNotNull(aggCalls);
this.aggCallNeedRetractions = checkNotNull(aggCallNeedRetractions);
checkArgument(aggCalls.length == aggCallNeedRetractions.length);
this.localAggInputRowType = checkNotNull(localAggInputRowType);
this.generateUpdateBefore = generateUpdateBefore;
this.needRetraction = needRetraction;
checkArgument(indexOfCountStar == null || indexOfCountStar >= 0 && needRetraction);
this.indexOfCountStar = indexOfCountStar;
this.stateMetadataList = stateMetadataList;
}
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
long stateRetentionTime =
StateMetadata.getStateTtlForOneInputOperator(config, stateMetadataList);
if (grouping.length > 0 && stateRetentionTime < 0) {
LOG.warn(
"No state retention interval configured for a query which accumulates state. "
+ "Please provide a query configuration with valid retention interval to prevent excessive "
+ "state size. You may specify a retention time of 0 to not clean up the state.");
}
final ExecEdge inputEdge = getInputEdges().get(0);
final Transformation<RowData> inputTransform =
(Transformation<RowData>) inputEdge.translateToPlan(planner);
final RowType inputRowType = (RowType) inputEdge.getOutputType();
final AggregateInfoList localAggInfoList =
AggregateUtil.transformToStreamAggregateInfoList(
planner.getTypeFactory(),
localAggInputRowType,
JavaScalaConversionUtil.toScala(Arrays.asList(aggCalls)),
aggCallNeedRetractions,
needRetraction,
JavaScalaConversionUtil.toScala(Optional.ofNullable(indexOfCountStar)),
false, // isStateBackendDataViews
true); // needDistinctInfo
final AggregateInfoList globalAggInfoList =
AggregateUtil.transformToStreamAggregateInfoList(
planner.getTypeFactory(),
localAggInputRowType,
JavaScalaConversionUtil.toScala(Arrays.asList(aggCalls)),
aggCallNeedRetractions,
needRetraction,
JavaScalaConversionUtil.toScala(Optional.ofNullable(indexOfCountStar)),
true, // isStateBackendDataViews
true); // needDistinctInfo
final GeneratedAggsHandleFunction localAggsHandler =
generateAggsHandler(
"LocalGroupAggsHandler",
localAggInfoList,
grouping.length,
localAggInfoList.getAccTypes(),
config,
planner.getFlinkContext().getClassLoader(),
planner.createRelBuilder());
final GeneratedAggsHandleFunction globalAggsHandler =
generateAggsHandler(
"GlobalGroupAggsHandler",
globalAggInfoList,
0, // mergedAccOffset
localAggInfoList.getAccTypes(),
config,
planner.getFlinkContext().getClassLoader(),
planner.createRelBuilder());
final int indexOfCountStar = globalAggInfoList.getIndexOfCountStar();
final LogicalType[] globalAccTypes =
Arrays.stream(globalAggInfoList.getAccTypes())
.map(LogicalTypeDataTypeConverter::fromDataTypeToLogicalType)
.toArray(LogicalType[]::new);
final LogicalType[] globalAggValueTypes =
Arrays.stream(globalAggInfoList.getActualValueTypes())
.map(LogicalTypeDataTypeConverter::fromDataTypeToLogicalType)
.toArray(LogicalType[]::new);
final GeneratedRecordEqualiser recordEqualiser =
new EqualiserCodeGenerator(
globalAggValueTypes, planner.getFlinkContext().getClassLoader())
.generateRecordEqualiser("GroupAggValueEqualiser");
final OneInputStreamOperator<RowData, RowData> operator;
final boolean isMiniBatchEnabled = MinibatchUtil.isMiniBatchEnabled(config);
if (isMiniBatchEnabled) {
MiniBatchGlobalGroupAggFunction aggFunction =
new MiniBatchGlobalGroupAggFunction(
localAggsHandler,
globalAggsHandler,
recordEqualiser,
globalAccTypes,
indexOfCountStar,
generateUpdateBefore,
stateRetentionTime);
operator =
new KeyedMapBundleOperator<>(
aggFunction, MinibatchUtil.createMiniBatchTrigger(config));
} else {
throw new TableException("Local-Global optimization is only worked in miniBatch mode");
}
// partitioned aggregation
final OneInputTransformation<RowData, RowData> transform =
ExecNodeUtil.createOneInputTransformation(
inputTransform,
createTransformationMeta(GLOBAL_GROUP_AGGREGATE_TRANSFORMATION, config),
operator,
InternalTypeInfo.of(getOutputType()),
inputTransform.getParallelism(),
false);
// set KeyType and Selector for state
final RowDataKeySelector selector =
KeySelectorUtil.getRowDataSelector(
planner.getFlinkContext().getClassLoader(),
grouping,
InternalTypeInfo.of(inputRowType));
transform.setStateKeySelector(selector);
transform.setStateKeyType(selector.getProducedType());
return transform;
}
private GeneratedAggsHandleFunction generateAggsHandler(
String name,
AggregateInfoList aggInfoList,
int mergedAccOffset,
DataType[] mergedAccExternalTypes,
ExecNodeConfig config,
ClassLoader classLoader,
RelBuilder relBuilder) {
// For local aggregate, the result will be buffered, so copyInputField is true.
// For global aggregate, result will be put into state, then not need copy
// but this global aggregate result will be put into a buffered map first,
// then multi-put to state, so copyInputField is true.
AggsHandlerCodeGenerator generator =
new AggsHandlerCodeGenerator(
new CodeGeneratorContext(config, classLoader),
relBuilder,
JavaScalaConversionUtil.toScala(localAggInputRowType.getChildren()),
true);
return generator
.needMerge(mergedAccOffset, true, mergedAccExternalTypes)
.generateAggsHandler(name, aggInfoList);
}
}
| StreamExecGlobalGroupAggregate |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/annotation/AutowiredAnnotationBeanPostProcessorTests.java | {
"start": 153120,
"end": 153190
} | class ____ extends NestedTestBean {
}
public | FixedOrder2NestedTestBean |
java | netty__netty | codec-dns/src/main/java/io/netty/handler/codec/dns/DefaultDnsRawRecord.java | {
"start": 878,
"end": 1524
} | class ____ extends AbstractDnsRecord implements DnsRawRecord {
private final ByteBuf content;
/**
* Creates a new {@link #CLASS_IN IN-class} record.
*
* @param name the domain name
* @param type the type of the record
* @param timeToLive the TTL value of the record
*/
public DefaultDnsRawRecord(String name, DnsRecordType type, long timeToLive, ByteBuf content) {
this(name, type, DnsRecord.CLASS_IN, timeToLive, content);
}
/**
* Creates a new record.
*
* @param name the domain name
* @param type the type of the record
* @param dnsClass the | DefaultDnsRawRecord |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/type/AbstractAnnotationMetadataTests.java | {
"start": 13616,
"end": 13718
} | class ____ {
@DirectAnnotation1
public void test() {
}
}
public static | WithAnnotatedMethod |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cfg/PropertyAccessTypeDetectionTest.java | {
"start": 812,
"end": 1823
} | class ____ {
public static final String intValue = "intValue";
private Long id;
private Integer _intValue;
public Long getId() { return id; }
public void setId(Long id) { this.id = id; }
public Integer getIntValue() { return _intValue; }
public void setIntValue(Integer intValue) { this._intValue = intValue; }
}
@Test
public void testPropertyAccessIgnoresStaticFields(SessionFactoryScope factoryScope) {
// verify that the entity persister is configured with property intValue as an Integer rather than
// using the static field reference and determining the type to be String.
final EntityPersister entityDescriptor = factoryScope
.getSessionFactory()
.getMappingMetamodel()
.getEntityDescriptor( FooEntity.class );
final AttributeMapping attributeMapping = entityDescriptor.findAttributeMapping( "intValue" );
assertThat( attributeMapping ).isNotNull();
assertThat( attributeMapping.getJavaType().getJavaTypeClass() ).isAssignableFrom( Integer.class );
}
}
| FooEntity |
java | alibaba__nacos | console/src/main/java/com/alibaba/nacos/console/handler/impl/remote/ai/McpRemoteHandler.java | {
"start": 1954,
"end": 4924
} | class ____ implements McpHandler {
private final NacosMaintainerClientHolder clientHolder;
public McpRemoteHandler(NacosMaintainerClientHolder clientHolder) {
this.clientHolder = clientHolder;
}
@Override
public Page<McpServerBasicInfo> listMcpServers(String namespaceId, String mcpName, String search, int pageNo,
int pageSize) throws NacosException {
if (Constants.MCP_LIST_SEARCH_ACCURATE.equalsIgnoreCase(search)) {
return clientHolder.getAiMaintainerService().listMcpServer(namespaceId, mcpName, pageNo, pageSize);
} else {
return clientHolder.getAiMaintainerService().searchMcpServer(namespaceId, mcpName, pageNo, pageSize);
}
}
@Override
public McpServerDetailInfo getMcpServer(String namespaceId, String mcpName, String mcpId, String version)
throws NacosException {
return clientHolder.getAiMaintainerService().getMcpServerDetail(namespaceId, mcpName, mcpId, version);
}
@Override
public String createMcpServer(String namespaceId, McpServerBasicInfo serverSpecification,
McpToolSpecification toolSpecification, McpEndpointSpec endpointSpecification) throws NacosException {
return clientHolder.getAiMaintainerService()
.createMcpServer(namespaceId, serverSpecification.getName(), serverSpecification, toolSpecification,
endpointSpecification);
}
@Override
public void updateMcpServer(String namespaceId, boolean isPublish, McpServerBasicInfo serverSpecification,
McpToolSpecification toolSpecification, McpEndpointSpec endpointSpecification, boolean overrideExisting) throws NacosException {
clientHolder.getAiMaintainerService()
.updateMcpServer(namespaceId, serverSpecification.getName(), isPublish, serverSpecification,
toolSpecification, endpointSpecification, overrideExisting);
}
@Override
public void deleteMcpServer(String namespaceId, String mcpName, String mcpId, String version)
throws NacosException {
clientHolder.getAiMaintainerService().deleteMcpServer(namespaceId, mcpName, mcpId, version);
}
@Override
public McpServerImportValidationResult validateImport(String namespaceId, McpServerImportRequest request)
throws NacosException {
throw new NacosApiException(NacosException.SERVER_NOT_IMPLEMENTED, ErrorCode.API_FUNCTION_DISABLED,
"MCP import functionality is not supported in remote mode");
}
@Override
public McpServerImportResponse executeImport(String namespaceId, McpServerImportRequest request)
throws NacosException {
throw new NacosApiException(NacosException.SERVER_NOT_IMPLEMENTED, ErrorCode.API_FUNCTION_DISABLED,
"MCP import functionality is not supported in remote mode");
}
}
| McpRemoteHandler |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/MonoRunnableTest.java | {
"start": 1302,
"end": 4851
} | class ____ {
@Test
public void nullValue() {
assertThatExceptionOfType(NullPointerException.class).isThrownBy(() -> {
new MonoRunnable<>(null);
});
}
@Test
public void normal() {
AssertSubscriber<Void> ts = AssertSubscriber.create();
Mono.<Void>fromRunnable(() -> {
})
.subscribe(ts);
ts.assertNoValues()
.assertComplete()
.assertNoError();
}
@Test
public void normalBackpressured() {
AssertSubscriber<Void> ts = AssertSubscriber.create(0);
Mono.<Void>fromRunnable(() -> {
})
.hide()
.subscribe(ts);
ts.assertNoValues()
.assertComplete()
.assertNoError();
}
@Test
public void asyncRunnable() {
AtomicReference<Thread> t = new AtomicReference<>();
StepVerifier.create(Mono.fromRunnable(() -> t.set(Thread.currentThread()))
.subscribeOn(Schedulers.single()))
.verifyComplete();
assertThat(t).isNotNull();
assertThat(t).isNotEqualTo(Thread.currentThread());
}
@Test
public void asyncRunnableBackpressured() {
AtomicReference<Thread> t = new AtomicReference<>();
StepVerifier.create(Mono.fromRunnable(() -> t.set(Thread.currentThread()))
.subscribeOn(Schedulers.single()), 0)
.verifyComplete();
assertThat(t).isNotNull();
assertThat(t).isNotEqualTo(Thread.currentThread());
}
@Test
public void runnableThrows() {
AssertSubscriber<Object> ts = AssertSubscriber.create();
Mono.fromRunnable(() -> {
throw new RuntimeException("forced failure");
})
.subscribe(ts);
ts.assertNoValues()
.assertNotComplete()
.assertError(RuntimeException.class)
.assertErrorMessage("forced failure");
}
@Test
public void nonFused() {
AssertSubscriber<Void> ts = AssertSubscriber.create();
Mono.<Void>fromRunnable(() -> {
})
.subscribe(ts);
ts.assertNonFuseableSource()
.assertNoValues();
}
@Test
public void test() {
int c[] = { 0 };
Flux.range(1, 1000)
.flatMap(v -> Mono.fromRunnable(() -> { c[0]++; }))
.ignoreElements()
.block();
assertThat(c[0]).isEqualTo(1000);
}
//see https://github.com/reactor/reactor-core/issues/1503
@Test
public void runnableCancelledBeforeRun() {
AtomicBoolean actual = new AtomicBoolean(true);
Mono<?> mono = Mono.fromRunnable(() -> actual.set(false))
.doOnSubscribe(Subscription::cancel);
StepVerifier.create(mono)
.expectSubscription()
.expectNoEvent(Duration.ofSeconds(1))
.thenCancel()
.verify();
assertThat(actual).as("cancelled before run").isTrue();
}
//see https://github.com/reactor/reactor-core/issues/1503
//see https://github.com/reactor/reactor-core/issues/1504
@Test
public void runnableSubscribeToCompleteMeasurement() {
AtomicLong subscribeTs = new AtomicLong();
Mono<Object> mono = Mono.fromRunnable(() -> {
try {
Thread.sleep(500);
}
catch (InterruptedException e) {
e.printStackTrace();
}
})
.doOnSubscribe(sub -> subscribeTs.set(-1 * System.nanoTime()))
.doFinally(fin -> subscribeTs.addAndGet(System.nanoTime()));
StepVerifier.create(mono)
.verifyComplete();
assertThat(TimeUnit.NANOSECONDS.toMillis(subscribeTs.get())).isCloseTo(500L, Offset.offset(50L));
}
@Test
public void scanOperator(){
MonoRunnable<String> test = new MonoRunnable<>(() -> {});
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
}
| MonoRunnableTest |
java | dropwizard__dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/errors/DefaultJacksonMessageBodyProvider.java | {
"start": 193,
"end": 369
} | class ____ extends JacksonMessageBodyProvider {
public DefaultJacksonMessageBodyProvider() {
super(Jackson.newObjectMapper());
}
}
| DefaultJacksonMessageBodyProvider |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java | {
"start": 15738,
"end": 15811
} | class ____<T extends StringProducer> extends InterBar<T> {
}
public | SubBar |
java | micronaut-projects__micronaut-core | http-server-netty/src/main/java/io/micronaut/http/server/netty/binders/NettyServerRequestBinderRegistry.java | {
"start": 1796,
"end": 4822
} | class ____ implements RequestBinderRegistry {
private final DefaultRequestBinderRegistry internalRequestBinderRegistry;
public NettyServerRequestBinderRegistry(ConversionService conversionService,
List<RequestArgumentBinder> binders,
BeanProvider<NettyHttpServerConfiguration> httpServerConfiguration,
@Named(TaskExecutors.BLOCKING)
BeanProvider<ExecutorService> executorService,
MessageBodyHandlerRegistry bodyHandlerRegistry) {
NettyBodyAnnotationBinder<Object> nettyBodyAnnotationBinder = new NettyBodyAnnotationBinder<>(conversionService, httpServerConfiguration.get(), bodyHandlerRegistry);
internalRequestBinderRegistry = new DefaultRequestBinderRegistry(conversionService, binders, nettyBodyAnnotationBinder);
internalRequestBinderRegistry.addArgumentBinder(new NettyCompletableFutureBodyBinder(
nettyBodyAnnotationBinder));
internalRequestBinderRegistry.addArgumentBinder(new NettyPublisherBodyBinder(
nettyBodyAnnotationBinder));
internalRequestBinderRegistry.addArgumentBinder(new MultipartBodyArgumentBinder(
httpServerConfiguration
));
internalRequestBinderRegistry.addArgumentBinder(new NettyInputStreamBodyBinder());
NettyStreamingFileUpload.Factory fileUploadFactory = new NettyStreamingFileUpload.Factory(
httpServerConfiguration.get().getMultipart(),
executorService.get()
);
internalRequestBinderRegistry.addArgumentBinder(new NettyStreamingFileUploadBinder(fileUploadFactory));
NettyCompletedFileUploadBinder completedFileUploadBinder = new NettyCompletedFileUploadBinder(conversionService);
internalRequestBinderRegistry.addArgumentBinder(completedFileUploadBinder);
NettyPublisherPartUploadBinder publisherPartUploadBinder = new NettyPublisherPartUploadBinder(conversionService, fileUploadFactory);
internalRequestBinderRegistry.addArgumentBinder(publisherPartUploadBinder);
NettyPartUploadAnnotationBinder<Object> partUploadAnnotationBinder = new NettyPartUploadAnnotationBinder<>(
conversionService,
completedFileUploadBinder,
publisherPartUploadBinder
);
internalRequestBinderRegistry.addArgumentBinder(partUploadAnnotationBinder);
internalRequestBinderRegistry.addUnmatchedRequestArgumentBinder(partUploadAnnotationBinder);
}
@Override
public <T> void addArgumentBinder(ArgumentBinder<T, HttpRequest<?>> binder) {
internalRequestBinderRegistry.addArgumentBinder(binder);
}
@Override
public <T> Optional<ArgumentBinder<T, HttpRequest<?>>> findArgumentBinder(Argument<T> argument) {
return internalRequestBinderRegistry.findArgumentBinder(argument);
}
}
| NettyServerRequestBinderRegistry |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/shareddata/AsyncMap.java | {
"start": 970,
"end": 1088
} | interface ____ handle {@link io.vertx.core.shareddata.ClusterSerializable} implementing objects.
*/
@VertxGen
public | must |
java | dropwizard__dropwizard | dropwizard-core/src/main/java/io/dropwizard/core/Application.java | {
"start": 424,
"end": 774
} | class ____ Dropwizard applications.
*
* Because the default constructor will be inherited by all
* subclasses, {BootstrapLogging.bootstrap()} will always be
* invoked. The log level used during the bootstrap process can be
* configured by {Application} subclasses by overriding
* {#bootstrapLogLevel}.
*
* @param <T> the type of configuration | for |
java | apache__maven | impl/maven-impl/src/test/java/org/apache/maven/impl/DefaultPathMatcherFactoryTest.java | {
"start": 1589,
"end": 10722
} | class ____ {
private final PathMatcherFactory factory = new DefaultPathMatcherFactory();
@Test
public void testCreatePathMatcherWithNullBaseDirectory() {
assertThrows(NullPointerException.class, () -> {
factory.createPathMatcher(null, List.of("**/*.java"), List.of("**/target/**"), false);
});
}
@Test
public void testCreatePathMatcherBasic(@TempDir Path tempDir) throws IOException {
// Create test files
Path srcDir = Files.createDirectories(tempDir.resolve("src/main/java"));
Path testDir = Files.createDirectories(tempDir.resolve("src/test/java"));
Path targetDir = Files.createDirectories(tempDir.resolve("target"));
Files.createFile(srcDir.resolve("Main.java"));
Files.createFile(testDir.resolve("Test.java"));
Files.createFile(targetDir.resolve("compiled.class"));
Files.createFile(tempDir.resolve("README.txt"));
PathMatcher matcher = factory.createPathMatcher(tempDir, List.of("**/*.java"), List.of("**/target/**"), false);
assertNotNull(matcher);
assertTrue(matcher.matches(srcDir.resolve("Main.java")));
assertTrue(matcher.matches(testDir.resolve("Test.java")));
assertFalse(matcher.matches(targetDir.resolve("compiled.class")));
assertFalse(matcher.matches(tempDir.resolve("README.txt")));
}
@Test
public void testCreatePathMatcherWithDefaultExcludes(@TempDir Path tempDir) throws IOException {
// Create test files including SCM files
Path srcDir = Files.createDirectories(tempDir.resolve("src"));
Path gitDir = Files.createDirectories(tempDir.resolve(".git"));
Files.createFile(srcDir.resolve("Main.java"));
Files.createFile(gitDir.resolve("config"));
Files.createFile(tempDir.resolve(".gitignore"));
PathMatcher matcher = factory.createPathMatcher(tempDir, List.of("**/*"), null, true); // Use default excludes
assertNotNull(matcher);
assertTrue(matcher.matches(srcDir.resolve("Main.java")));
assertFalse(matcher.matches(gitDir.resolve("config")));
assertFalse(matcher.matches(tempDir.resolve(".gitignore")));
}
@Test
public void testCreateIncludeOnlyMatcher(@TempDir Path tempDir) throws IOException {
Files.createFile(tempDir.resolve("Main.java"));
Files.createFile(tempDir.resolve("README.txt"));
PathMatcher matcher = factory.createIncludeOnlyMatcher(tempDir, List.of("**/*.java"));
assertNotNull(matcher);
assertTrue(matcher.matches(tempDir.resolve("Main.java")));
assertFalse(matcher.matches(tempDir.resolve("README.txt")));
}
@Test
public void testCreateExcludeOnlyMatcher(@TempDir Path tempDir) throws IOException {
// Create a simple file structure for testing
Files.createFile(tempDir.resolve("included.txt"));
Files.createFile(tempDir.resolve("excluded.txt"));
// Test that the method exists and returns a non-null matcher
PathMatcher matcher = factory.createExcludeOnlyMatcher(tempDir, List.of("excluded.txt"), false);
assertNotNull(matcher);
// Test that files not matching exclude patterns are included
assertTrue(matcher.matches(tempDir.resolve("included.txt")));
// Note: Due to a known issue in PathSelector (fixed in PR #10909),
// exclude-only patterns don't work correctly in the current codebase.
// This test verifies the API exists and basic functionality works.
// Full exclude-only functionality will work once PR #10909 is merged.
}
@Test
public void testCreatePathMatcherDefaultMethod(@TempDir Path tempDir) throws IOException {
Files.createFile(tempDir.resolve("Main.java"));
Files.createFile(tempDir.resolve("Test.java"));
// Test the default method without useDefaultExcludes parameter
PathMatcher matcher = factory.createPathMatcher(tempDir, List.of("**/*.java"), List.of("**/Test.java"));
assertNotNull(matcher);
assertTrue(matcher.matches(tempDir.resolve("Main.java")));
assertFalse(matcher.matches(tempDir.resolve("Test.java")));
}
@Test
public void testIncludesAll(@TempDir Path tempDir) {
PathMatcher matcher = factory.createPathMatcher(tempDir, null, null, false);
// Because no pattern has been specified, simplify to includes all.
// IT must be the same instance, by method contract.
assertSame(factory.includesAll(), matcher);
}
/**
* Test that verifies the factory creates matchers that work correctly with file trees,
* similar to the existing PathSelectorTest.
*/
@Test
public void testFactoryWithFileTree(@TempDir Path directory) throws IOException {
Path foo = Files.createDirectory(directory.resolve("foo"));
Path bar = Files.createDirectory(foo.resolve("bar"));
Path baz = Files.createDirectory(directory.resolve("baz"));
Files.createFile(directory.resolve("root.txt"));
Files.createFile(bar.resolve("leaf.txt"));
Files.createFile(baz.resolve("excluded.txt"));
PathMatcher matcher = factory.createPathMatcher(directory, List.of("**/*.txt"), List.of("baz/**"), false);
Set<Path> filtered =
new HashSet<>(Files.walk(directory).filter(matcher::matches).toList());
String[] expected = {"root.txt", "foo/bar/leaf.txt"};
assertEquals(expected.length, filtered.size());
for (String path : expected) {
assertTrue(filtered.contains(directory.resolve(path)), "Expected path not found: " + path);
}
}
@Test
public void testNullParameterThrowsNPE(@TempDir Path tempDir) {
// Test that null baseDirectory throws NullPointerException
assertThrows(
NullPointerException.class,
() -> factory.createPathMatcher(null, List.of("*.txt"), List.of("*.tmp"), false));
assertThrows(
NullPointerException.class, () -> factory.createPathMatcher(null, List.of("*.txt"), List.of("*.tmp")));
assertThrows(NullPointerException.class, () -> factory.createExcludeOnlyMatcher(null, List.of("*.tmp"), false));
assertThrows(NullPointerException.class, () -> factory.createIncludeOnlyMatcher(null, List.of("*.txt")));
// Test that PathSelector constructor also throws NPE for null directory
assertThrows(
NullPointerException.class, () -> PathSelector.of(null, List.of("*.txt"), List.of("*.tmp"), false));
// Test that deriveDirectoryMatcher throws NPE for null fileMatcher
assertThrows(NullPointerException.class, () -> factory.deriveDirectoryMatcher(null));
}
@Test
public void testDeriveDirectoryMatcher(@TempDir Path tempDir) throws IOException {
// Create directory structure
Path subDir = Files.createDirectory(tempDir.resolve("subdir"));
Path excludedDir = Files.createDirectory(tempDir.resolve("excluded"));
// Test basic functionality - method exists and returns non-null matcher
PathMatcher anyMatcher = factory.createPathMatcher(tempDir, List.of("**/*.txt"), null, false);
PathMatcher dirMatcher = factory.deriveDirectoryMatcher(anyMatcher);
assertNotNull(dirMatcher);
// Basic functionality test - should return a working matcher
assertTrue(dirMatcher.matches(subDir));
assertTrue(dirMatcher.matches(excludedDir));
// Test with matcher that has no directory filtering (null includes/excludes)
PathMatcher allMatcher = factory.createPathMatcher(tempDir, null, null, false);
PathMatcher dirMatcher2 = factory.deriveDirectoryMatcher(allMatcher);
assertNotNull(dirMatcher2);
// Should include all directories when no filtering is possible
assertTrue(dirMatcher2.matches(subDir));
assertTrue(dirMatcher2.matches(excludedDir));
// Test with non-PathSelector matcher (should return INCLUDES_ALL)
PathMatcher customMatcher = path -> true;
PathMatcher dirMatcher3 = factory.deriveDirectoryMatcher(customMatcher);
assertNotNull(dirMatcher3);
// Should include all directories for unknown matcher types
assertTrue(dirMatcher3.matches(subDir));
assertTrue(dirMatcher3.matches(excludedDir));
// Test that the method correctly identifies PathSelector instances
// and calls the appropriate methods (canFilterDirectories, couldHoldSelected)
PathMatcher pathSelectorMatcher = factory.createPathMatcher(tempDir, List.of("*.txt"), List.of("*.tmp"), false);
PathMatcher dirMatcher4 = factory.deriveDirectoryMatcher(pathSelectorMatcher);
assertNotNull(dirMatcher4);
// The exact behavior depends on PathSelector implementation
// We just verify the method works and returns a valid matcher
assertTrue(dirMatcher4.matches(subDir)
|| !dirMatcher4.matches(subDir)); // Always true, just testing it doesn't throw
}
}
| DefaultPathMatcherFactoryTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/HelloClient2.java | {
"start": 344,
"end": 704
} | interface ____ {
@POST
@Consumes(MediaType.TEXT_PLAIN)
@Path("/")
String echo(String name);
@POST
@Consumes(MediaType.TEXT_PLAIN)
@Path("/")
String echoIgnoreParams(String name, @NotBody String ignored, @NotBody String ignored2);
@GET
String bug18977();
@GET
@Path("delay")
Uni<String> delay();
}
| HelloClient2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.