language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
test-framework/junit5/src/main/java/io/quarkus/test/junit/internal/TestInfoImpl.java
|
{
"start": 164,
"end": 991
}
|
class ____ implements TestInfo {
private final String displayName;
private final Set<String> tags;
private final Optional<Class<?>> testClass;
private final Optional<Method> testMethod;
TestInfoImpl(String displayName, Set<String> tags, Optional<Class<?>> testClass, Optional<Method> testMethod) {
this.displayName = displayName;
this.tags = tags;
this.testClass = testClass;
this.testMethod = testMethod;
}
@Override
public String getDisplayName() {
return displayName;
}
@Override
public Set<String> getTags() {
return tags;
}
@Override
public Optional<Class<?>> getTestClass() {
return testClass;
}
@Override
public Optional<Method> getTestMethod() {
return testMethod;
}
}
|
TestInfoImpl
|
java
|
quarkusio__quarkus
|
extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheProcessor.java
|
{
"start": 3940,
"end": 7486
}
|
class ____ {
private static final Logger LOGGER = Logger.getLogger(CacheProcessor.class);
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem(Feature.CACHE);
}
@BuildStep
AutoInjectAnnotationBuildItem autoInjectCacheName() {
return new AutoInjectAnnotationBuildItem(CACHE_NAME);
}
@BuildStep
AnnotationsTransformerBuildItem annotationsTransformer() {
return new AnnotationsTransformerBuildItem(new CacheAnnotationsTransformer());
}
@BuildStep
RestClientAnnotationsTransformerBuildItem restClientAnnotationsTransformer() {
return new RestClientAnnotationsTransformerBuildItem(new RestClientCacheAnnotationsTransformer());
}
@BuildStep
CacheTypeBuildItem type(CacheBuildConfig config) {
return new CacheTypeBuildItem(
CAFFEINE_CACHE_TYPE.equals(config.type()) ? CacheTypeBuildItem.Type.LOCAL : CacheTypeBuildItem.Type.REMOTE);
}
@BuildStep
void validateCacheAnnotationsAndProduceCacheNames(CombinedIndexBuildItem combinedIndex,
List<AdditionalCacheNameBuildItem> additionalCacheNames,
BuildProducer<ValidationErrorBuildItem> validationErrors,
BuildProducer<CacheNamesBuildItem> cacheNames, BeanDiscoveryFinishedBuildItem beanDiscoveryFinished) {
// Validation errors produced by this build step.
List<Throwable> throwables = new ArrayList<>();
// Cache names produced by this build step.
Set<String> names = new HashSet<>();
// The cache key generators constructors are validated at the end of this build step.
Set<DotName> keyGenerators = new HashSet<>();
/*
* First, for each non-repeated cache interceptor binding:
* - its target is validated
* - the corresponding cache name is collected
*/
for (DotName bindingName : INTERCEPTOR_BINDINGS) {
for (AnnotationInstance binding : combinedIndex.getIndex().getAnnotations(bindingName)) {
throwables.addAll(validateInterceptorBindingTarget(binding, binding.target()));
findCacheKeyGenerator(binding, binding.target()).ifPresent(keyGenerators::add);
if (binding.target().kind() == METHOD) {
/*
* Cache names from the interceptor bindings placed on cache interceptors must not be collected to prevent
* the instantiation of a cache with an empty name.
*/
names.add(binding.value(CACHE_NAME_PARAM).asString());
}
}
}
// The exact same things need to be done for repeated cache interceptor bindings.
for (DotName containerName : INTERCEPTOR_BINDING_CONTAINERS) {
for (AnnotationInstance container : combinedIndex.getIndex().getAnnotations(containerName)) {
for (AnnotationInstance binding : container.value("value").asNestedArray()) {
throwables.addAll(validateInterceptorBindingTarget(binding, container.target()));
findCacheKeyGenerator(binding, container.target()).ifPresent(keyGenerators::add);
names.add(binding.value(CACHE_NAME_PARAM).asString());
}
/*
* Interception from repeated interceptor bindings won't work with the CDI implementation from MicroProfile REST
* Client. Using repeated interceptor bindings on a method from a
|
CacheProcessor
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inject/QualifierWithTypeUseTest.java
|
{
"start": 3224,
"end": 3338
}
|
interface ____ {}
@Target({ElementType.TYPE_USE, ElementType.TYPE_PARAMETER})
@
|
Qualifier1
|
java
|
elastic__elasticsearch
|
x-pack/plugin/slm/src/yamlRestTest/java/org/elasticsearch/xpack/slm/SnapshotLifecycleYamlIT.java
|
{
"start": 1069,
"end": 2368
}
|
class ____ extends ESClientYamlSuiteTestCase {
private static final String USER = Objects.requireNonNull(System.getProperty("tests.rest.cluster.username", "test_admin"));
private static final String PASS = Objects.requireNonNull(System.getProperty("tests.rest.cluster.password", "x-pack-test-password"));
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.module("x-pack-slm")
.module("x-pack-ilm")
.setting("xpack.security.enabled", "true")
.setting("xpack.license.self_generated.type", "trial")
.user(USER, PASS)
.build();
public SnapshotLifecycleYamlIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return createParameters();
}
@Override
protected Settings restClientSettings() {
String token = basicAuthHeaderValue(USER, new SecureString(PASS.toCharArray()));
return Settings.builder().put(super.restClientSettings()).put(ThreadContext.PREFIX + ".Authorization", token).build();
}
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}
|
SnapshotLifecycleYamlIT
|
java
|
spring-projects__spring-framework
|
spring-jms/src/main/java/org/springframework/jms/connection/ConnectionFactoryUtils.java
|
{
"start": 15909,
"end": 17203
}
|
class ____ extends ResourceHolderSynchronization<JmsResourceHolder, Object> {
private final boolean transacted;
private boolean commitProcessed;
public JmsResourceSynchronization(JmsResourceHolder resourceHolder, Object resourceKey, boolean transacted) {
super(resourceHolder, resourceKey);
this.transacted = transacted;
}
@Override
protected boolean shouldReleaseBeforeCompletion() {
return !this.transacted;
}
@Override
protected void processResourceAfterCommit(JmsResourceHolder resourceHolder) {
this.commitProcessed = true;
try {
resourceHolder.commitAll();
}
catch (JMSException ex) {
throw new SynchedLocalTransactionFailedException("Local JMS transaction failed to commit", ex);
}
}
@Override
public void afterCompletion(int status) {
if (status == STATUS_COMMITTED && this.transacted && !this.commitProcessed) {
// JmsResourceSynchronization registered in afterCommit phase of other synchronization
// -> late local JMS transaction commit here, otherwise it would silently get dropped.
afterCommit();
}
super.afterCompletion(status);
}
@Override
protected void releaseResource(JmsResourceHolder resourceHolder, Object resourceKey) {
resourceHolder.closeAll();
}
}
}
|
JmsResourceSynchronization
|
java
|
quarkusio__quarkus
|
integration-tests/mongodb-rest-data-panache/src/test/java/io/quarkus/it/mongodb/rest/data/panache/MongoDbRestDataPanacheIT.java
|
{
"start": 273,
"end": 502
}
|
class ____ extends MongoDbRestDataPanacheTest {
DevServicesContext context;
@Test
public void testDevServicesProperties() {
assertThat(context.devServicesProperties()).isEmpty();
}
}
|
MongoDbRestDataPanacheIT
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerHealth.java
|
{
"start": 1125,
"end": 1736
}
|
class ____ the details of the schedulers operations.
*
* <p><code>SchedulerHealth</code> provides clients with information such as:
* <ol>
* <li>
* scheduler's latest timestamp
* </li>
* <li>
* resources allocated, reserved, released in the last scheduler run
* </li>
* <li>
* latest allocation, release, reservation, preemption details
* </li>
* <li>
* count of latest allocation, release, reservation, preemption
* </li>
* <li>
* aggregate count of latest allocation, release, reservation, preemption,
* fulfilled reservation
* </li>
*</ol>
*
*/
public
|
holds
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/plugins/MetadataUpgrader.java
|
{
"start": 1076,
"end": 3101
}
|
class ____ {
public final UnaryOperator<Map<String, IndexTemplateMetadata>> indexTemplateMetadataUpgraders;
public final Map<String, UnaryOperator<Metadata.ProjectCustom>> customMetadataUpgraders;
public MetadataUpgrader(
Collection<UnaryOperator<Map<String, IndexTemplateMetadata>>> indexTemplateMetadataUpgraders,
Collection<Map<String, UnaryOperator<Metadata.ProjectCustom>>> customMetadataUpgraders
) {
this.indexTemplateMetadataUpgraders = templates -> {
Map<String, IndexTemplateMetadata> upgradedTemplates = new HashMap<>(templates);
for (UnaryOperator<Map<String, IndexTemplateMetadata>> upgrader : indexTemplateMetadataUpgraders) {
upgradedTemplates = upgrader.apply(upgradedTemplates);
}
return upgradedTemplates;
};
this.customMetadataUpgraders = customMetadataUpgraders.stream()
// Flatten the stream of maps into a stream of entries
.flatMap(map -> map.entrySet().stream())
.collect(
groupingBy(
// Group by the type of custom metadata to be upgraded (the entry key)
Map.Entry::getKey,
// For each type, extract the operators (the entry values), collect to a list, and make an operator which combines them
collectingAndThen(mapping(Map.Entry::getValue, toList()), CombiningCustomUpgrader::new)
)
);
}
private record CombiningCustomUpgrader(List<UnaryOperator<Metadata.ProjectCustom>> upgraders)
implements
UnaryOperator<Metadata.ProjectCustom> {
@Override
public Metadata.ProjectCustom apply(Metadata.ProjectCustom custom) {
Metadata.ProjectCustom upgraded = custom;
for (UnaryOperator<Metadata.ProjectCustom> upgrader : upgraders) {
upgraded = upgrader.apply(upgraded);
}
return upgraded;
}
}
}
|
MetadataUpgrader
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/oracle/create/OracleCreateFunctionTest_4.java
|
{
"start": 1021,
"end": 5773
}
|
class ____ extends OracleTest {
public void test_types() throws Exception {
String sql = //
"FUNCTION SPLITSTR(STR IN CLOB,\n" +
" I IN NUMBER := 0,\n" +
" SEP IN VARCHAR2 := ',') RETURN VARCHAR2\n" +
" DETERMINISTIC\n" +
"/**************************************\n" +
" * NAME: SPLITSTR\n" +
" * AUTHOR: SEAN ZHANG.\n" +
" * DATE: 2012-09-03.\n" +
" * FUNCTION: ??????????????????????\n" +
" * PARAMETERS: STR: ????????\n" +
" I: ?????????I?0??STR????????I ??????????????\n" +
" SEP: ????????????????????????????????STR????SEP?????\n" +
" * EXAMPLE: SELECT SPLITSTR('ABC,DEF', 1) AS STR FROM DUAL; ?? ABC\n" +
" SELECT SPLITSTR('ABC,DEF', 3) AS STR FROM DUAL; ?? ?\n" +
" **************************************/\n" +
" IS\n" +
" T_COUNT NUMBER;\n" +
" T_STR VARCHAR2(4000);\n" +
"BEGIN\n" +
" IF I = 0 THEN\n" +
" T_STR := STR;\n" +
" ELSIF INSTR(STR, SEP) = 0 THEN\n" +
" T_STR := SEP;\n" +
" ELSE\n" +
" SELECT COUNT(*) INTO T_COUNT FROM TABLE(SPLIT(STR, SEP));\n" +
" IF I <= T_COUNT THEN\n" +
" SELECT STR\n" +
" INTO T_STR\n" +
" FROM (SELECT ROWNUM AS ITEM, COLUMN_VALUE AS STR\n" +
" FROM TABLE(SPLIT(STR, SEP)))\n" +
" WHERE ITEM = I;\n" +
" END IF;\n" +
" END IF;\n" +
" RETURN T_STR;\n" +
"END;";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("FUNCTION SPLITSTR (\n" +
"\tSTR IN CLOB,\n" +
"\tI IN NUMBER := 0,\n" +
"\tSEP IN VARCHAR2 := ','\n" +
")\n" +
"RETURN VARCHAR2DETERMINISTIC \n" +
"IS\n" +
"T_COUNT NUMBER;\n" +
"\tT_STR VARCHAR2(4000);\n" +
"BEGIN\n" +
"\tIF I = 0 THEN\n" +
"\t\tT_STR := STR;\n" +
"\tELSIF INSTR(STR, SEP) = 0 THEN\n" +
"\t\tT_STR := SEP;\n" +
"\tELSE\n" +
"\t\tSELECT COUNT(*)\n" +
"\t\tINTO T_COUNT\n" +
"\t\tFROM TABLE(SPLIT(STR, SEP));\n" +
"\t\tIF I <= T_COUNT THEN\n" +
"\t\t\tSELECT STR\n" +
"\t\t\tINTO T_STR\n" +
"\t\t\tFROM (\n" +
"\t\t\t\tSELECT ROWNUM AS ITEM, COLUMN_VALUE AS STR\n" +
"\t\t\t\tFROM TABLE(SPLIT(STR, SEP))\n" +
"\t\t\t)\n" +
"\t\t\tWHERE ITEM = I;\n" +
"\t\tEND IF;\n" +
"\tEND IF;\n" +
"\tRETURN T_STR;\n" +
"END;",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(3, visitor.getColumns().size());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("orders", "order_total")));
}
}
|
OracleCreateFunctionTest_4
|
java
|
google__guice
|
core/src/com/google/inject/internal/DuplicateElementError.java
|
{
"start": 533,
"end": 4527
}
|
class ____<T> extends InternalErrorDetail<DuplicateElementError<T>> {
private final Key<Set<T>> setKey;
private final ImmutableMultimap<T, Element<T>> elements;
DuplicateElementError(
Key<Set<T>> setKey, List<Binding<T>> bindings, T[] values, List<Object> sources) {
this(setKey, indexElements(bindings, values), sources);
}
private DuplicateElementError(
Key<Set<T>> setKey, ImmutableMultimap<T, Element<T>> elements, List<Object> sources) {
super(
ErrorId.DUPLICATE_ELEMENT,
String.format("Duplicate elements found in Multibinder %s.", Messages.convert(setKey)),
sources,
null);
this.setKey = setKey;
this.elements = elements;
}
@Override
protected void formatDetail(List<ErrorDetail<?>> others, Formatter formatter) {
formatter.format("\n%s\n", Messages.bold("Duplicates:"));
int duplicateIndex = 1;
for (Map.Entry<T, Collection<Element<T>>> entry : elements.asMap().entrySet()) {
formatter.format("%-2s: ", duplicateIndex++);
if (entry.getValue().size() > 1) {
Set<String> valuesAsString =
entry.getValue().stream()
.map(element -> element.value.toString())
.collect(Collectors.toSet());
if (valuesAsString.size() == 1) {
// String representation of the duplicates elements are the same, so only print out one.
formatter.format("Element: %s\n", Messages.redBold(valuesAsString.iterator().next()));
formatter.format(" Bound at:\n");
int index = 1;
for (Element<T> element : entry.getValue()) {
formatter.format(" %-2s: ", index++);
formatElement(element, formatter);
}
} else {
// Print out all elements as string when there are different string representations of the
// elements. To keep the logic simple, same strings are not grouped together unless all
// elements have the same string represnetation. This means some strings may be printed
// out multiple times.
// There is no indentation for the first duplicate element.
boolean indent = false;
for (Element<T> element : entry.getValue()) {
if (indent) {
formatter.format(" ");
} else {
indent = true;
}
formatter.format("Element: %s\n", Messages.redBold(element.value.toString()));
formatter.format(" Bound at: ");
formatElement(element, formatter);
}
}
}
}
formatter.format("\n%s\n", Messages.bold("Multibinder declared at:"));
// Multibinder source includes the key of the set. Filter it out since it's not useful in the
// printed error stack.
List<Object> filteredSource =
getSources().stream()
.filter(
source -> {
if (source instanceof Dependency) {
return !((Dependency<?>) source).getKey().equals(setKey);
}
return true;
})
.collect(Collectors.toList());
ErrorFormatter.formatSources(filteredSource, formatter);
}
private void formatElement(Element<T> element, Formatter formatter) {
Object source = element.binding.getSource();
new SourceFormatter(
source,
formatter,
/** omitPreposition= */
true)
.format();
}
@Override
public DuplicateElementError<T> withSources(List<Object> newSources) {
return new DuplicateElementError<>(setKey, elements, newSources);
}
static <T> ImmutableMultimap<T, Element<T>> indexElements(List<Binding<T>> bindings, T[] values) {
ImmutableMultimap.Builder<T, Element<T>> map = ImmutableMultimap.builder();
for (int i = 0; i < values.length; i++) {
map.put(values[i], new Element<T>(values[i], bindings.get(i)));
}
return map.build();
}
static
|
DuplicateElementError
|
java
|
apache__rocketmq
|
client/src/test/java/org/apache/rocketmq/acl/common/AclSignerTest.java
|
{
"start": 899,
"end": 1388
}
|
class ____ {
@Test(expected = Exception.class)
public void calSignatureExceptionTest() {
AclSigner.calSignature(new byte[]{},"");
}
@Test
public void calSignatureTest() {
String expectedSignature = "IUc8rrO/0gDch8CjObLQsW2rsiA=";
Assert.assertEquals(expectedSignature, AclSigner.calSignature("RocketMQ", "12345678"));
Assert.assertEquals(expectedSignature, AclSigner.calSignature("RocketMQ".getBytes(), "12345678"));
}
}
|
AclSignerTest
|
java
|
elastic__elasticsearch
|
modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java
|
{
"start": 5820,
"end": 7464
}
|
class ____ extends AbstractMap<Object, Object> implements Iterable<Object> {
private final Collection<Object> col;
CollectionMap(Collection<Object> col) {
this.col = col;
}
@Override
public Object get(Object key) {
if ("size".equals(key)) {
return col.size();
} else if (key instanceof Number number) {
return number.intValue() >= 0 && number.intValue() < col.size() ? Iterables.get(col, number.intValue()) : null;
}
try {
int index = Integer.parseInt(key.toString());
return index >= 0 && index < col.size() ? Iterables.get(col, index) : null;
} catch (NumberFormatException nfe) {
// if it's not a number it is as if the key doesn't exist
return null;
}
}
@Override
public boolean containsKey(Object key) {
return get(key) != null;
}
@Override
public Set<Entry<Object, Object>> entrySet() {
Map<Object, Object> map = Maps.newMapWithExpectedSize(col.size());
int i = 0;
for (Object item : col) {
map.put(i++, item);
}
return map.entrySet();
}
@Override
public Iterator<Object> iterator() {
return col.iterator();
}
}
@Override
public String stringify(Object object) {
CollectionUtils.ensureNoSelfReferences(object, "CustomReflectionObjectHandler stringify");
return super.stringify(object);
}
}
|
CollectionMap
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/internal/PostUpdateEventListenerStandardImpl.java
|
{
"start": 603,
"end": 1502
}
|
class ____ implements PostUpdateEventListener, CallbackRegistryConsumer {
private CallbackRegistry callbackRegistry;
@Override
public void injectCallbackRegistry(CallbackRegistry callbackRegistry) {
this.callbackRegistry = callbackRegistry;
}
@Override
public void onPostUpdate(PostUpdateEvent event) {
handlePostUpdate( event.getEntity(), event.getSession() );
}
private void handlePostUpdate(Object entity, SharedSessionContractImplementor source) {
// mimic the preUpdate filter
if ( source.isStateless()
|| source.getPersistenceContextInternal().getEntry( entity ).getStatus() != Status.DELETED ) {
callbackRegistry.postUpdate(entity);
}
}
@Override
public boolean requiresPostCommitHandling(EntityPersister persister) {
return callbackRegistry.hasRegisteredCallbacks( persister.getMappedClass(), CallbackType.POST_UPDATE );
}
}
|
PostUpdateEventListenerStandardImpl
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/push/v2/task/NamingPushCallback.java
|
{
"start": 838,
"end": 1221
}
|
interface ____ extends PushCallBack {
/**
* Set actual pushed service info, the host list of service info may be changed by selector. Detail see implement of
* {@link com.alibaba.nacos.naming.push.v2.executor.PushExecutor}.
*
* @param serviceInfo actual pushed service info
*/
void setActualServiceInfo(ServiceInfo serviceInfo);
}
|
NamingPushCallback
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/ExtendedBeanInfoTests.java
|
{
"start": 5474,
"end": 5548
}
|
class ____ {
public Number getProperty1() {
return 1;
}
}
|
Parent
|
java
|
apache__camel
|
components/camel-test/camel-test-spring-junit5/src/test/java/org/apache/camel/test/spring/CamelSpringDisableJmxTest.java
|
{
"start": 1079,
"end": 1311
}
|
class ____
extends CamelSpringPlainTest {
@Test
@Override
public void testJmx() {
assertEquals(JmxManagementStrategy.class, camelContext.getManagementStrategy().getClass());
}
}
|
CamelSpringDisableJmxTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/odps/issues/Issue4933.java
|
{
"start": 276,
"end": 1166
}
|
class ____ extends TestCase {
public void testInsert() {
String sql = "with \n" +
" a as (select * from src where key is not null),\n" +
" b as (select * from src2 where value>0),\n" +
" c as (select * from src3 where value>0),\n" +
" d as (select a.key,b.value from a join b on a.key=b.key ),\n" +
" e as (select a.key,c.value from a left outer join c on a.key=c.key and c.key is not null )\n" +
" insert overwrite table x select * from y;";
List<String> tables = new ArrayList<>();
SQLUtils.acceptTableSource(
sql,
DbType.odps,
e -> tables.add(((SQLExprTableSource) e).getTableName()),
e -> e instanceof SQLExprTableSource
);
assertTrue(tables.contains("src"));
}
}
|
Issue4933
|
java
|
quarkusio__quarkus
|
extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheManagerImpl.java
|
{
"start": 245,
"end": 365
}
|
class ____ registered as an {@link jakarta.enterprise.context.ApplicationScoped} synthetic bean at build time.
*/
public
|
is
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/WildcardImportTest.java
|
{
"start": 10459,
"end": 10681
}
|
class ____ {
Inner t;
}
""")
.addOutputLines(
"out/test/Test.java",
"""
package test;
import test.A.Inner;
public
|
Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java
|
{
"start": 26308,
"end": 35351
}
|
class ____ {
private final Set<ClusterBlock> global = new HashSet<>();
private final Map<ProjectId, ProjectBlocks> projects = new HashMap<>();
public Builder() {}
private static ProjectBlocks emptyMutableProjectBlocks() {
return new ProjectBlocks(new HashMap<>(), new HashSet<>());
}
public Builder blocks(ClusterBlocks blocks) {
global.addAll(blocks.global());
for (var projectId : blocks.projectBlocksMap.keySet()) {
final var projectBlocks = projects.computeIfAbsent(projectId, k -> emptyMutableProjectBlocks());
projectBlocks.projectGlobal.addAll(blocks.projectGlobal(projectId));
for (Map.Entry<String, Set<ClusterBlock>> entry : blocks.indices(projectId).entrySet()) {
if (projectBlocks.indices.containsKey(entry.getKey()) == false) {
projectBlocks.indices.put(entry.getKey(), new HashSet<>());
}
projectBlocks.indices.get(entry.getKey()).addAll(entry.getValue());
}
}
return this;
}
@Deprecated(forRemoval = true)
public Builder addBlocks(IndexMetadata indexMetadata) {
return addBlocks(Metadata.DEFAULT_PROJECT_ID, indexMetadata);
}
public Builder addBlocks(ProjectId projectId, IndexMetadata indexMetadata) {
String indexName = indexMetadata.getIndex().getName();
if (indexMetadata.getState() == IndexMetadata.State.CLOSE) {
addIndexBlock(projectId, indexName, MetadataIndexStateService.INDEX_CLOSED_BLOCK);
}
if (IndexMetadata.INDEX_READ_ONLY_SETTING.get(indexMetadata.getSettings())) {
addIndexBlock(projectId, indexName, IndexMetadata.INDEX_READ_ONLY_BLOCK);
}
if (IndexMetadata.INDEX_BLOCKS_READ_SETTING.get(indexMetadata.getSettings())) {
addIndexBlock(projectId, indexName, IndexMetadata.INDEX_READ_BLOCK);
}
if (IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.get(indexMetadata.getSettings())) {
addIndexBlock(projectId, indexName, IndexMetadata.INDEX_WRITE_BLOCK);
}
if (IndexMetadata.INDEX_BLOCKS_METADATA_SETTING.get(indexMetadata.getSettings())) {
addIndexBlock(projectId, indexName, IndexMetadata.INDEX_METADATA_BLOCK);
}
if (IndexMetadata.INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING.get(indexMetadata.getSettings())) {
addIndexBlock(projectId, indexName, IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK);
}
return this;
}
@Deprecated(forRemoval = true)
public Builder updateBlocks(IndexMetadata indexMetadata) {
return updateBlocks(Metadata.DEFAULT_PROJECT_ID, indexMetadata);
}
public Builder updateBlocks(ProjectId projectId, IndexMetadata indexMetadata) {
// let's remove all blocks for this index and add them back -- no need to remove all individual blocks....
projects.computeIfAbsent(projectId, k -> emptyMutableProjectBlocks()).indices.remove(indexMetadata.getIndex().getName());
return addBlocks(projectId, indexMetadata);
}
public Builder addGlobalBlock(ClusterBlock block) {
global.add(block);
return this;
}
public Builder removeGlobalBlock(ClusterBlock block) {
global.remove(block);
return this;
}
public Builder removeGlobalBlock(int blockId) {
global.removeIf(block -> block.id() == blockId);
return this;
}
public Builder removeProject(ProjectId projectId) {
projects.remove(projectId);
return this;
}
public Builder addProjectGlobalBlock(ProjectId projectId, ClusterBlock block) {
assert projectId.equals(ProjectId.DEFAULT) == false;
projects.computeIfAbsent(projectId, k -> emptyMutableProjectBlocks()).projectGlobal.add(block);
return this;
}
public Builder removeProjectGlobalBlock(ProjectId projectId, ClusterBlock block) {
var project = projects.get(projectId);
if (project != null) {
project.projectGlobal.remove(block);
}
return this;
}
@Deprecated(forRemoval = true)
public Builder addIndexBlock(String index, ClusterBlock block) {
return addIndexBlock(Metadata.DEFAULT_PROJECT_ID, index, block);
}
public Builder addIndexBlock(ProjectId projectId, String index, ClusterBlock block) {
final var projectBlocks = projects.computeIfAbsent(projectId, k -> emptyMutableProjectBlocks());
if (projectBlocks.indices().containsKey(index) == false) {
projectBlocks.indices.put(index, new HashSet<>());
}
projectBlocks.indices.get(index).add(block);
return this;
}
@Deprecated(forRemoval = true)
public Builder removeIndexBlocks(String index) {
return removeIndexBlocks(Metadata.DEFAULT_PROJECT_ID, index);
}
public Builder removeIndexBlocks(ProjectId projectId, String index) {
final var projectBlocks = projects.get(projectId);
if (projectBlocks == null) {
return this;
}
if (projectBlocks.indices.containsKey(index) == false) {
return this;
}
projectBlocks.indices.remove(index);
return this;
}
public boolean hasIndexBlock(ProjectId projectId, String index, ClusterBlock block) {
final var projectBlocks = projects.get(projectId);
if (projectBlocks == null) {
return false;
}
return projectBlocks.indices.getOrDefault(index, Set.of()).contains(block);
}
public boolean hasIndexBlockLevel(ProjectId projectId, String index, ClusterBlockLevel level) {
final var projectBlocks = projects.get(projectId);
if (projectBlocks == null) {
return false;
}
return projectBlocks.indices.getOrDefault(index, Set.of()).stream().anyMatch(clusterBlock -> clusterBlock.contains(level));
}
public Builder removeIndexBlock(ProjectId projectId, String index, ClusterBlock block) {
final var projectBlocks = projects.get(projectId);
if (projectBlocks == null) {
return this;
}
if (projectBlocks.indices.containsKey(index) == false) {
return this;
}
projectBlocks.get(index).remove(block);
if (projectBlocks.get(index).isEmpty()) {
projectBlocks.indices.remove(index);
}
return this;
}
public Builder removeIndexBlockWithId(ProjectId projectId, String index, int blockId) {
final var projectBlocks = projects.get(projectId);
if (projectBlocks == null) {
return this;
}
final Set<ClusterBlock> indexBlocks = projectBlocks.get(index);
if (indexBlocks == null) {
return this;
}
indexBlocks.removeIf(block -> block.id() == blockId);
if (indexBlocks.isEmpty()) {
projectBlocks.indices.remove(index);
}
return this;
}
public ClusterBlocks build() {
if (global.isEmpty()
&& noProjectOrDefaultProjectOnly(projects)
&& projects.getOrDefault(Metadata.DEFAULT_PROJECT_ID, ProjectBlocks.EMPTY).isEmpty()) {
return EMPTY_CLUSTER_BLOCK;
}
// We copy the block sets here in case of the builder is modified after build is called
Map<ProjectId, ProjectBlocks> projectsBuilder = new HashMap<>(projects.size());
for (Map.Entry<ProjectId, ProjectBlocks> projectEntry : projects.entrySet()) {
Map<String, Set<ClusterBlock>> indicesBuilder = new HashMap<>(projectEntry.getValue().indices.size());
for (Map.Entry<String, Set<ClusterBlock>> indexEntry : projectEntry.getValue().indices.entrySet()) {
indicesBuilder.put(indexEntry.getKey(), Set.copyOf(indexEntry.getValue()));
}
if (indicesBuilder.isEmpty() == false || projectEntry.getValue().projectGlobals().isEmpty() == false) {
projectsBuilder.put(
projectEntry.getKey(),
new ProjectBlocks(Map.copyOf(indicesBuilder), Set.copyOf(projectEntry.getValue().projectGlobals()))
);
}
}
return new ClusterBlocks(Set.copyOf(global), Map.copyOf(projectsBuilder));
}
}
}
|
Builder
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorAotContributionTests.java
|
{
"start": 24172,
"end": 24445
}
|
class ____ implements BeanRegistrar {
@Override
public void register(BeanRegistry registry, Environment env) {
registry.registerBean(Foo.class);
}
}
@Configuration
@Import(InstanceSupplierBeanRegistrar.class)
public static
|
DefaultConstructorBeanRegistrar
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/configuration/CoreOptions.java
|
{
"start": 11187,
"end": 12377
}
|
class ____. This setting should generally not be modified. To add another "
+ " pattern we recommend to use \"plugin.classloader.parent-first-patterns.additional\" instead.");
@Documentation.ExcludeFromDocumentation(
"Plugin classloader list is considered an implementation detail. "
+ "Configuration only included in case to mitigate unintended side-effects of this young feature.")
public static final ConfigOption<List<String>>
PLUGIN_ALWAYS_PARENT_FIRST_LOADER_PATTERNS_ADDITIONAL =
ConfigOptions.key("plugin.classloader.parent-first-patterns.additional")
.stringType()
.asList()
.defaultValues()
.withDescription(
"A (semicolon-separated) list of patterns that specifies which classes should always be"
+ " resolved through the plugin parent ClassLoader first. A pattern is a simple prefix that is checked "
+ " against the fully qualified
|
name
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/module/TestTypeModifierNameResolution.java
|
{
"start": 836,
"end": 934
}
|
interface ____ {
String getData();
void setData(String data);
}
static
|
MyType
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/scan/combined/d/OtherCombinedConfiguration.java
|
{
"start": 876,
"end": 968
}
|
class ____ {
@RestController
@ConfigurationProperties("c")
static
|
OtherCombinedConfiguration
|
java
|
jhy__jsoup
|
src/main/java/org/jsoup/helper/Regex.java
|
{
"start": 3943,
"end": 4246
}
|
class ____ implements Matcher {
private final java.util.regex.Matcher delegate;
JdkMatcher(java.util.regex.Matcher delegate) {
this.delegate = delegate;
}
@Override
public boolean find() {
return delegate.find();
}
}
}
|
JdkMatcher
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/NativeQuery.java
|
{
"start": 15058,
"end": 15530
}
|
interface ____<J> extends ReturnableResultNode {
default InstantiationResultNode<J> addBasicArgument(String columnAlias) {
return addBasicArgument( columnAlias, null );
}
InstantiationResultNode<J> addBasicArgument(String columnAlias, String argumentAlias);
}
/**
* Allows access to further control how properties within a root or join
* fetch are mapped back from the result set. Generally used in composite
* value scenarios.
*/
|
InstantiationResultNode
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/model/common/DateFormatValidatorFactory.java
|
{
"start": 5288,
"end": 6162
}
|
class ____ implements DateFormatValidator {
@Override
public DateFormatValidationResult validate(String dateFormat) {
try {
Class<?> aClass = Class.forName( ORG_JODA_TIME_FORMAT_DATE_TIME_FORMAT );
Method forPatternMethod = aClass.getMethod( FOR_PATTERN, String.class );
forPatternMethod.invoke( aClass, dateFormat );
return validDateFormat( dateFormat );
}
catch ( InvocationTargetException e ) {
return invalidDateFormat( dateFormat, e.getCause() );
}
catch ( ClassNotFoundException e ) {
return noJodaOnClassPath();
}
catch ( Exception e ) {
return invalidDateFormat( dateFormat, e );
}
}
}
private static
|
JodaTimeDateFormatValidator
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStartupVersions.java
|
{
"start": 3027,
"end": 12752
}
|
class ____ {
private final StorageInfo storageInfo;
private final String blockPoolId;
StorageData(int layoutVersion, int namespaceId, String clusterId,
long cTime, String bpid) {
storageInfo = new StorageInfo(layoutVersion, namespaceId, clusterId,
cTime, NodeType.DATA_NODE);
blockPoolId = bpid;
}
}
/**
* Initialize the versions array. This array stores all combinations
* of cross product:
* {oldLayoutVersion,currentLayoutVersion,futureLayoutVersion} X
* {currentNamespaceId,incorrectNamespaceId} X
* {pastFsscTime,currentFsscTime,futureFsscTime}
*/
private StorageData[] initializeVersions() throws Exception {
int layoutVersionOld = Storage.LAST_UPGRADABLE_LAYOUT_VERSION;
int layoutVersionCur = DataNodeLayoutVersion.getCurrentLayoutVersion();
int layoutVersionNew = Integer.MIN_VALUE;
int namespaceIdCur = UpgradeUtilities.getCurrentNamespaceID(null);
int namespaceIdOld = Integer.MIN_VALUE;
long fsscTimeOld = Long.MIN_VALUE;
long fsscTimeCur = UpgradeUtilities.getCurrentFsscTime(null);
long fsscTimeNew = Long.MAX_VALUE;
String clusterID = "testClusterID";
String invalidClusterID = "testClusterID";
String bpid = UpgradeUtilities.getCurrentBlockPoolID(null);
String invalidBpid = "invalidBpid";
return new StorageData[] {
new StorageData(layoutVersionOld, namespaceIdCur, clusterID,
fsscTimeOld, bpid), // 0
new StorageData(layoutVersionOld, namespaceIdCur, clusterID,
fsscTimeCur, bpid), // 1
new StorageData(layoutVersionOld, namespaceIdCur, clusterID,
fsscTimeNew, bpid), // 2
new StorageData(layoutVersionOld, namespaceIdOld, clusterID,
fsscTimeOld, bpid), // 3
new StorageData(layoutVersionOld, namespaceIdOld, clusterID,
fsscTimeCur, bpid), // 4
new StorageData(layoutVersionOld, namespaceIdOld, clusterID,
fsscTimeNew, bpid), // 5
new StorageData(layoutVersionCur, namespaceIdCur, clusterID,
fsscTimeOld, bpid), // 6
new StorageData(layoutVersionCur, namespaceIdCur, clusterID,
fsscTimeCur, bpid), // 7
new StorageData(layoutVersionCur, namespaceIdCur, clusterID,
fsscTimeNew, bpid), // 8
new StorageData(layoutVersionCur, namespaceIdOld, clusterID,
fsscTimeOld, bpid), // 9
new StorageData(layoutVersionCur, namespaceIdOld, clusterID,
fsscTimeCur, bpid), // 10
new StorageData(layoutVersionCur, namespaceIdOld, clusterID,
fsscTimeNew, bpid), // 11
new StorageData(layoutVersionNew, namespaceIdCur, clusterID,
fsscTimeOld, bpid), // 12
new StorageData(layoutVersionNew, namespaceIdCur, clusterID,
fsscTimeCur, bpid), // 13
new StorageData(layoutVersionNew, namespaceIdCur, clusterID,
fsscTimeNew, bpid), // 14
new StorageData(layoutVersionNew, namespaceIdOld, clusterID,
fsscTimeOld, bpid), // 15
new StorageData(layoutVersionNew, namespaceIdOld, clusterID,
fsscTimeCur, bpid), // 16
new StorageData(layoutVersionNew, namespaceIdOld, clusterID,
fsscTimeNew, bpid), // 17
// Test with invalid clusterId
new StorageData(layoutVersionCur, namespaceIdCur, invalidClusterID,
fsscTimeCur, bpid), // 18
// Test with invalid block pool Id
new StorageData(layoutVersionCur, namespaceIdCur, clusterID,
fsscTimeCur, invalidBpid) // 19
};
}
/**
* Determines if the given Namenode version and Datanode version
* are compatible with each other. Compatibility in this case mean
* that the Namenode and Datanode will successfully start up and
* will work together. The rules for compatibility,
* taken from the DFS Upgrade Design, are as follows:
* <pre>
* <ol>
* <li>Check 0: Datanode namespaceID != Namenode namespaceID the startup fails
* </li>
* <li>Check 1: Datanode clusterID != Namenode clusterID the startup fails
* </li>
* <li>Check 2: Datanode blockPoolID != Namenode blockPoolID the startup fails
* </li>
* <li>Check 3: The data-node does regular startup (no matter which options
* it is started with) if
* softwareLV == storedLV AND
* DataNode.FSSCTime == NameNode.FSSCTime
* </li>
* <li>Check 4: The data-node performs an upgrade if it is started without any
* options and
* |softwareLV| > |storedLV| OR
* (softwareLV == storedLV AND
* DataNode.FSSCTime < NameNode.FSSCTime)
* </li>
* <li>NOT TESTED: The data-node rolls back if it is started with
* the -rollback option and
* |softwareLV| >= |previous.storedLV| AND
* DataNode.previous.FSSCTime <= NameNode.FSSCTime
* </li>
* <li>Check 5: In all other cases the startup fails.</li>
* </ol>
* </pre>
*/
boolean isVersionCompatible(StorageData namenodeSd, StorageData datanodeSd) {
final StorageInfo namenodeVer = namenodeSd.storageInfo;
final StorageInfo datanodeVer = datanodeSd.storageInfo;
// check #0
if (namenodeVer.getNamespaceID() != datanodeVer.getNamespaceID()) {
LOG.info("namespaceIDs are not equal: isVersionCompatible=false");
return false;
}
// check #1
if (!namenodeVer.getClusterID().equals(datanodeVer.getClusterID())) {
LOG.info("clusterIDs are not equal: isVersionCompatible=false");
return false;
}
// check #2
if (!namenodeSd.blockPoolId.equals(datanodeSd.blockPoolId)) {
LOG.info("blockPoolIDs are not equal: isVersionCompatible=false");
return false;
}
// check #3
int softwareLV = DataNodeLayoutVersion.getCurrentLayoutVersion();
int storedLV = datanodeVer.getLayoutVersion();
if (softwareLV == storedLV &&
datanodeVer.getCTime() == namenodeVer.getCTime())
{
LOG.info("layoutVersions and cTimes are equal: isVersionCompatible=true");
return true;
}
// check #4
long absSoftwareLV = Math.abs((long)softwareLV);
long absStoredLV = Math.abs((long)storedLV);
if (absSoftwareLV > absStoredLV ||
(softwareLV == storedLV &&
datanodeVer.getCTime() < namenodeVer.getCTime()))
{
LOG.info("softwareLayoutVersion is newer OR namenode cTime is newer: isVersionCompatible=true");
return true;
}
// check #5
LOG.info("default case: isVersionCompatible=false");
return false;
}
/**
* This test ensures the appropriate response (successful or failure) from
* a Datanode when the system is started with differing version combinations.
* <pre>
* For each 3-tuple in the cross product
* ({oldLayoutVersion,currentLayoutVersion,futureLayoutVersion},
* {currentNamespaceId,incorrectNamespaceId},
* {pastFsscTime,currentFsscTime,futureFsscTime})
* 1. Startup Namenode with version file containing
* (currentLayoutVersion,currentNamespaceId,currentFsscTime)
* 2. Attempt to startup Datanode with version file containing
* this iterations version 3-tuple
* </pre>
*/
@Test
@Timeout(value = 300)
public void testVersions() throws Exception {
UpgradeUtilities.initialize();
Configuration conf = UpgradeUtilities.initializeStorageStateConf(1,
new HdfsConfiguration());
StorageData[] versions = initializeVersions();
UpgradeUtilities.createNameNodeStorageDirs(
conf.getStrings(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY), "current");
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
.format(false)
.manageDataDfsDirs(false)
.manageNameDfsDirs(false)
.startupOption(StartupOption.REGULAR)
.build();
StorageData nameNodeVersion = new StorageData(
HdfsServerConstants.NAMENODE_LAYOUT_VERSION,
UpgradeUtilities.getCurrentNamespaceID(cluster),
UpgradeUtilities.getCurrentClusterID(cluster),
UpgradeUtilities.getCurrentFsscTime(cluster),
UpgradeUtilities.getCurrentBlockPoolID(cluster));
log("NameNode version info", NAME_NODE, null, nameNodeVersion);
String bpid = UpgradeUtilities.getCurrentBlockPoolID(cluster);
for (int i = 0; i < versions.length; i++) {
File[] storage = UpgradeUtilities.createDataNodeStorageDirs(
conf.getStrings(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY), "current");
log("DataNode version info", DATA_NODE, i, versions[i]);
UpgradeUtilities.createDataNodeVersionFile(storage,
versions[i].storageInfo, bpid, versions[i].blockPoolId, conf);
try {
cluster.startDataNodes(conf, 1, false, StartupOption.REGULAR, null);
} catch (Exception ignore) {
// Ignore. The asserts below will check for problems.
// ignore.printStackTrace();
}
assertTrue(cluster.getNameNode() != null);
assertEquals(isVersionCompatible(nameNodeVersion, versions[i]),
cluster.isDataNodeUp());
cluster.shutdownDataNodes();
}
}
@AfterEach
public void tearDown() throws Exception {
LOG.info("Shutting down MiniDFSCluster");
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
public static void main(String[] args) throws Exception {
new TestDFSStartupVersions().testVersions();
}
}
|
StorageData
|
java
|
apache__camel
|
components/camel-resilience4j-micrometer/src/test/java/org/apache/camel/component/resilience4j/micrometer/MicrometerTest.java
|
{
"start": 1244,
"end": 3896
}
|
class ____ extends CamelTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
// enable micrometer by adding the factory
context.addService(new DefaultResilience4jMicrometerFactory());
return context;
}
@Test
public void testResilience() throws Exception {
test("direct:start");
DefaultResilience4jMicrometerFactory factory = context.hasService(DefaultResilience4jMicrometerFactory.class);
Assertions.assertNotNull(factory);
MeterRegistry reg = factory.getMeterRegistry();
Assertions.assertNotNull(reg);
Assertions.assertEquals(35, reg.getMeters().size());
}
@Test
public void testResilienceWithTimeOut() throws Exception {
test("direct:start.with.timeout.enabled");
DefaultResilience4jMicrometerFactory factory = context.hasService(DefaultResilience4jMicrometerFactory.class);
Assertions.assertNotNull(factory);
MeterRegistry reg = factory.getMeterRegistry();
Assertions.assertNotNull(reg);
Assertions.assertEquals(35, reg.getMeters().size());
}
private void test(String endPointUri) throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:result").expectedPropertyReceived(CircuitBreakerConstants.RESPONSE_SUCCESSFUL_EXECUTION, true);
getMockEndpoint("mock:result").expectedPropertyReceived(CircuitBreakerConstants.RESPONSE_FROM_FALLBACK, false);
getMockEndpoint("mock:result").expectedPropertyReceived(CircuitBreakerConstants.RESPONSE_STATE, "CLOSED");
template.sendBody(endPointUri, "Hello World");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").circuitBreaker().to("direct:foo").to("log:foo").onFallback().transform()
.constant("Fallback message").end().to("log:result").to("mock:result");
from("direct:start.with.timeout.enabled").circuitBreaker().resilience4jConfiguration()
.timeoutEnabled(true).timeoutDuration(2000).end()
.to("direct:foo").to("log:foo").onFallback().transform()
.constant("Fallback message").end().to("log:result").to("mock:result");
from("direct:foo").transform().constant("Bye World");
}
};
}
}
|
MicrometerTest
|
java
|
quarkusio__quarkus
|
integration-tests/awt-packaging/src/main/java/io/quarkus/it/jaxb/Resource.java
|
{
"start": 347,
"end": 892
}
|
class ____ {
private static final Logger LOGGER = Logger.getLogger(Resource.class);
@Path("/book")
@POST
@Consumes(MediaType.APPLICATION_XML)
@Produces(MediaType.TEXT_PLAIN)
public Response postBook(Book book) {
LOGGER.info("Received book: " + book);
try {
return Response.accepted().entity(book.getCover().getHeight(null)).build();
} catch (Exception e) {
LOGGER.error(e);
return Response.serverError().entity(e.getMessage()).build();
}
}
}
|
Resource
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/saml2/Saml2LoginConfigurerTests.java
|
{
"start": 24582,
"end": 24945
}
|
class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
http.saml2Login((login) -> login.authenticationManager(getAuthenticationManagerMock("ROLE_AUTH_MANAGER")));
return http.build();
}
}
@Configuration
@EnableWebSecurity
@Import(Saml2LoginConfigBeans.class)
static
|
Saml2LoginConfigWithCustomAuthenticationManager
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProducerConcurrentTest.java
|
{
"start": 1647,
"end": 3697
}
|
class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testNoConcurrentProducers() throws Exception {
doSendMessages(1, 1);
}
@Test
public void testConcurrentProducers() throws Exception {
doSendMessages(10, 5);
}
private void doSendMessages(int files, int poolSize) throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(files);
getMockEndpoint("mock:result").expectsNoDuplicates(body());
ExecutorService executor = Executors.newFixedThreadPool(poolSize);
try {
for (int i = 0; i < files; i++) {
final int index = i;
executor.submit(() -> {
template.sendBody("direct:start", "Message " + index);
return null;
});
}
MockEndpoint.assertIsSatisfied(context, 20, TimeUnit.SECONDS);
} finally {
executor.shutdownNow();
}
}
@Override
protected String getComponentName() {
return "jms";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("jms:queue:fooJmsProducerConcurrentTest");
from("jms:queue:fooJmsProducerConcurrentTest").to("mock:result");
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
|
JmsProducerConcurrentTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/collection/set/PersistentSetNonLazyTest.java
|
{
"start": 719,
"end": 1046
}
|
class ____ extends PersistentSetTest {
@Test
@JiraKey("HHH-3799")
@FailureExpected(reason = "known to fail with non-lazy collection using query cache")
public void testLoadChildCheckParentContainsChildCache(SessionFactoryScope scope) {
super.testLoadChildCheckParentContainsChildCache( scope );
}
}
|
PersistentSetNonLazyTest
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/internals/PartitionStatesTest.java
|
{
"start": 1080,
"end": 9196
}
|
class ____ {
@Test
public void testSet() {
PartitionStates<String> states = new PartitionStates<>();
LinkedHashMap<TopicPartition, String> map = createMap();
states.set(map);
LinkedHashMap<TopicPartition, String> expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 2), "foo 2");
expected.put(new TopicPartition("foo", 0), "foo 0");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("blah", 1), "blah 1");
expected.put(new TopicPartition("baz", 2), "baz 2");
expected.put(new TopicPartition("baz", 3), "baz 3");
checkState(states, expected);
states.set(new LinkedHashMap<>());
checkState(states, new LinkedHashMap<>());
}
private LinkedHashMap<TopicPartition, String> createMap() {
LinkedHashMap<TopicPartition, String> map = new LinkedHashMap<>();
map.put(new TopicPartition("foo", 2), "foo 2");
map.put(new TopicPartition("blah", 2), "blah 2");
map.put(new TopicPartition("blah", 1), "blah 1");
map.put(new TopicPartition("baz", 2), "baz 2");
map.put(new TopicPartition("foo", 0), "foo 0");
map.put(new TopicPartition("baz", 3), "baz 3");
return map;
}
private void checkState(PartitionStates<String> states, LinkedHashMap<TopicPartition, String> expected) {
assertEquals(expected.keySet(), states.partitionSet());
assertEquals(expected.size(), states.size());
assertEquals(expected, states.partitionStateMap());
}
@Test
public void testMoveToEnd() {
PartitionStates<String> states = new PartitionStates<>();
LinkedHashMap<TopicPartition, String> map = createMap();
states.set(map);
states.moveToEnd(new TopicPartition("baz", 2));
LinkedHashMap<TopicPartition, String> expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 2), "foo 2");
expected.put(new TopicPartition("foo", 0), "foo 0");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("blah", 1), "blah 1");
expected.put(new TopicPartition("baz", 3), "baz 3");
expected.put(new TopicPartition("baz", 2), "baz 2");
checkState(states, expected);
states.moveToEnd(new TopicPartition("foo", 2));
expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 0), "foo 0");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("blah", 1), "blah 1");
expected.put(new TopicPartition("baz", 3), "baz 3");
expected.put(new TopicPartition("baz", 2), "baz 2");
expected.put(new TopicPartition("foo", 2), "foo 2");
checkState(states, expected);
// no-op
states.moveToEnd(new TopicPartition("foo", 2));
checkState(states, expected);
// partition doesn't exist
states.moveToEnd(new TopicPartition("baz", 5));
checkState(states, expected);
// topic doesn't exist
states.moveToEnd(new TopicPartition("aaa", 2));
checkState(states, expected);
}
@Test
public void testUpdateAndMoveToEnd() {
PartitionStates<String> states = new PartitionStates<>();
LinkedHashMap<TopicPartition, String> map = createMap();
states.set(map);
states.updateAndMoveToEnd(new TopicPartition("foo", 0), "foo 0 updated");
LinkedHashMap<TopicPartition, String> expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 2), "foo 2");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("blah", 1), "blah 1");
expected.put(new TopicPartition("baz", 2), "baz 2");
expected.put(new TopicPartition("baz", 3), "baz 3");
expected.put(new TopicPartition("foo", 0), "foo 0 updated");
checkState(states, expected);
states.updateAndMoveToEnd(new TopicPartition("baz", 2), "baz 2 updated");
expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 2), "foo 2");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("blah", 1), "blah 1");
expected.put(new TopicPartition("baz", 3), "baz 3");
expected.put(new TopicPartition("foo", 0), "foo 0 updated");
expected.put(new TopicPartition("baz", 2), "baz 2 updated");
checkState(states, expected);
// partition doesn't exist
states.updateAndMoveToEnd(new TopicPartition("baz", 5), "baz 5 new");
expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 2), "foo 2");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("blah", 1), "blah 1");
expected.put(new TopicPartition("baz", 3), "baz 3");
expected.put(new TopicPartition("foo", 0), "foo 0 updated");
expected.put(new TopicPartition("baz", 2), "baz 2 updated");
expected.put(new TopicPartition("baz", 5), "baz 5 new");
checkState(states, expected);
// topic doesn't exist
states.updateAndMoveToEnd(new TopicPartition("aaa", 2), "aaa 2 new");
expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 2), "foo 2");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("blah", 1), "blah 1");
expected.put(new TopicPartition("baz", 3), "baz 3");
expected.put(new TopicPartition("foo", 0), "foo 0 updated");
expected.put(new TopicPartition("baz", 2), "baz 2 updated");
expected.put(new TopicPartition("baz", 5), "baz 5 new");
expected.put(new TopicPartition("aaa", 2), "aaa 2 new");
checkState(states, expected);
}
@Test
public void testPartitionValues() {
PartitionStates<String> states = new PartitionStates<>();
LinkedHashMap<TopicPartition, String> map = createMap();
states.set(map);
List<String> expected = new ArrayList<>();
expected.add("foo 2");
expected.add("foo 0");
expected.add("blah 2");
expected.add("blah 1");
expected.add("baz 2");
expected.add("baz 3");
assertEquals(expected, states.partitionStateValues());
}
@Test
public void testClear() {
PartitionStates<String> states = new PartitionStates<>();
LinkedHashMap<TopicPartition, String> map = createMap();
states.set(map);
states.clear();
checkState(states, new LinkedHashMap<>());
}
@Test
public void testRemove() {
PartitionStates<String> states = new PartitionStates<>();
LinkedHashMap<TopicPartition, String> map = createMap();
states.set(map);
states.remove(new TopicPartition("foo", 2));
LinkedHashMap<TopicPartition, String> expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 0), "foo 0");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("blah", 1), "blah 1");
expected.put(new TopicPartition("baz", 2), "baz 2");
expected.put(new TopicPartition("baz", 3), "baz 3");
checkState(states, expected);
states.remove(new TopicPartition("blah", 1));
expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 0), "foo 0");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("baz", 2), "baz 2");
expected.put(new TopicPartition("baz", 3), "baz 3");
checkState(states, expected);
states.remove(new TopicPartition("baz", 3));
expected = new LinkedHashMap<>();
expected.put(new TopicPartition("foo", 0), "foo 0");
expected.put(new TopicPartition("blah", 2), "blah 2");
expected.put(new TopicPartition("baz", 2), "baz 2");
checkState(states, expected);
}
}
|
PartitionStatesTest
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/masterreplica/NoOpPushHandler.java
|
{
"start": 223,
"end": 558
}
|
enum ____ implements PushHandler {
INSTANCE;
@Override
public void addListener(PushListener listener) {
}
@Override
public void removeListener(PushListener listener) {
}
@Override
public Collection<PushListener> getPushListeners() {
return Collections.emptyList();
}
}
|
NoOpPushHandler
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/BindToRegistryBeanInitDestroyMethodServiceTest.java
|
{
"start": 1165,
"end": 2685
}
|
class ____ extends ContextTestSupport {
@BindToRegistry
public FooService myFoo() {
return new FooService("World");
}
@Test
public void testStop() throws Exception {
// bean post processing dont run on ContextTestSupport
Runnable task = PluginHelper.getDependencyInjectionAnnotationFactory(context)
.createBindToRegistryFactory("myFoo", myFoo(), FooService.class, "myFoo", false, "start", "stop");
task.run();
FooService foo = context.getRegistry().lookupByNameAndType("myFoo", FooService.class);
assertNotNull(foo);
assertEquals("Started World", foo.getMessage());
// stop camel should trigger destroy
context.stop();
assertEquals("Stopped", foo.getMessage());
}
@Test
public void testUnbind() throws Exception {
// bean post processing dont run on ContextTestSupport
Runnable task = PluginHelper.getDependencyInjectionAnnotationFactory(context)
.createBindToRegistryFactory("myFoo", myFoo(), FooService.class, "myFoo", false, "start", "stop");
task.run();
FooService foo = context.getRegistry().lookupByNameAndType("myFoo", FooService.class);
assertNotNull(foo);
assertEquals("Started World", foo.getMessage());
// unbind should trigger destroy
context.getRegistry().unbind("myFoo");
assertEquals("Stopped", foo.getMessage());
}
public static
|
BindToRegistryBeanInitDestroyMethodServiceTest
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcBuildTimeConfig.java
|
{
"start": 417,
"end": 597
}
|
interface ____ {
/**
* Configuration gRPC dev mode.
*/
@ConfigDocSection(generated = true)
GrpcDevModeConfig devMode();
@ConfigGroup
|
GrpcBuildTimeConfig
|
java
|
spring-projects__spring-framework
|
spring-jms/src/main/java/org/springframework/jms/listener/LocallyExposedJmsResourceHolder.java
|
{
"start": 956,
"end": 1106
}
|
class ____ extends JmsResourceHolder {
public LocallyExposedJmsResourceHolder(Session session) {
super(session);
}
}
|
LocallyExposedJmsResourceHolder
|
java
|
apache__camel
|
core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedInflightRepository.java
|
{
"start": 1598,
"end": 4211
}
|
class ____ extends ManagedService implements ManagedInflightRepositoryMBean {
private final InflightRepository inflightRepository;
public ManagedInflightRepository(CamelContext context, InflightRepository inflightRepository) {
super(context, inflightRepository);
this.inflightRepository = inflightRepository;
}
public InflightRepository getInflightRepository() {
return inflightRepository;
}
@Override
public int getSize() {
return inflightRepository.size();
}
@Override
public boolean isInflightBrowseEnabled() {
return inflightRepository.isInflightBrowseEnabled();
}
@Override
public int size(String routeId) {
return inflightRepository.size(routeId);
}
@Override
public TabularData browse() {
return browse(null, -1, false);
}
@Override
public TabularData browse(int limit, boolean sortByLongestDuration) {
return browse(null, limit, sortByLongestDuration);
}
@Override
public TabularData browse(String routeId, int limit, boolean sortByLongestDuration) {
try {
TabularData answer = new TabularDataSupport(CamelOpenMBeanTypes.listInflightExchangesTabularType());
Collection<InflightRepository.InflightExchange> exchanges
= inflightRepository.browse(routeId, limit, sortByLongestDuration);
for (InflightRepository.InflightExchange entry : exchanges) {
CompositeType ct = CamelOpenMBeanTypes.listInflightExchangesCompositeType();
final CompositeData data = toCompositeData(entry, ct);
answer.put(data);
}
return answer;
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
private static CompositeData toCompositeData(InflightRepository.InflightExchange entry, CompositeType ct)
throws OpenDataException {
String exchangeId = entry.getExchange().getExchangeId();
String fromRouteId = entry.getFromRouteId();
String atRouteId = entry.getAtRouteId();
String nodeId = entry.getNodeId();
String elapsed = Long.toString(entry.getElapsed());
String duration = Long.toString(entry.getDuration());
return new CompositeDataSupport(
ct,
new String[] { "exchangeId", "fromRouteId", "routeId", "nodeId", "elapsed", "duration" },
new Object[] { exchangeId, fromRouteId, atRouteId, nodeId, elapsed, duration });
}
}
|
ManagedInflightRepository
|
java
|
elastic__elasticsearch
|
x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java
|
{
"start": 33147,
"end": 35932
}
|
class ____ extends ParserRuleContext {
public TerminalNode BY() {
return getToken(EqlBaseParser.BY, 0);
}
public List<ExpressionContext> expression() {
return getRuleContexts(ExpressionContext.class);
}
public ExpressionContext expression(int i) {
return getRuleContext(ExpressionContext.class, i);
}
public List<TerminalNode> COMMA() {
return getTokens(EqlBaseParser.COMMA);
}
public TerminalNode COMMA(int i) {
return getToken(EqlBaseParser.COMMA, i);
}
public JoinKeysContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override
public int getRuleIndex() {
return RULE_joinKeys;
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).enterJoinKeys(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).exitJoinKeys(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof EqlBaseVisitor) return ((EqlBaseVisitor<? extends T>) visitor).visitJoinKeys(this);
else return visitor.visitChildren(this);
}
}
public final JoinKeysContext joinKeys() throws RecognitionException {
JoinKeysContext _localctx = new JoinKeysContext(_ctx, getState());
enterRule(_localctx, 18, RULE_joinKeys);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(143);
match(BY);
setState(144);
expression();
setState(149);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la == COMMA) {
{
{
setState(145);
match(COMMA);
setState(146);
expression();
}
}
setState(151);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
} catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
} finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static
|
JoinKeysContext
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/array/AbstractArrayRemoveFunction.java
|
{
"start": 482,
"end": 917
}
|
class ____ extends AbstractSqmSelfRenderingFunctionDescriptor {
public AbstractArrayRemoveFunction() {
super(
"array_remove",
StandardArgumentsValidators.composite(
StandardArgumentsValidators.exactly( 2 ),
ArrayAndElementArgumentValidator.DEFAULT_INSTANCE
),
ArrayViaArgumentReturnTypeResolver.DEFAULT_INSTANCE,
ArrayAndElementArgumentTypeResolver.DEFAULT_INSTANCE
);
}
}
|
AbstractArrayRemoveFunction
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/boolean2darray/Boolean2DArrayAssert_isEmpty_Test.java
|
{
"start": 953,
"end": 1398
}
|
class ____ extends Boolean2DArrayAssertBaseTest {
@Override
protected Boolean2DArrayAssert invoke_api_method() {
assertions.isEmpty();
return null;
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertEmpty(getInfo(assertions), getActual(assertions));
}
@Override
@Test
public void should_return_this() {
// Disable this test because isEmpty is void
}
}
|
Boolean2DArrayAssert_isEmpty_Test
|
java
|
square__moshi
|
moshi/src/test/java/com/squareup/moshi/AdapterMethodsTest.java
|
{
"start": 3006,
"end": 3505
}
|
class ____ {
@FromJson
Point fromJson(JsonReader reader, JsonAdapter<Point> delegate) throws IOException {
reader.beginArray();
Point value = delegate.fromJson(reader);
reader.endArray();
return value;
}
@ToJson
void toJson(JsonWriter writer, Point value, JsonAdapter<Point> delegate) throws IOException {
writer.beginArray();
delegate.toJson(writer, value);
writer.endArray();
}
}
private static final
|
PointJsonAdapterWithDelegate
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/internal/impl/DefaultLifecycleRegistry.java
|
{
"start": 15365,
"end": 19490
}
|
/**
 * Maven's built-in "default" build lifecycle, declared with the phase DSL.
 * Ordering is expressed as a dependency graph ({@code after}/{@code dependencies})
 * rather than a flat list; {@code v3phases()} exposes the legacy Maven 3 flat
 * ordering and {@code aliases()} maps historic v3 phase names onto the graph.
 */
class ____ implements Lifecycle {
    @Override
    public String id() {
        return Lifecycle.DEFAULT;
    }
    // The canonical phase graph. The SNIPPET markers presumably embed this body
    // in the reference documentation, so keep the region itself unchanged.
    // ALL spans the whole build; EACH is instantiated per project.
    @Override
    public Collection<Phase> phases() {
        // START SNIPPET: default
        return List.of(phase(
                ALL,
                children(ALL),
                phase(
                        EACH,
                        phase(VALIDATE, phase(INITIALIZE)),
                        phase(
                                BUILD,
                                after(VALIDATE),
                                phase(SOURCES),
                                phase(RESOURCES),
                                phase(COMPILE, after(SOURCES), dependencies(SCOPE_COMPILE, READY)),
                                phase(READY, after(COMPILE), after(RESOURCES)),
                                phase(PACKAGE, after(READY), dependencies(SCOPE_RUNTIME, PACKAGE))),
                        phase(
                                VERIFY,
                                after(VALIDATE),
                                phase(
                                        UNIT_TEST,
                                        phase(TEST_SOURCES),
                                        phase(TEST_RESOURCES),
                                        phase(
                                                TEST_COMPILE,
                                                after(TEST_SOURCES),
                                                after(READY),
                                                dependencies(SCOPE_TEST_ONLY, READY)),
                                        phase(
                                                TEST,
                                                after(TEST_COMPILE),
                                                after(TEST_RESOURCES),
                                                dependencies(SCOPE_TEST, READY))),
                                phase(INTEGRATION_TEST)),
                        phase(INSTALL, after(PACKAGE)),
                        phase(DEPLOY, after(PACKAGE)))));
        // END SNIPPET: default
    }
    // Flat, Maven 3-style ordering of the same phases, kept for backward
    // compatibility with consumers that expect the legacy sequence.
    @Override
    public Collection<Phase> v3phases() {
        return List.of(phase(
                ALL,
                phase(INITIALIZE, phase(VALIDATE)),
                phase(
                        BUILD,
                        phase(SOURCES),
                        phase(RESOURCES),
                        phase(COMPILE),
                        phase(READY),
                        phase(TEST_SOURCES),
                        phase(TEST_RESOURCES),
                        phase(TEST_COMPILE),
                        phase(TEST),
                        phase(UNIT_TEST),
                        phase(PACKAGE)),
                phase(VERIFY, phase(INTEGRATION_TEST)),
                phase(INSTALL),
                phase(DEPLOY)));
    }
    // Maps Maven 3 phase names that no longer exist as first-class phases onto
    // positions in the new graph via the BEFORE/AFTER pseudo-phase prefixes.
    @Override
    public Collection<Alias> aliases() {
        return List.of(
                alias("generate-sources", SOURCES),
                alias("process-sources", AFTER + SOURCES),
                alias("generate-resources", RESOURCES),
                alias("process-resources", AFTER + RESOURCES),
                alias("process-classes", AFTER + COMPILE),
                alias("generate-test-sources", TEST_SOURCES),
                alias("process-test-sources", AFTER + TEST_SOURCES),
                alias("generate-test-resources", TEST_RESOURCES),
                alias("process-test-resources", AFTER + TEST_RESOURCES),
                alias("process-test-classes", AFTER + TEST_COMPILE),
                alias("prepare-package", BEFORE + PACKAGE),
                alias("pre-integration-test", BEFORE + INTEGRATION_TEST),
                alias("post-integration-test", AFTER + INTEGRATION_TEST));
    }
}
static
|
DefaultLifecycle
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ConsulEndpointBuilderFactory.java
|
{
"start": 1456,
"end": 1589
}
|
interface ____ {
/**
* Builder for endpoint consumers for the Consul component.
*/
public
|
ConsulEndpointBuilderFactory
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/ObjectAssert.java
|
{
"start": 976,
"end": 1149
}
|
/**
 * Concrete assertion class for arbitrary objects: gives the fluent API a
 * non-abstract type to instantiate for plain {@code Object} values.
 *
 * @param <ACTUAL> the type of the value under assertion
 */
class ____<ACTUAL> extends AbstractObjectAssert<ObjectAssert<ACTUAL>, ACTUAL> {
  public ObjectAssert(ACTUAL actual) {
    super(actual, ObjectAssert.class);
  }
}
|
ObjectAssert
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/logging/NacosLoggingAdapterBuilder.java
|
{
"start": 929,
"end": 1131
}
|
interface ____ {
    /**
     * Build {@link NacosLoggingAdapter} implementation.
     *
     * <p>Factory method for the concrete adapter. NOTE(review): assumed to be
     * cheap, side-effect free, and non-null returning — confirm with call sites.
     *
     * @return the {@link NacosLoggingAdapter} produced by this builder
     */
    NacosLoggingAdapter build();
}
|
NacosLoggingAdapterBuilder
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/reflect/testbed/PublicChild.java
|
{
"start": 870,
"end": 1216
}
|
/**
 * Reflection test fixture: overrides parent methods without re-annotating them,
 * and declares its own annotated public and private methods.
 */
class ____ extends Parent {
    static final String VALUE = "child";
    // NOTE(review): overridden without re-applying any parent annotation —
    // presumably to test that method annotations are not inherited; confirm
    // against the reflection tests that use this fixture.
    @Override
    public void parentNotAnnotatedMethod() {
    }
    @Override
    public void parentProtectedAnnotatedMethod() {
    }
    @Annotated
    private void privateAnnotatedMethod() {
    }
    @Annotated
    public void publicAnnotatedMethod() {
    }
}
|
PublicChild
|
java
|
apache__camel
|
components/camel-dropbox/src/generated/java/org/apache/camel/component/dropbox/DropboxEndpointConfigurer.java
|
{
"start": 734,
"end": 7263
}
|
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
DropboxEndpoint target = (DropboxEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": target.getConfiguration().setAccessToken(property(camelContext, java.lang.String.class, value)); return true;
case "apikey":
case "apiKey": target.getConfiguration().setApiKey(property(camelContext, java.lang.String.class, value)); return true;
case "apisecret":
case "apiSecret": target.getConfiguration().setApiSecret(property(camelContext, java.lang.String.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "client": target.getConfiguration().setClient(property(camelContext, com.dropbox.core.v2.DbxClientV2.class, value)); return true;
case "clientidentifier":
case "clientIdentifier": target.getConfiguration().setClientIdentifier(property(camelContext, java.lang.String.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "expirein":
case "expireIn": target.getConfiguration().setExpireIn(property(camelContext, java.lang.Long.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "localpath":
case "localPath": target.getConfiguration().setLocalPath(property(camelContext, java.lang.String.class, value)); return true;
case "newremotepath":
case "newRemotePath": target.getConfiguration().setNewRemotePath(property(camelContext, java.lang.String.class, value)); return true;
case "query": target.getConfiguration().setQuery(property(camelContext, java.lang.String.class, value)); return true;
case "refreshtoken":
case "refreshToken": target.getConfiguration().setRefreshToken(property(camelContext, java.lang.String.class, value)); return true;
case "remotepath":
case "remotePath": target.getConfiguration().setRemotePath(property(camelContext, java.lang.String.class, value)); return true;
case "uploadmode":
case "uploadMode": target.getConfiguration().setUploadMode(property(camelContext, org.apache.camel.component.dropbox.util.DropboxUploadMode.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": return java.lang.String.class;
case "apikey":
case "apiKey": return java.lang.String.class;
case "apisecret":
case "apiSecret": return java.lang.String.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "client": return com.dropbox.core.v2.DbxClientV2.class;
case "clientidentifier":
case "clientIdentifier": return java.lang.String.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "expirein":
case "expireIn": return java.lang.Long.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "localpath":
case "localPath": return java.lang.String.class;
case "newremotepath":
case "newRemotePath": return java.lang.String.class;
case "query": return java.lang.String.class;
case "refreshtoken":
case "refreshToken": return java.lang.String.class;
case "remotepath":
case "remotePath": return java.lang.String.class;
case "uploadmode":
case "uploadMode": return org.apache.camel.component.dropbox.util.DropboxUploadMode.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
DropboxEndpoint target = (DropboxEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": return target.getConfiguration().getAccessToken();
case "apikey":
case "apiKey": return target.getConfiguration().getApiKey();
case "apisecret":
case "apiSecret": return target.getConfiguration().getApiSecret();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "client": return target.getConfiguration().getClient();
case "clientidentifier":
case "clientIdentifier": return target.getConfiguration().getClientIdentifier();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "expirein":
case "expireIn": return target.getConfiguration().getExpireIn();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "localpath":
case "localPath": return target.getConfiguration().getLocalPath();
case "newremotepath":
case "newRemotePath": return target.getConfiguration().getNewRemotePath();
case "query": return target.getConfiguration().getQuery();
case "refreshtoken":
case "refreshToken": return target.getConfiguration().getRefreshToken();
case "remotepath":
case "remotePath": return target.getConfiguration().getRemotePath();
case "uploadmode":
case "uploadMode": return target.getConfiguration().getUploadMode();
default: return null;
}
}
}
|
DropboxEndpointConfigurer
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerManagerEvent.java
|
{
"start": 903,
"end": 1075
}
|
/**
 * Base event dispatched to the NodeManager's container manager; carries only
 * the {@link ContainerManagerEventType} it was raised with.
 */
class ____ extends
    AbstractEvent<ContainerManagerEventType> {
  public ContainerManagerEvent(ContainerManagerEventType type) {
    super(type);
  }
}
|
ContainerManagerEvent
|
java
|
apache__maven
|
impl/maven-di/src/main/java/org/apache/maven/di/impl/Types.java
|
{
"start": 19342,
"end": 22523
}
|
/**
 * Simple {@link ParameterizedType} value object. Equality and hash code are
 * computed against the interface, so instances compare equal to any other
 * {@link ParameterizedType} with the same raw type, owner type and arguments.
 */
class ____ implements ParameterizedType {
    private final @Nullable Type ownerType;
    private final Type rawType;
    private final Type[] actualTypeArguments;

    ParameterizedTypeImpl(@Nullable Type ownerType, Type rawType, Type[] actualTypeArguments) {
        this.ownerType = ownerType;
        this.rawType = rawType;
        this.actualTypeArguments = actualTypeArguments;
    }

    @Override
    public Type getRawType() {
        return rawType;
    }

    @Override
    public Type[] getActualTypeArguments() {
        return actualTypeArguments;
    }

    @Override
    public @Nullable Type getOwnerType() {
        return ownerType;
    }

    @Override
    public int hashCode() {
        // XOR composition is order-independent; this yields the same value as
        // owner ^ arguments ^ raw.
        int hash = rawType.hashCode();
        hash ^= Objects.hashCode(ownerType);
        hash ^= Arrays.hashCode(actualTypeArguments);
        return hash;
    }

    @Override
    public boolean equals(Object other) {
        // Accept any ParameterizedType implementation, not just this class.
        if (!(other instanceof ParameterizedType that)) {
            return false;
        }
        if (!rawType.equals(that.getRawType())) {
            return false;
        }
        if (!Objects.equals(ownerType, that.getOwnerType())) {
            return false;
        }
        return Arrays.equals(actualTypeArguments, that.getActualTypeArguments());
    }

    @Override
    public String toString() {
        // E.g. "java.util.List<java.lang.String>"; an empty argument list renders as "<>".
        StringBuilder text = new StringBuilder(rawType.getTypeName());
        text.append('<');
        for (int i = 0; i < actualTypeArguments.length; i++) {
            if (i > 0) {
                text.append(", ");
            }
            text.append(Types.toString(actualTypeArguments[i]));
        }
        return text.append('>').toString();
    }
}
/**
* Creates an instance of {@link WildcardType} bound by upper and lower bounds
*
* @param upperBounds a wildcard upper bound types
* @param lowerBounds a wildcard lower bound types
* @return an instance of {@link WildcardType}
*/
public static WildcardType wildcardType(Type[] upperBounds, Type[] lowerBounds) {
return new WildcardTypeImpl(upperBounds, lowerBounds);
}
    /**
     * Returns an instance of {@link WildcardType} that matches any type
     * <p>
     * E.g. {@code <?>}
     *
     * @return the shared, unbounded wildcard instance
     * @see #wildcardType(Type[], Type[])
     */
    public static WildcardType wildcardTypeAny() {
        return WILDCARD_TYPE_ANY;
    }
/**
* Creates an instance of {@link WildcardType} bound by a single upper bound
* <p>
* E.g. {@code <? extends UpperBound>}
*
* @param upperBound a wildcard upper bound type
* @return an instance of {@link WildcardType}
* @see #wildcardType(Type[], Type[])
*/
public static WildcardType wildcardTypeExtends(Type upperBound) {
return new WildcardTypeImpl(new Type[] {upperBound}, NO_TYPES);
}
/**
* Creates an instance of {@link WildcardType} bound by a single lower bound
* <p>
* E.g. {@code <? super LowerBound>}
*
* @param lowerBound a wildcard lower bound type
* @return an instance of {@link WildcardType}
* @see #wildcardType(Type[], Type[])
*/
public static WildcardType wildcardTypeSuper(Type lowerBound) {
return new WildcardTypeImpl(NO_TYPES, new Type[] {lowerBound});
}
public static
|
ParameterizedTypeImpl
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/filter/BurstFilter.java
|
{
"start": 2560,
"end": 9642
}
|
class ____ extends AbstractFilter {
private static final long NANOS_IN_SECONDS = 1000000000;
private static final int DEFAULT_RATE = 10;
private static final int DEFAULT_RATE_MULTIPLE = 100;
private static final int HASH_SHIFT = 32;
/**
* Level of messages to be filtered. Anything at or below this level will be
* filtered out if <code>maxBurst</code> has been exceeded. The default is
* WARN meaning any messages that are higher than warn will be logged
* regardless of the size of a burst.
*/
private final Level level;
private final long burstInterval;
private final DelayQueue<LogDelay> history = new DelayQueue<>();
private final Queue<LogDelay> available = new ConcurrentLinkedQueue<>();
    // Factory hook for delay slots; package-private, presumably so unit tests
    // can observe or stub slot creation — confirm against the test suite.
    static LogDelay createLogDelay(final long expireTime) {
        return new LogDelay(expireTime);
    }
    /**
     * Creates the filter and pre-fills the pool with {@code maxBurst} reusable
     * {@link LogDelay} slots.
     *
     * @param level messages at or below this level are subject to rate limiting
     * @param rate the allowed average rate, in events per second
     * @param maxBurst the maximum number of events allowed inside one interval
     * @param onMatch result returned when an event is allowed through
     * @param onMismatch result returned when an event is filtered out
     */
    private BurstFilter(
            final Level level, final float rate, final long maxBurst, final Result onMatch, final Result onMismatch) {
        super(onMatch, onMismatch);
        this.level = level;
        // Window (in ns) during which maxBurst events are permitted: maxBurst / rate seconds.
        this.burstInterval = (long) (NANOS_IN_SECONDS * (maxBurst / rate));
        for (int i = 0; i < maxBurst; ++i) {
            available.add(createLogDelay(0));
        }
    }
@Override
public Result filter(
final Logger logger, final Level level, final Marker marker, final String msg, final Object... params) {
return filter(level);
}
@Override
public Result filter(
final Logger logger, final Level level, final Marker marker, final Object msg, final Throwable t) {
return filter(level);
}
@Override
public Result filter(
final Logger logger, final Level level, final Marker marker, final Message msg, final Throwable t) {
return filter(level);
}
@Override
public Result filter(final LogEvent event) {
return filter(event.getLevel());
}
@Override
public Result filter(
final Logger logger, final Level level, final Marker marker, final String msg, final Object p0) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7,
final Object p8) {
return filter(level);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7,
final Object p8,
final Object p9) {
return filter(level);
}
    /**
     * Decide if we're going to log <code>event</code> based on whether the
     * maximum burst of log statements has been exceeded.
     *
     * @param level The log level.
     * @return The onMatch value if the filter passes, onMismatch otherwise.
     */
    private Result filter(final Level level) {
        if (this.level.isMoreSpecificThan(level)) {
            // Recycle every slot whose burst interval has elapsed; DelayQueue.poll()
            // returns only expired entries, so unexpired history is left in place.
            LogDelay delay = history.poll();
            while (delay != null) {
                available.add(delay);
                delay = history.poll();
            }
            delay = available.poll();
            if (delay != null) {
                // A slot is free: consume it for this event and start its delay window.
                delay.setDelay(burstInterval);
                history.add(delay);
                return onMatch;
            }
            // No free slot: the burst limit has been reached within the interval.
            return onMismatch;
        }
        // Events above the configured level always pass, regardless of burst state.
        return onMatch;
    }
/**
* Returns the number of available slots. Used for unit testing.
* @return The number of available slots.
*/
public int getAvailable() {
return available.size();
}
    /**
     * Clear the history. Used for unit testing.
     */
    public void clear() {
        // Iterate rather than poll(): DelayQueue.poll() returns only expired
        // entries, but this must reclaim unexpired ones too. DelayQueue's
        // iterator is weakly consistent, so removing while iterating is safe.
        for (final LogDelay delay : history) {
            history.remove(delay);
            available.add(delay);
        }
    }
@Override
public String toString() {
return "level=" + level.toString() + ", interval=" + burstInterval + ", max=" + history.size();
}
/**
* Delay object to represent each log event that has occurred within the timespan.
*
* Consider this
|
BurstFilter
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/JobKillCommitter.java
|
{
"start": 2012,
"end": 2211
}
|
/**
 * Test committer whose {@link #commitJob} always throws — presumably used to
 * exercise the framework's commit-failure / job-kill handling; confirm with
 * the tests that configure it.
 */
class ____ extends FileOutputCommitter {
  @Override
  public void commitJob(JobContext context) throws IOException {
    // Fail unconditionally so commit-failure handling is exercised.
    throw new IOException();
  }
}
/**
* The
|
CommitterWithFailCleanup
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/handler/RoutePredicateHandlerMapping.java
|
{
"start": 7111,
"end": 7373
}
|
enum ____ {

	// Relationship between the management port and the main server port.
	/**
	 * The management port has been disabled.
	 */
	DISABLED,

	/**
	 * The management port is the same as the server port.
	 */
	SAME,

	/**
	 * The management port and server port are different.
	 */
	DIFFERENT;

}
}
|
ManagementPortType
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/executor/RedissonClassLoader.java
|
{
"start": 691,
"end": 939
}
|
/**
 * ClassLoader that lets callers register classes from raw bytecode —
 * presumably class files shipped to a remote executor; confirm with callers.
 * NOTE(review): loadClass(String, byte[]) overloads, not overrides,
 * ClassLoader.loadClass(String); normal resolution stays parent-first.
 */
class ____ extends ClassLoader {
    public RedissonClassLoader(ClassLoader parent) {
        super(parent);
    }
    // Defines `name` from the given class-file bytes in this loader. The
    // returned Class is discarded; it becomes findable via loadClass(String).
    public void loadClass(String name, byte[] body) {
        defineClass(name, body, 0, body.length);
    }
}
|
RedissonClassLoader
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/dualvalue/DualValue_isExpectedAThrowable_Test.java
|
{
"start": 1091,
"end": 2391
}
|
class ____ {
private static final List<String> PATH = list("foo", "bar");
@ParameterizedTest
@MethodSource
void isExpectedAThrowable_should_return_true_when_expected_is_a_throwable(Throwable expected) {
// GIVEN
DualValue dualValue = new DualValue(PATH, "unused", expected);
// WHEN
boolean isExpectedAThrowable = dualValue.isExpectedAThrowable();
// THEN
then(isExpectedAThrowable).isTrue();
}
static Stream<Throwable> isExpectedAThrowable_should_return_true_when_expected_is_a_throwable() throws Exception {
return Stream.of(new RuntimeException("boom"),
new Throwable("bam"));
}
@Test
void isExpectedAThrowable_should_return_false_when_expected_is_not_a_throwable() {
// GIVEN
DualValue dualValue = new DualValue(PATH, "unused", Pair.of(1, "a"));
// WHEN
boolean isExpectedAThrowable = dualValue.isExpectedAThrowable();
// THEN
then(isExpectedAThrowable).isFalse();
}
@Test
void isExpectedAThrowable_should_return_false_when_expected_is_null() {
// GIVEN
DualValue dualValue = new DualValue(PATH, "unused", null);
// WHEN
boolean isExpectedAThrowable = dualValue.isExpectedAThrowable();
// THEN
then(isExpectedAThrowable).isFalse();
}
}
|
DualValue_isExpectedAThrowable_Test
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableCreateTest.java
|
{
"start": 1277,
"end": 38151
}
|
class ____ extends RxJavaTest {
@Test
public void basic() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Disposable d = Disposable.empty();
Flowable.<Integer>create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e.setDisposable(d);
e.onNext(1);
e.onNext(2);
e.onNext(3);
e.onComplete();
e.onError(new TestException("first"));
e.onNext(4);
e.onError(new TestException("second"));
e.onComplete();
}
}, BackpressureStrategy.BUFFER)
.test()
.assertResult(1, 2, 3);
assertTrue(d.isDisposed());
TestHelper.assertUndeliverable(errors, 0, TestException.class, "first");
TestHelper.assertUndeliverable(errors, 1, TestException.class, "second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void basicWithCancellable() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Disposable d1 = Disposable.empty();
final Disposable d2 = Disposable.empty();
Flowable.<Integer>create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e.setDisposable(d1);
e.setCancellable(new Cancellable() {
@Override
public void cancel() throws Exception {
d2.dispose();
}
});
e.onNext(1);
e.onNext(2);
e.onNext(3);
e.onComplete();
e.onError(new TestException("first"));
e.onNext(4);
e.onError(new TestException("second"));
e.onComplete();
}
}, BackpressureStrategy.BUFFER)
.test()
.assertResult(1, 2, 3);
assertTrue(d1.isDisposed());
assertTrue(d2.isDisposed());
TestHelper.assertUndeliverable(errors, 0, TestException.class, "first");
TestHelper.assertUndeliverable(errors, 1, TestException.class, "second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void basicWithError() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Disposable d = Disposable.empty();
Flowable.<Integer>create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e.setDisposable(d);
e.onNext(1);
e.onNext(2);
e.onNext(3);
e.onError(new TestException());
e.onComplete();
e.onNext(4);
e.onError(new TestException("second"));
}
}, BackpressureStrategy.BUFFER)
.test()
.assertFailure(TestException.class, 1, 2, 3);
assertTrue(d.isDisposed());
TestHelper.assertUndeliverable(errors, 0, TestException.class, "second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void basicSerialized() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Disposable d = Disposable.empty();
Flowable.<Integer>create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e = e.serialize();
e.setDisposable(d);
e.onNext(1);
e.onNext(2);
e.onNext(3);
e.onComplete();
e.onError(new TestException("first"));
e.onNext(4);
e.onError(new TestException("second"));
e.onComplete();
}
}, BackpressureStrategy.BUFFER)
.test()
.assertResult(1, 2, 3);
assertTrue(d.isDisposed());
TestHelper.assertUndeliverable(errors, 0, TestException.class, "first");
TestHelper.assertUndeliverable(errors, 1, TestException.class, "second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void basicWithErrorSerialized() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Disposable d = Disposable.empty();
Flowable.<Integer>create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e = e.serialize();
e.setDisposable(d);
e.onNext(1);
e.onNext(2);
e.onNext(3);
e.onError(new TestException());
e.onComplete();
e.onNext(4);
e.onError(new TestException("second"));
}
}, BackpressureStrategy.BUFFER)
.test()
.assertFailure(TestException.class, 1, 2, 3);
assertTrue(d.isDisposed());
TestHelper.assertUndeliverable(errors, 0, TestException.class, "second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void wrap() {
Flowable.fromPublisher(new Publisher<Integer>() {
@Override
public void subscribe(Subscriber<? super Integer> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
subscriber.onNext(1);
subscriber.onNext(2);
subscriber.onNext(3);
subscriber.onNext(4);
subscriber.onNext(5);
subscriber.onComplete();
}
})
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void unsafe() {
Flowable.unsafeCreate(new Publisher<Integer>() {
@Override
public void subscribe(Subscriber<? super Integer> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
subscriber.onNext(1);
subscriber.onNext(2);
subscriber.onNext(3);
subscriber.onNext(4);
subscriber.onNext(5);
subscriber.onComplete();
}
})
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test(expected = IllegalArgumentException.class)
public void unsafeWithFlowable() {
Flowable.unsafeCreate(Flowable.just(1));
}
@Test
public void createNullValueBuffer() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.BUFFER)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueLatest() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.LATEST)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueError() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.ERROR)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueDrop() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.DROP)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueMissing() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.MISSING)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueBufferSerialized() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e = e.serialize();
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.BUFFER)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueLatestSerialized() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e = e.serialize();
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.LATEST)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueErrorSerialized() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e = e.serialize();
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.ERROR)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueDropSerialized() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e = e.serialize();
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.DROP)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void createNullValueMissingSerialized() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final Throwable[] error = { null };
Flowable.create(new FlowableOnSubscribe<Integer>() {
@Override
public void subscribe(FlowableEmitter<Integer> e) throws Exception {
e = e.serialize();
try {
e.onNext(null);
e.onNext(1);
e.onError(new TestException());
e.onComplete();
} catch (Throwable ex) {
error[0] = ex;
}
}
}, BackpressureStrategy.MISSING)
.test()
.assertFailure(NullPointerException.class);
assertNull(error[0]);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void onErrorRace() {
for (BackpressureStrategy m : BackpressureStrategy.values()) {
Flowable<Object> source = Flowable.create(new FlowableOnSubscribe<Object>() {
@Override
public void subscribe(FlowableEmitter<Object> e) throws Exception {
final FlowableEmitter<Object> f = e.serialize();
final TestException ex = new TestException();
Runnable r1 = new Runnable() {
@Override
public void run() {
f.onError(null);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
f.onError(ex);
}
};
TestHelper.race(r1, r2);
}
}, m);
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
source
.test()
.assertFailure(Throwable.class);
}
} finally {
RxJavaPlugins.reset();
}
assertFalse(errors.isEmpty());
}
}
@Test
public void onCompleteRace() {
for (BackpressureStrategy m : BackpressureStrategy.values()) {
Flowable<Object> source = Flowable.create(new FlowableOnSubscribe<Object>() {
@Override
public void subscribe(FlowableEmitter<Object> e) throws Exception {
final FlowableEmitter<Object> f = e.serialize();
Runnable r1 = new Runnable() {
@Override
public void run() {
f.onComplete();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
f.onComplete();
}
};
TestHelper.race(r1, r2);
}
}, m);
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
source
.test()
.assertResult();
}
}
}
    // onNext(null) must surface as a NullPointerException delivered to the
    // subscriber, for every backpressure strategy.
    @Test
    public void nullValue() {
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            Flowable.create(new FlowableOnSubscribe<Object>() {
                @Override
                public void subscribe(FlowableEmitter<Object> e) throws Exception {
                    e.onNext(null);
                }
            }, m)
            .test()
            .assertFailure(NullPointerException.class);
        }
    }
@Test
public void nullThrowable() {
for (BackpressureStrategy m : BackpressureStrategy.values()) {
System.out.println(m);
Flowable.create(new FlowableOnSubscribe<Object>() {
@Override
public void subscribe(FlowableEmitter<Object> e) throws Exception {
e.onError(null);
}
}, m)
.test()
.assertFailure(NullPointerException.class);
}
}
    @Test
    public void serializedConcurrentOnNextOnError() {
        // Concurrent onNext calls racing with an onError through a serialized
        // emitter must still terminate the subscriber with exactly that error.
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            Flowable.create(new FlowableOnSubscribe<Object>() {
                @Override
                public void subscribe(FlowableEmitter<Object> e) throws Exception {
                    final FlowableEmitter<Object> f = e.serialize();
                    Runnable r1 = new Runnable() {
                        @Override
                        public void run() {
                            for (int i = 0; i < 1000; i++) {
                                f.onNext(1);
                            }
                        }
                    };
                    Runnable r2 = new Runnable() {
                        @Override
                        public void run() {
                            for (int i = 0; i < 100; i++) {
                                f.onNext(1);
                            }
                            f.onError(new TestException());
                        }
                    };
                    TestHelper.race(r1, r2);
                }
            }, m)
            .to(TestHelper.<Object>testConsumer())
            .assertSubscribed()
            // the error must win over completion; item count is unspecified
            .assertNotComplete()
            .assertError(TestException.class);
        }
    }
@Test
public void callbackThrows() {
for (BackpressureStrategy m : BackpressureStrategy.values()) {
Flowable.create(new FlowableOnSubscribe<Object>() {
@Override
public void subscribe(FlowableEmitter<Object> e) throws Exception {
throw new TestException();
}
}, m)
.test()
.assertFailure(TestException.class);
}
}
@Test
public void nullValueSync() {
for (BackpressureStrategy m : BackpressureStrategy.values()) {
Flowable.create(new FlowableOnSubscribe<Object>() {
@Override
public void subscribe(FlowableEmitter<Object> e) throws Exception {
e.serialize().onNext(null);
}
}, m)
.test()
.assertFailure(NullPointerException.class);
}
}
    @Test
    public void createNullValue() {
        // After a null onNext terminates the sequence with an NPE, further
        // emitter calls must not throw back at the producer; the late
        // TestException is expected to surface as an undeliverable plugin error.
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            List<Throwable> errors = TestHelper.trackPluginErrors();
            try {
                final Throwable[] error = { null };
                Flowable.create(new FlowableOnSubscribe<Integer>() {
                    @Override
                    public void subscribe(FlowableEmitter<Integer> e) throws Exception {
                        try {
                            e.onNext(null);
                            e.onNext(1);
                            e.onError(new TestException());
                            e.onComplete();
                        } catch (Throwable ex) {
                            // emitter methods must not throw after termination
                            error[0] = ex;
                        }
                    }
                }, m)
                .test()
                .assertFailure(NullPointerException.class);
                assertNull(error[0]);
                TestHelper.assertUndeliverable(errors, 0, TestException.class);
            } finally {
                RxJavaPlugins.reset();
            }
        }
    }
    @Test
    public void onErrorCrash() {
        // If the subscriber's onError itself throws, the exception propagates
        // back to the emitter call site, but the emitter's resource must still
        // be disposed.
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            Flowable.create(new FlowableOnSubscribe<Object>() {
                @Override
                public void subscribe(FlowableEmitter<Object> e) throws Exception {
                    Disposable d = Disposable.empty();
                    e.setDisposable(d);
                    try {
                        e.onError(new IOException());
                        fail("Should have thrown");
                    } catch (TestException ex) {
                        // expected
                    }
                    // the associated Disposable is disposed despite the crash
                    assertTrue(d.isDisposed());
                }
            }, m)
            .subscribe(new FlowableSubscriber<Object>() {
                @Override
                public void onSubscribe(Subscription s) {
                }
                @Override
                public void onNext(Object value) {
                }
                @Override
                public void onError(Throwable e) {
                    throw new TestException();
                }
                @Override
                public void onComplete() {
                }
            });
        }
    }
    @Test
    public void onCompleteCrash() {
        // If the subscriber's onComplete throws, the exception propagates back
        // to the emitter call site, but the emitter's resource must still be
        // disposed.
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            Flowable.create(new FlowableOnSubscribe<Object>() {
                @Override
                public void subscribe(FlowableEmitter<Object> e) throws Exception {
                    Disposable d = Disposable.empty();
                    e.setDisposable(d);
                    try {
                        e.onComplete();
                        fail("Should have thrown");
                    } catch (TestException ex) {
                        // expected
                    }
                    // the associated Disposable is disposed despite the crash
                    assertTrue(d.isDisposed());
                }
            }, m)
            .subscribe(new FlowableSubscriber<Object>() {
                @Override
                public void onSubscribe(Subscription s) {
                }
                @Override
                public void onNext(Object value) {
                }
                @Override
                public void onError(Throwable e) {
                }
                @Override
                public void onComplete() {
                    throw new TestException();
                }
            });
        }
    }
    @Test
    public void createNullValueSerialized() {
        // Same contract as createNullValue, but exercised through the
        // serialized emitter wrapper: a null onNext fails the sequence with an
        // NPE, later calls do not throw, and the late TestException becomes an
        // undeliverable plugin error.
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            List<Throwable> errors = TestHelper.trackPluginErrors();
            try {
                final Throwable[] error = { null };
                Flowable.create(new FlowableOnSubscribe<Integer>() {
                    @Override
                    public void subscribe(FlowableEmitter<Integer> e) throws Exception {
                        e = e.serialize();
                        try {
                            e.onNext(null);
                            e.onNext(1);
                            e.onError(new TestException());
                            e.onComplete();
                        } catch (Throwable ex) {
                            // emitter methods must not throw after termination
                            error[0] = ex;
                        }
                    }
                }, m)
                .test()
                .assertFailure(NullPointerException.class);
                assertNull(error[0]);
                TestHelper.assertUndeliverable(errors, 0, TestException.class);
            } finally {
                RxJavaPlugins.reset();
            }
        }
    }
@Test
public void nullThrowableSync() {
for (BackpressureStrategy m : BackpressureStrategy.values()) {
Flowable.create(new FlowableOnSubscribe<Object>() {
@Override
public void subscribe(FlowableEmitter<Object> e) throws Exception {
e.serialize().onError(null);
}
}, m)
.test()
.assertFailure(NullPointerException.class);
}
}
    @Test
    public void serializedConcurrentOnNext() {
        // Two threads pushing items through the same serialized emitter must
        // not lose or corrupt items; take() caps the total so the sequence
        // completes deterministically with exactly RACE_DEFAULT_LOOPS items.
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            Flowable.create(new FlowableOnSubscribe<Object>() {
                @Override
                public void subscribe(FlowableEmitter<Object> e) throws Exception {
                    final FlowableEmitter<Object> f = e.serialize();
                    Runnable r1 = new Runnable() {
                        @Override
                        public void run() {
                            for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
                                f.onNext(1);
                            }
                        }
                    };
                    // both racers run the same emission loop
                    TestHelper.race(r1, r1);
                }
            }, m)
            .take(TestHelper.RACE_DEFAULT_LOOPS)
            .to(TestHelper.<Object>testConsumer())
            .assertSubscribed()
            .assertValueCount(TestHelper.RACE_DEFAULT_LOOPS)
            .assertComplete()
            .assertNoErrors();
        }
    }
    @Test
    public void serializedConcurrentOnNextOnComplete() {
        // onNext racing with onComplete through a serialized emitter: the
        // sequence must complete cleanly and deliver at least the 100 items
        // the completing thread emitted before signaling onComplete.
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            TestSubscriberEx<Object> ts = Flowable.create(new FlowableOnSubscribe<Object>() {
                @Override
                public void subscribe(FlowableEmitter<Object> e) throws Exception {
                    final FlowableEmitter<Object> f = e.serialize();
                    Runnable r1 = new Runnable() {
                        @Override
                        public void run() {
                            for (int i = 0; i < 1000; i++) {
                                f.onNext(1);
                            }
                        }
                    };
                    Runnable r2 = new Runnable() {
                        @Override
                        public void run() {
                            for (int i = 0; i < 100; i++) {
                                f.onNext(1);
                            }
                            f.onComplete();
                        }
                    };
                    TestHelper.race(r1, r2);
                }
            }, m)
            .to(TestHelper.<Object>testConsumer())
            .assertSubscribed()
            .assertComplete()
            .assertNoErrors();
            // r2 emits 100 items before completing, so at least that many
            // must have been delivered; r1's items may be truncated
            int c = ts.values().size();
            assertTrue("" + c, c >= 100);
        }
    }
    @Test
    public void serialized() {
        // Basic serialized-emitter contract: serialize() is idempotent, the
        // cancelled flag is shared with the underlying emitter, and the
        // registered Cancellable runs exactly once upon completion.
        for (BackpressureStrategy m : BackpressureStrategy.values()) {
            List<Throwable> errors = TestHelper.trackPluginErrors();
            try {
                Flowable.create(new FlowableOnSubscribe<Object>() {
                    @Override
                    public void subscribe(FlowableEmitter<Object> e) throws Exception {
                        FlowableEmitter<Object> f = e.serialize();
                        // serializing an already-serialized emitter is a no-op
                        assertSame(f, f.serialize());
                        assertFalse(f.isCancelled());
                        final int[] calls = { 0 };
                        f.setCancellable(new Cancellable() {
                            @Override
                            public void cancel() throws Exception {
                                calls[0]++;
                            }
                        });
                        // completing the raw emitter cancels the serialized view
                        e.onComplete();
                        assertTrue(f.isCancelled());
                        assertEquals(1, calls[0]);
                    }
                }, m)
                .test()
                .assertResult();
                assertTrue(errors.toString(), errors.isEmpty());
            } finally {
                RxJavaPlugins.reset();
            }
        }
    }
    @Test
    public void tryOnError() {
        // After take(1) cancels the upstream, tryOnError must return false and
        // silently drop the error instead of routing it to the plugin handler.
        for (BackpressureStrategy strategy : BackpressureStrategy.values()) {
            List<Throwable> errors = TestHelper.trackPluginErrors();
            try {
                final Boolean[] response = { null };
                Flowable.create(new FlowableOnSubscribe<Object>() {
                    @Override
                    public void subscribe(FlowableEmitter<Object> e) throws Exception {
                        e.onNext(1);
                        response[0] = e.tryOnError(new TestException());
                    }
                }, strategy)
                .take(1)
                .test()
                .withTag(strategy.toString())
                .assertResult(1);
                // the error was not deliverable, and not leaked to the plugins
                assertFalse(response[0]);
                assertTrue(strategy + ": " + errors.toString(), errors.isEmpty());
            } finally {
                RxJavaPlugins.reset();
            }
        }
    }
    @Test
    public void tryOnErrorSerialized() {
        // Same contract as tryOnError, exercised through the serialized
        // emitter wrapper: after downstream cancellation, tryOnError returns
        // false and the error is dropped without a plugin-handler leak.
        for (BackpressureStrategy strategy : BackpressureStrategy.values()) {
            List<Throwable> errors = TestHelper.trackPluginErrors();
            try {
                final Boolean[] response = { null };
                Flowable.create(new FlowableOnSubscribe<Object>() {
                    @Override
                    public void subscribe(FlowableEmitter<Object> e) throws Exception {
                        e = e.serialize();
                        e.onNext(1);
                        response[0] = e.tryOnError(new TestException());
                    }
                }, strategy)
                .take(1)
                .test()
                .withTag(strategy.toString())
                .assertResult(1);
                assertFalse(response[0]);
                assertTrue(strategy + ": " + errors.toString(), errors.isEmpty());
            } finally {
                RxJavaPlugins.reset();
            }
        }
    }
    @SuppressWarnings("rawtypes")
    @Test
    public void emittersHasToString() {
        // Each backpressure strategy is backed by a dedicated emitter
        // implementation; both the emitter and its serialized wrapper must
        // identify that implementation class in their toString() output.
        Map<BackpressureStrategy, Class<? extends FlowableEmitter>> emitterMap =
                new HashMap<>();
        emitterMap.put(BackpressureStrategy.MISSING, FlowableCreate.MissingEmitter.class);
        emitterMap.put(BackpressureStrategy.ERROR, FlowableCreate.ErrorAsyncEmitter.class);
        emitterMap.put(BackpressureStrategy.DROP, FlowableCreate.DropAsyncEmitter.class);
        emitterMap.put(BackpressureStrategy.LATEST, FlowableCreate.LatestAsyncEmitter.class);
        emitterMap.put(BackpressureStrategy.BUFFER, FlowableCreate.BufferAsyncEmitter.class);
        for (final Map.Entry<BackpressureStrategy, Class<? extends FlowableEmitter>> entry : emitterMap.entrySet()) {
            Flowable.create(new FlowableOnSubscribe<Object>() {
                @Override
                public void subscribe(FlowableEmitter<Object> emitter) throws Exception {
                    assertTrue(emitter.toString().contains(entry.getValue().getSimpleName()));
                    assertTrue(emitter.serialize().toString().contains(entry.getValue().getSimpleName()));
                }
            }, entry.getKey()).test().assertEmpty();
        }
    }
    @Test
    public void serializedMissingMoreWorkWithComplete() {
        // Reentrant case: while the serialized emitter is delivering item 1,
        // doOnNext pushes item 2 and completes through the same emitter; the
        // queued work must still be drained before the completion is emitted.
        AtomicReference<FlowableEmitter<Integer>> ref = new AtomicReference<>();
        Flowable.<Integer>create(emitter -> {
            emitter = emitter.serialize();
            ref.set(emitter);
            // MISSING strategy still reflects the downstream's unbounded request
            assertEquals(Long.MAX_VALUE, emitter.requested());
            emitter.onNext(1);
        }, BackpressureStrategy.MISSING)
        .doOnNext(v -> {
            if (v == 1) {
                // reentrant emission while onNext(1) is still being delivered
                ref.get().onNext(2);
                ref.get().onComplete();
            }
        })
        .test()
        .assertResult(1, 2);
    }
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(Flowable.create(e -> { }, BackpressureStrategy.BUFFER));
}
@Test
public void tryOnErrorNull() {
Flowable.create(emitter -> emitter.tryOnError(null), BackpressureStrategy.MISSING)
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void serializedCompleteOnNext() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
Flowable.<Integer>create(emitter -> {
emitter = emitter.serialize();
emitter.onComplete();
emitter.onNext(1);
}, BackpressureStrategy.MISSING)
.subscribe(ts);
ts.assertResult();
}
@Test
public void serializedCancelOnNext() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
Flowable.<Integer>create(emitter -> {
emitter = emitter.serialize();
ts.cancel();
emitter.onNext(1);
}, BackpressureStrategy.MISSING)
.subscribe(ts);
ts.assertEmpty();
}
}
|
FlowableCreateTest
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/kstream/internals/foreignkeyjoin/CombinedKeySchemaTest.java
|
{
"start": 1160,
"end": 3567
}
|
class ____ {
@Test
public void nonNullPrimaryKeySerdeTest() {
final CombinedKeySchema<String, Integer> cks = new CombinedKeySchema<>(
() -> "fkTopic", Serdes.String(),
() -> "pkTopic", Serdes.Integer()
);
final Integer primary = -999;
final Bytes result = cks.toBytes("foreignKey", primary);
final CombinedKey<String, Integer> deserializedKey = cks.fromBytes(result);
assertEquals("foreignKey", deserializedKey.foreignKey());
assertEquals(primary, deserializedKey.primaryKey());
}
@Test
public void nullPrimaryKeySerdeTest() {
final CombinedKeySchema<String, Integer> cks = new CombinedKeySchema<>(
() -> "fkTopic", Serdes.String(),
() -> "pkTopic", Serdes.Integer()
);
assertThrows(NullPointerException.class, () -> cks.toBytes("foreignKey", null));
}
@Test
public void nullForeignKeySerdeTest() {
final CombinedKeySchema<String, Integer> cks = new CombinedKeySchema<>(
() -> "fkTopic", Serdes.String(),
() -> "pkTopic", Serdes.Integer()
);
assertThrows(NullPointerException.class, () -> cks.toBytes(null, 10));
}
@Test
public void prefixKeySerdeTest() {
final CombinedKeySchema<String, Integer> cks = new CombinedKeySchema<>(
() -> "fkTopic", Serdes.String(),
() -> "pkTopic", Serdes.Integer()
);
final String foreignKey = "someForeignKey";
final byte[] foreignKeySerializedData =
Serdes.String().serializer().serialize("fkTopic", foreignKey);
final Bytes prefix = cks.prefixBytes(foreignKey);
final ByteBuffer buf = ByteBuffer.allocate(Integer.BYTES + foreignKeySerializedData.length);
buf.putInt(foreignKeySerializedData.length);
buf.put(foreignKeySerializedData);
final Bytes expectedPrefixBytes = Bytes.wrap(buf.array());
assertEquals(expectedPrefixBytes, prefix);
}
@Test
public void nullPrefixKeySerdeTest() {
final CombinedKeySchema<String, Integer> cks = new CombinedKeySchema<>(
() -> "fkTopic", Serdes.String(),
() -> "pkTopic", Serdes.Integer()
);
final String foreignKey = null;
assertThrows(NullPointerException.class, () -> cks.prefixBytes(foreignKey));
}
}
|
CombinedKeySchemaTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/SetUnrecognizedTest.java
|
{
"start": 2372,
"end": 2549
}
|
class ____ {
void test() {
TestProto3Message.newBuilder().setMyString("");
}
}
""")
.doTest();
}
}
|
Test
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/heartbeat/DefaultHeartbeatMonitor.java
|
{
"start": 1487,
"end": 6493
}
|
class ____<O> implements HeartbeatMonitor<O>, Runnable {
private static final Logger LOG = LoggerFactory.getLogger(DefaultHeartbeatMonitor.class);
/** Resource ID of the monitored heartbeat target. */
private final ResourceID resourceID;
/** Associated heartbeat target. */
private final HeartbeatTarget<O> heartbeatTarget;
private final ScheduledExecutor scheduledExecutor;
/** Listener which is notified about heartbeat timeouts. */
private final HeartbeatListener<?, ?> heartbeatListener;
/** Maximum heartbeat timeout interval. */
private final long heartbeatTimeoutIntervalMs;
private final int failedRpcRequestsUntilUnreachable;
private volatile ScheduledFuture<?> futureTimeout;
private final AtomicReference<State> state = new AtomicReference<>(State.RUNNING);
private final AtomicInteger numberFailedRpcRequestsSinceLastSuccess = new AtomicInteger(0);
private volatile long lastHeartbeat;
DefaultHeartbeatMonitor(
ResourceID resourceID,
HeartbeatTarget<O> heartbeatTarget,
ScheduledExecutor scheduledExecutor,
HeartbeatListener<?, O> heartbeatListener,
long heartbeatTimeoutIntervalMs,
int failedRpcRequestsUntilUnreachable) {
this.resourceID = Preconditions.checkNotNull(resourceID);
this.heartbeatTarget = Preconditions.checkNotNull(heartbeatTarget);
this.scheduledExecutor = Preconditions.checkNotNull(scheduledExecutor);
this.heartbeatListener = Preconditions.checkNotNull(heartbeatListener);
Preconditions.checkArgument(
heartbeatTimeoutIntervalMs > 0L,
"The heartbeat timeout interval has to be larger than 0.");
this.heartbeatTimeoutIntervalMs = heartbeatTimeoutIntervalMs;
Preconditions.checkArgument(
failedRpcRequestsUntilUnreachable > 0
|| failedRpcRequestsUntilUnreachable
== HeartbeatManagerOptions.FAILED_RPC_DETECTION_DISABLED,
"The number of failed heartbeat RPC requests has to be larger than 0 or -1 (deactivated).");
this.failedRpcRequestsUntilUnreachable = failedRpcRequestsUntilUnreachable;
lastHeartbeat = 0L;
resetHeartbeatTimeout(heartbeatTimeoutIntervalMs);
}
@Override
public HeartbeatTarget<O> getHeartbeatTarget() {
return heartbeatTarget;
}
@Override
public ResourceID getHeartbeatTargetId() {
return resourceID;
}
@Override
public long getLastHeartbeat() {
return lastHeartbeat;
}
@Override
public void reportHeartbeatRpcFailure() {
final int failedRpcRequestsSinceLastSuccess =
numberFailedRpcRequestsSinceLastSuccess.incrementAndGet();
if (isHeartbeatRpcFailureDetectionEnabled()
&& failedRpcRequestsSinceLastSuccess >= failedRpcRequestsUntilUnreachable) {
if (state.compareAndSet(State.RUNNING, State.UNREACHABLE)) {
LOG.debug(
"Mark heartbeat target {} as unreachable because {} consecutive heartbeat RPCs have failed.",
resourceID,
failedRpcRequestsSinceLastSuccess);
cancelTimeout();
heartbeatListener.notifyTargetUnreachable(resourceID);
}
}
}
private boolean isHeartbeatRpcFailureDetectionEnabled() {
return failedRpcRequestsUntilUnreachable > 0;
}
@Override
public void reportHeartbeatRpcSuccess() {
numberFailedRpcRequestsSinceLastSuccess.set(0);
}
@Override
public void reportHeartbeat() {
lastHeartbeat = System.currentTimeMillis();
resetHeartbeatTimeout(heartbeatTimeoutIntervalMs);
}
@Override
public void cancel() {
// we can only cancel if we are in state running
if (state.compareAndSet(State.RUNNING, State.CANCELED)) {
cancelTimeout();
}
}
@Override
public void run() {
// The heartbeat has timed out if we're in state running
if (state.compareAndSet(State.RUNNING, State.TIMEOUT)) {
heartbeatListener.notifyHeartbeatTimeout(resourceID);
}
}
public boolean isCanceled() {
return state.get() == State.CANCELED;
}
void resetHeartbeatTimeout(long heartbeatTimeout) {
if (state.get() == State.RUNNING) {
cancelTimeout();
futureTimeout =
scheduledExecutor.schedule(this, heartbeatTimeout, TimeUnit.MILLISECONDS);
// Double check for concurrent accesses (e.g. a firing of the scheduled future)
if (state.get() != State.RUNNING) {
cancelTimeout();
}
}
}
private void cancelTimeout() {
if (futureTimeout != null) {
futureTimeout.cancel(true);
}
}
private
|
DefaultHeartbeatMonitor
|
java
|
spring-projects__spring-boot
|
core/spring-boot-testcontainers/src/dockerTest/java/org/springframework/boot/testcontainers/lifecycle/TestcontainersParallelStartupIntegrationTests.java
|
{
"start": 2167,
"end": 2543
}
|
class ____ {
@Bean
static PostgreSQLContainer container1() {
return TestImage.container(PostgreSQLContainer.class);
}
@Bean
static PostgreSQLContainer container2() {
return TestImage.container(PostgreSQLContainer.class);
}
@Bean
static PostgreSQLContainer container3() {
return TestImage.container(PostgreSQLContainer.class);
}
}
}
|
ContainerConfig
|
java
|
spring-projects__spring-boot
|
module/spring-boot-servlet/src/main/java/org/springframework/boot/servlet/autoconfigure/ServletEncodingProperties.java
|
{
"start": 2874,
"end": 3006
}
|
enum ____ {
/**
* HTTP request message.
*/
REQUEST,
/**
* HTTP response message.
*/
RESPONSE
}
}
|
HttpMessageType
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/security/authenticator/SaslAuthenticatorTest.java
|
{
"start": 136801,
"end": 140936
}
|
class ____ implements AuthenticateCallbackHandler {
private static final OAuthBearerUnsecuredLoginCallbackHandler DELEGATE = new OAuthBearerUnsecuredLoginCallbackHandler();
private static final String QUOTE = "\"";
private static int numInvocations = 0;
@Override
public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
DELEGATE.handle(callbacks);
// now change any returned token to have a different principal name
for (Callback callback : callbacks) {
if (callback instanceof OAuthBearerTokenCallback) {
OAuthBearerTokenCallback oauthBearerTokenCallback = (OAuthBearerTokenCallback) callback;
OAuthBearerToken token = oauthBearerTokenCallback.token();
if (token != null) {
String changedPrincipalNameToUse = token.principalName()
+ ++numInvocations;
String headerJson = "{" + claimOrHeaderJsonText("alg", "none") + "}";
/*
* If we're testing large expiration scenario, use a large lifetime.
* Otherwise, use a short lifetime so the background refresh thread replaces it before we
* re-authenticate
*/
String lifetimeSecondsValueToUse = needLargeExpiration ? String.valueOf(Long.MAX_VALUE) : "1";
String claimsJson;
try {
claimsJson = String.format("{%s,%s,%s}",
expClaimText(Long.parseLong(lifetimeSecondsValueToUse)),
claimOrHeaderJsonText("iat", time.milliseconds() / 1000.0),
claimOrHeaderJsonText("sub", changedPrincipalNameToUse));
} catch (NumberFormatException e) {
throw new OAuthBearerConfigException(e.getMessage());
}
try {
Encoder urlEncoderNoPadding = Base64.getUrlEncoder().withoutPadding();
OAuthBearerUnsecuredJws jws = new OAuthBearerUnsecuredJws(String.format("%s.%s.",
urlEncoderNoPadding.encodeToString(headerJson.getBytes(StandardCharsets.UTF_8)),
urlEncoderNoPadding
.encodeToString(claimsJson.getBytes(StandardCharsets.UTF_8))),
"sub", "scope");
oauthBearerTokenCallback.token(jws);
} catch (OAuthBearerIllegalTokenException e) {
// occurs if the principal claim doesn't exist or has an empty value
throw new OAuthBearerConfigException(e.getMessage(), e);
}
}
}
}
}
private static String claimOrHeaderJsonText(String claimName, String claimValue) {
return QUOTE + claimName + QUOTE + ":" + QUOTE + claimValue + QUOTE;
}
private static String claimOrHeaderJsonText(String claimName, Number claimValue) {
return QUOTE + claimName + QUOTE + ":" + claimValue;
}
private static String expClaimText(long lifetimeSeconds) {
return claimOrHeaderJsonText("exp", time.milliseconds() / 1000.0 + lifetimeSeconds);
}
@Override
public void configure(Map<String, ?> configs, String saslMechanism,
List<AppConfigurationEntry> jaasConfigEntries) {
DELEGATE.configure(configs, saslMechanism, jaasConfigEntries);
}
@Override
public void close() {
DELEGATE.close();
}
}
/*
* Define a channel builder that starts with the DIGEST-MD5 mechanism and then
* switches to the PLAIN mechanism
*/
private static
|
AlternateLoginCallbackHandler
|
java
|
spring-projects__spring-boot
|
module/spring-boot-batch/src/test/java/org/springframework/boot/batch/autoconfigure/BatchJobLauncherAutoConfigurationTests.java
|
{
"start": 8019,
"end": 8561
}
|
class ____ {
@Autowired
private JobRepository jobRepository;
@Bean
Job job() {
AbstractJob job = new AbstractJob() {
@Override
public Collection<String> getStepNames() {
return Collections.emptySet();
}
@Override
public Step getStep(String stepName) {
return mock(Step.class);
}
@Override
protected void doExecute(JobExecution execution) {
execution.setStatus(BatchStatus.COMPLETED);
}
};
job.setJobRepository(this.jobRepository);
return job;
}
}
}
|
JobConfiguration
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutPipelineWithBeanTest.java
|
{
"start": 1577,
"end": 4455
}
|
class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@BindToRegistry("dummyBean")
private final MyDummyBean bean = new MyDummyBean();
@Test
public void testA() {
Object response = template.requestBody("activemq:JmsInOutPipelineWithBeanTest.A", "Hello World");
assertEquals("Hello World,From Bean,From A,From B", response, "Reply");
}
@Test
public void testB() {
Object response = template.requestBody("activemq:JmsInOutPipelineWithBeanTest.B", "Hello World");
assertEquals("Hello World,From A,From Bean,From B", response, "Reply");
}
@Test
public void testC() {
Object response = template.requestBody("activemq:JmsInOutPipelineWithBeanTest.C", "Hello World");
assertEquals("Hello World,From A,From B,From Bean", response, "Reply");
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("activemq:JmsInOutPipelineWithBeanTest.A").to("bean:dummyBean")
.to("activemq:JmsInOutPipelineWithBeanTest.dest.a").to("activemq:JmsInOutPipelineWithBeanTest.dest.b");
from("activemq:JmsInOutPipelineWithBeanTest.B").to("activemq:JmsInOutPipelineWithBeanTest.dest.a")
.to("bean:dummyBean").to("activemq:JmsInOutPipelineWithBeanTest.dest.b");
from("activemq:JmsInOutPipelineWithBeanTest.C").to("activemq:JmsInOutPipelineWithBeanTest.dest.a")
.to("activemq:JmsInOutPipelineWithBeanTest.dest.b").to("bean:dummyBean");
from("activemq:JmsInOutPipelineWithBeanTest.dest.a").process(exchange -> {
String body = exchange.getIn().getBody(String.class);
exchange.getMessage().setBody(body + ",From A");
});
from("activemq:JmsInOutPipelineWithBeanTest.dest.b").process(exchange -> {
String body = exchange.getIn().getBody(String.class);
exchange.getMessage().setBody(body + ",From B");
});
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
public static
|
JmsInOutPipelineWithBeanTest
|
java
|
google__guice
|
core/test/com/google/inject/MethodInterceptionTest.java
|
{
"start": 20750,
"end": 21557
}
|
class ____ extends GenericReturn<String> implements RawReturn {}
@Test
public void testInterceptionWithMixedReturnTypes() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(RawReturn.class).to(MixedReturn.class);
bindInterceptor(
Matchers.any(),
Matchers.annotatedWith(Intercept.class),
mi -> {
Object result = mi.proceed();
return result != null ? result : "NULL_RETURN";
});
}
});
RawReturn ret = injector.getInstance(RawReturn.class);
assertEquals("NULL_RETURN", ret.testReturn());
}
|
MixedReturn
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/shuffle/NettyShuffleDescriptor.java
|
{
"start": 1444,
"end": 3420
}
|
class ____ implements ShuffleDescriptor {
private static final long serialVersionUID = 852181945034989215L;
private final ResourceID producerLocation;
private final PartitionConnectionInfo partitionConnectionInfo;
private final ResultPartitionID resultPartitionID;
@Nullable private final List<TierShuffleDescriptor> tierShuffleDescriptors;
public NettyShuffleDescriptor(
ResourceID producerLocation,
PartitionConnectionInfo partitionConnectionInfo,
ResultPartitionID resultPartitionID) {
this(producerLocation, partitionConnectionInfo, resultPartitionID, null);
}
public NettyShuffleDescriptor(
ResourceID producerLocation,
PartitionConnectionInfo partitionConnectionInfo,
ResultPartitionID resultPartitionID,
@Nullable List<TierShuffleDescriptor> tierShuffleDescriptors) {
this.producerLocation = producerLocation;
this.partitionConnectionInfo = partitionConnectionInfo;
this.resultPartitionID = resultPartitionID;
this.tierShuffleDescriptors = tierShuffleDescriptors;
}
public ConnectionID getConnectionId() {
return new ConnectionID(
producerLocation,
partitionConnectionInfo.getAddress(),
partitionConnectionInfo.getConnectionIndex());
}
@Override
public ResultPartitionID getResultPartitionID() {
return resultPartitionID;
}
@Override
public Optional<ResourceID> storesLocalResourcesOn() {
return Optional.of(producerLocation);
}
public boolean isLocalTo(ResourceID consumerLocation) {
return producerLocation.equals(consumerLocation);
}
@Nullable
public List<TierShuffleDescriptor> getTierShuffleDescriptors() {
return tierShuffleDescriptors;
}
/** Information for connection to partition producer for shuffle exchange. */
public
|
NettyShuffleDescriptor
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/ClientFormParam.java
|
{
"start": 3314,
"end": 3831
}
|
interface ____ {
/**
* @return the name of the form param.
*/
String name();
/**
* @return the value(s) of the param - or the method to invoke to get the value (surrounded by curly braces).
*/
String[] value();
/**
* @return whether to abort the request if the method to compute the form parameter value throws an exception (true;
* default) or just
* skip this form parameter (false)
*/
boolean required() default true;
}
|
ClientFormParam
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/filter/UrlHandlerFilter.java
|
{
"start": 2352,
"end": 3976
}
|
class ____ extends OncePerRequestFilter {
private static final Log logger = LogFactory.getLog(UrlHandlerFilter.class);
private final MultiValueMap<Handler, PathPattern> handlers;
private UrlHandlerFilter(MultiValueMap<Handler, PathPattern> handlers) {
this.handlers = new LinkedMultiValueMap<>(handlers);
}
@Override
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
throws ServletException, IOException {
RequestPath path = (ServletRequestPathUtils.hasParsedRequestPath(request) ?
ServletRequestPathUtils.getParsedRequestPath(request) :
ServletRequestPathUtils.parse(request));
for (Map.Entry<Handler, List<PathPattern>> entry : this.handlers.entrySet()) {
if (!entry.getKey().supports(request, path)) {
continue;
}
for (PathPattern pattern : entry.getValue()) {
if (pattern.matches(path)) {
entry.getKey().handle(request, response, chain);
return;
}
}
}
chain.doFilter(request, response);
}
/**
* Create a builder by adding a handler for URL's with a trailing slash.
* @param pathPatterns path patterns to map the handler to, for example,
* <code>"/path/*"</code>, <code>"/path/**"</code>,
* <code>"/path/foo/"</code>.
* @return a spec to configure the trailing slash handler with
* @see Builder#trailingSlashHandler(String...)
*/
public static Builder.TrailingSlashSpec trailingSlashHandler(String... pathPatterns) {
return new DefaultBuilder().trailingSlashHandler(pathPatterns);
}
/**
* Builder for {@link UrlHandlerFilter}.
*/
public
|
UrlHandlerFilter
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ser/std/StdContainerSerializer.java
|
{
"start": 217,
"end": 522
}
|
class ____ serializers used for serializing
* types that contain element(s) of other types, such as arrays,
* {@link java.util.Collection}s (<code>Lists</code>, <code>Sets</code>
* etc) and {@link java.util.Map}s and iterable things
* ({@link java.util.Iterator}s).
*<p>
* NOTE: in Jackson 2.x, this
|
for
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotStartWith_Test.java
|
{
"start": 1347,
"end": 3330
}
|
class ____ extends StringsBaseTest {
@Test
void should_fail_if_actual_starts_with_prefix() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> strings.assertDoesNotStartWith(someInfo(), "Yoda", "Yo"))
.withMessage(shouldNotStartWith("Yoda", "Yo").create());
}
@Test
void should_throw_error_if_prefix_is_null() {
assertThatNullPointerException().isThrownBy(() -> strings.assertDoesNotStartWith(someInfo(), "Yoda", null))
.withMessage("The given prefix should not be null");
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> strings.assertDoesNotStartWith(someInfo(), null, "Yoda"))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_does_not_start_with_prefix() {
strings.assertDoesNotStartWith(someInfo(), "Yoda", "Luke");
strings.assertDoesNotStartWith(someInfo(), "Yoda", "YO");
}
@Test
void should_pass_if_actual_does_not_start_with_prefix_according_to_custom_comparison_strategy() {
stringsWithCaseInsensitiveComparisonStrategy.assertDoesNotStartWith(someInfo(), "Yoda", "Luke");
}
@Test
void should_fail_if_actual_starts_with_prefix_according_to_custom_comparison_strategy() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> stringsWithCaseInsensitiveComparisonStrategy.assertDoesNotStartWith(someInfo(),
"Yoda",
"yODA"))
.withMessage(shouldNotStartWith("Yoda", "yODA", comparisonStrategy).create());
}
}
|
Strings_assertDoesNotStartWith_Test
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/FormMapTest.java
|
{
"start": 1632,
"end": 2307
}
|
class ____ {
@POST
@Consumes("application/x-www-form-urlencoded")
public String response(MultivaluedMap<String, String> all) {
StringBuilder sb = new StringBuilder();
boolean isFirst = true;
List<String> keys = new ArrayList<>(all.keySet());
Collections.sort(keys);
for (var key : keys) {
if (!isFirst) {
sb.append("-");
}
isFirst = false;
sb.append(key).append("=").append(String.join(",", all.get(key)));
}
return sb.toString();
}
}
@Path("/test")
public
|
Resource
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/TypeConverter.java
|
{
"start": 1081,
"end": 5278
}
|
interface ____ {

    // Sentinel used to record a conversion "miss"; Void.TYPE can never be a
    // legitimate conversion result. NOTE(review): inferred from name/value —
    // confirm against the caches that store it.
    Object MISS_VALUE = Void.TYPE;

    /**
     * Whether the type converter allows returning null as a valid response.
     * <p/>
     * By default <tt>null</tt> is not a valid response, returning <tt>false</tt> from this method.
     */
    boolean allowNull();

    /**
     * Converts the value to the specified type
     *
     * @param  type                    the requested type
     * @param  value                   the value to be converted
     * @return                         the converted value, or <tt>null</tt> if not possible to convert
     * @throws TypeConversionException is thrown if error during type conversion
     */
    <T> T convertTo(Class<T> type, Object value) throws TypeConversionException;

    /**
     * Converts the value to the specified type in the context of an exchange
     * <p/>
     * Used when conversion requires extra information from the current exchange (such as encoding).
     *
     * @param  type                    the requested type
     * @param  exchange                the current exchange
     * @param  value                   the value to be converted
     * @return                         the converted value, or <tt>null</tt> if not possible to convert
     * @throws TypeConversionException is thrown if error during type conversion
     */
    <T> T convertTo(Class<T> type, Exchange exchange, Object value) throws TypeConversionException;

    /**
     * Converts the value to the specified type
     *
     * @param  type                                 the requested type
     * @param  value                                the value to be converted
     * @return                                      the converted value, is never <tt>null</tt>
     * @throws TypeConversionException              is thrown if error during type conversion
     * @throws NoTypeConversionAvailableException   if no type converters exists to convert to the given type
     */
    <T> T mandatoryConvertTo(Class<T> type, Object value) throws TypeConversionException, NoTypeConversionAvailableException;

    /**
     * Converts the value to the specified type in the context of an exchange
     * <p/>
     * Used when conversion requires extra information from the current exchange (such as encoding).
     *
     * @param  type                                 the requested type
     * @param  exchange                             the current exchange
     * @param  value                                the value to be converted
     * @return                                      the converted value, is never <tt>null</tt>
     * @throws TypeConversionException              is thrown if error during type conversion
     * @throws NoTypeConversionAvailableException   if no type converters exists to convert to the given type
     */
    <T> T mandatoryConvertTo(Class<T> type, Exchange exchange, Object value)
            throws TypeConversionException, NoTypeConversionAvailableException;

    /**
     * Tries to convert the value to the specified type, returning <tt>null</tt> if not possible to convert.
     * <p/>
     * This method will <b>not</b> throw an exception if an exception occurred during conversion.
     *
     * @param  type  the requested type
     * @param  value the value to be converted
     * @return       the converted value, or <tt>null</tt> if not possible to convert
     */
    <T> T tryConvertTo(Class<T> type, Object value);

    /**
     * Tries to convert the value to the specified type in the context of an exchange, returning <tt>null</tt> if not
     * possible to convert.
     * <p/>
     * This method will <b>not</b> throw an exception if an exception occurred during conversion. Converts the value to
     * the specified type in the context of an exchange
     * <p/>
     * Used when conversion requires extra information from the current exchange (such as encoding).
     *
     * @param  type     the requested type
     * @param  exchange the current exchange
     * @param  value    the value to be converted
     * @return          the converted value, or <tt>null</tt> if not possible to convert
     */
    <T> T tryConvertTo(Class<T> type, Exchange exchange, Object value);
}
|
TypeConverter
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/atomic/integerarray/AtomicIntegerArrayAssert_hasSizeLessThan_Test.java
|
{
"start": 828,
"end": 1188
}
|
class ____ extends AtomicIntegerArrayAssertBaseTest {

    // Invokes the API under test; the base test framework pairs this call with
    // the delegation check below.
    @Override
    protected AtomicIntegerArrayAssert invoke_api_method() {
        return assertions.hasSizeLessThan(6);
    }

    // hasSizeLessThan(6) must delegate to the internal arrays object with the same bound.
    @Override
    protected void verify_internal_effects() {
        verify(arrays).assertHasSizeLessThan(getInfo(assertions), internalArray(), 6);
    }
}
|
AtomicIntegerArrayAssert_hasSizeLessThan_Test
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/spi/file/FileResolver.java
|
{
"start": 1333,
"end": 1988
}
|
interface ____ extends Closeable {

    /**
     * Create a file resolver.
     *
     * @param options the fs options
     * @return the file resolver
     */
    static FileResolver fileResolver(FileSystemOptions options) {
        // Default implementation; the caller owns the instance and must close() it.
        return new FileResolverImpl(options);
    }

    /**
     * Resolve the file for the specified {@code fileName}.
     *
     * This method should never return {@code null}, it can return a file that does not exist.
     *
     * @param fileName the name to resolve
     * @return a file resolved
     */
    File resolve(String fileName);

    /**
     * Close this file resolver, this is a blocking operation.
     */
    void close() throws IOException;
}
|
FileResolver
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/ConfigurationClassProcessingTests.java
|
{
"start": 19791,
"end": 20137
}
|
class ____ {

    // Field injection point: resolved by bean name "adaptive1".
    @Resource(name = "adaptive1")
    public TestBean adaptiveInjectionPoint1;

    // Populated through the annotated setter below rather than by field injection.
    public TestBean adaptiveInjectionPoint2;

    // Method injection point: resolved by bean name "adaptive2".
    @Resource(name = "adaptive2")
    public void setAdaptiveInjectionPoint2(TestBean adaptiveInjectionPoint2) {
        this.adaptiveInjectionPoint2 = adaptiveInjectionPoint2;
    }
}
static
|
AdaptiveResourceInjectionPoints
|
java
|
apache__camel
|
components/camel-saxon/src/main/java/org/apache/camel/component/xquery/XQueryEndpoint.java
|
{
"start": 1823,
"end": 8542
}
|
class ____ extends ProcessorEndpoint {

    private static final Logger LOG = LoggerFactory.getLogger(XQueryEndpoint.class);

    // Built lazily from the configured resource (see doInitXQuery); volatile because
    // lifecycle methods and request processing may run on different threads.
    private volatile XQueryBuilder xquery;

    @UriPath
    @Metadata(required = true)
    private String resourceUri;
    @UriParam(label = "advanced")
    private Configuration configuration;
    @UriParam(label = "advanced")
    private Map<String, Object> configurationProperties = new HashMap<>();
    @UriParam(label = "advanced")
    private StaticQueryContext staticQueryContext;
    @UriParam(label = "advanced")
    private Map<String, Object> parameters = new HashMap<>();
    @UriParam
    private Map<String, String> namespacePrefixes = new HashMap<>();
    @UriParam(defaultValue = "DOM")
    private ResultFormat resultsFormat = ResultFormat.DOM;
    @UriParam(label = "advanced")
    private Properties properties = new Properties();
    @UriParam
    private Class<?> resultType;
    @UriParam(defaultValue = "true")
    private boolean stripsAllWhiteSpace = true;
    @UriParam(label = "advanced")
    private ModuleURIResolver moduleURIResolver;
    @UriParam
    private boolean allowStAX;
    @UriParam
    private String source;

    public XQueryEndpoint(String endpointUri, Component component) {
        super(endpointUri, component);
    }

    @Override
    public boolean isRemote() {
        return false;
    }

    public String getResourceUri() {
        return resourceUri;
    }

    /**
     * The name of the template to load from classpath or file system
     */
    public void setResourceUri(String resourceUri) {
        this.resourceUri = resourceUri;
    }

    public Configuration getConfiguration() {
        return configuration;
    }

    /**
     * To use a custom Saxon configuration
     */
    public void setConfiguration(Configuration configuration) {
        this.configuration = configuration;
    }

    public Map<String, Object> getConfigurationProperties() {
        return configurationProperties;
    }

    /**
     * To set custom Saxon configuration properties
     */
    public void setConfigurationProperties(Map<String, Object> configurationProperties) {
        this.configurationProperties = configurationProperties;
    }

    public StaticQueryContext getStaticQueryContext() {
        return staticQueryContext;
    }

    /**
     * To use a custom Saxon StaticQueryContext
     */
    public void setStaticQueryContext(StaticQueryContext staticQueryContext) {
        this.staticQueryContext = staticQueryContext;
    }

    public Map<String, Object> getParameters() {
        return parameters;
    }

    /**
     * Additional parameters
     */
    public void setParameters(Map<String, Object> parameters) {
        this.parameters = parameters;
    }

    public Map<String, String> getNamespacePrefixes() {
        return namespacePrefixes;
    }

    /**
     * Allows to control which namespace prefixes to use for a set of namespace mappings
     */
    public void setNamespacePrefixes(Map<String, String> namespacePrefixes) {
        this.namespacePrefixes = namespacePrefixes;
    }

    public ResultFormat getResultsFormat() {
        return resultsFormat;
    }

    /**
     * What output result to use
     */
    public void setResultsFormat(ResultFormat resultsFormat) {
        this.resultsFormat = resultsFormat;
    }

    public Properties getProperties() {
        return properties;
    }

    /**
     * Properties to configure the serialization parameters
     */
    public void setProperties(Properties properties) {
        this.properties = properties;
    }

    public Class<?> getResultType() {
        return resultType;
    }

    /**
     * What output result to use defined as a class
     */
    public void setResultType(Class<?> resultType) {
        this.resultType = resultType;
    }

    public boolean isStripsAllWhiteSpace() {
        return stripsAllWhiteSpace;
    }

    /**
     * Whether to strip all whitespaces
     */
    public void setStripsAllWhiteSpace(boolean stripsAllWhiteSpace) {
        this.stripsAllWhiteSpace = stripsAllWhiteSpace;
    }

    public ModuleURIResolver getModuleURIResolver() {
        return moduleURIResolver;
    }

    /**
     * To use the custom {@link ModuleURIResolver}
     */
    public void setModuleURIResolver(ModuleURIResolver moduleURIResolver) {
        this.moduleURIResolver = moduleURIResolver;
    }

    public boolean isAllowStAX() {
        return allowStAX;
    }

    /**
     * Whether to allow using StAX mode
     */
    public void setAllowStAX(boolean allowStAX) {
        this.allowStAX = allowStAX;
    }

    public String getSource() {
        return source;
    }

    /**
     * Source to use, instead of message body. You can prefix with variable:, header:, or property: to specify kind of
     * source. Otherwise, the source is assumed to be a variable. Use empty or null to use default source, which is the
     * message body.
     */
    public void setSource(String source) {
        this.source = source;
    }

    // Classpath resources are available at init time, so the query can be built eagerly.
    @Override
    protected void doInit() throws Exception {
        super.doInit();
        if (ResourceHelper.isClasspathUri(resourceUri)) {
            doInitXQuery();
        }
    }

    // Non-classpath resources (file, http, ...) are only resolved at start time.
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        if (!ResourceHelper.isClasspathUri(resourceUri)) {
            doInitXQuery();
        }
        ServiceHelper.startService(xquery);
    }

    // Loads the XQuery resource and transfers every configured option onto the
    // builder, then installs the builder as this endpoint's processor.
    protected void doInitXQuery() throws Exception {
        LOG.debug("{} using schema resource: {}", this, resourceUri);
        InputStream is = ResourceHelper.resolveMandatoryResourceAsInputStream(getCamelContext(), resourceUri);
        this.xquery = XQueryBuilder.xquery(is);
        this.xquery.setConfiguration(getConfiguration());
        this.xquery.setConfigurationProperties(getConfigurationProperties());
        this.xquery.setStaticQueryContext(getStaticQueryContext());
        this.xquery.setParameters(getParameters());
        this.xquery.setNamespaces(namespacePrefixes);
        this.xquery.setResultsFormat(getResultsFormat());
        this.xquery.setProperties(getProperties());
        this.xquery.setResultType(getResultType());
        this.xquery.setStripsAllWhiteSpace(isStripsAllWhiteSpace());
        this.xquery.setAllowStAX(isAllowStAX());
        this.xquery.setModuleURIResolver(getModuleURIResolver());
        this.xquery.setSource(ExpressionBuilder.singleInputExpression(getSource()));
        this.xquery.init(getCamelContext());
        setProcessor(xquery);
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();
        ServiceHelper.stopService(xquery);
    }
}
|
XQueryEndpoint
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/value/nametransformation/CheeseCaseMapper.java
|
{
"start": 443,
"end": 2348
}
|
/**
 * Maps between differently-cased cheese enums/strings using MapStruct's
 * CASE_TRANSFORMATION name-transformation strategy with the "lower",
 * "upper" and "capital" configurations.
 */
interface ____ {

    CheeseCaseMapper INSTANCE = Mappers.getMapper( CheeseCaseMapper.class );

    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "lower")
    CheeseTypeLower mapToLower(CheeseType cheese);

    // Inverse of mapToLower(CheeseType), derived automatically by MapStruct.
    @InheritInverseConfiguration
    CheeseType mapToLowerInverse(CheeseTypeLower cheese);

    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "lower")
    CheeseTypeLower mapToLower(CheeseTypeCapital cheese);

    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "upper")
    CheeseType mapToUpper(CheeseTypeLower cheese);

    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "upper")
    CheeseType mapToUpper(CheeseTypeCapital cheese);

    // "name" disambiguates between the two overloaded mapToUpper methods.
    @InheritInverseConfiguration(name = "mapToUpper")
    CheeseTypeCapital mapToUpperInverse(CheeseType cheese);

    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "capital")
    CheeseTypeCapital mapToCapital(CheeseTypeLower cheese);

    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "capital")
    CheeseTypeCapital mapToCapital(CheeseType cheese);

    @InheritInverseConfiguration(name = "mapToCapital")
    CheeseType mapToCapitalInverse(CheeseTypeCapital cheese);

    // Enum-to-String variants applying the same case transformations.
    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "lower")
    String mapToLowerString(CheeseType cheese);

    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "upper")
    String mapToUpperString(CheeseType cheese);

    @EnumMapping(nameTransformationStrategy = MappingConstants.CASE_TRANSFORMATION, configuration = "capital")
    String mapToCapitalString(CheeseType cheese);
}
|
CheeseCaseMapper
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/main/java/io/quarkus/smallrye/openapi/deployment/filter/AutoAddOpenApiEndpointFilter.java
|
{
"start": 339,
"end": 579
}
|
class ____ implements OASFilter {
private static final String OPENAPI_TAG = "openapi";
private static final String ENDPOINT_DESCRIPTION = "OpenAPI specification";
private final String path;
private
|
AutoAddOpenApiEndpointFilter
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/util/StagedTimeoutException.java
|
{
"start": 919,
"end": 1216
}
|
class ____ extends TimeoutException {

    /** Stage of the operation that was in progress when the timeout occurred. */
    private final Stage timedOutStage;

    /**
     * @param stage the stage that timed out; must not be {@code null}
     * @throws NullPointerException if {@code stage} is {@code null}
     */
    public StagedTimeoutException(Stage stage) {
        this.timedOutStage = Objects.requireNonNull(stage, "Stage may not be null");
    }

    /** Returns the stage that was in progress when the timeout occurred. */
    public Stage stage() {
        return timedOutStage;
    }
}
|
StagedTimeoutException
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java
|
{
"start": 7559,
"end": 9303
}
|
class ____ {
static final String SMTP_SETTINGS_PREFIX = "mail.smtp.";
final String name;
final Profile profile;
final Smtp smtp;
final EmailDefaults defaults;
Config(String name, Settings settings, @Nullable SSLSocketFactory sslSocketFactory, Logger logger) {
this.name = name;
profile = Profile.resolve(settings.get("profile"), Profile.STANDARD);
defaults = new EmailDefaults(name, settings.getAsSettings("email_defaults"));
smtp = new Smtp(settings.getAsSettings(SMTP_PROTOCOL));
if (smtp.host == null) {
String msg = "missing required email account setting for account [" + name + "]. 'smtp.host' must be configured";
throw new SettingsException(msg);
}
if (sslSocketFactory != null) {
String sslKeys = smtp.properties.keySet()
.stream()
.map(String::valueOf)
.filter(key -> key.startsWith("mail.smtp.ssl."))
.collect(Collectors.joining(","));
if (sslKeys.isEmpty() == false) {
logger.warn(
"The SMTP SSL settings [{}] that are configured for Account [{}]"
+ " will be ignored due to the notification SSL settings in [{}]",
sslKeys,
name,
EMAIL_NOTIFICATION_SSL_PREFIX
);
}
smtp.setSocketFactory(sslSocketFactory);
}
}
public Session createSession() {
return Session.getInstance(smtp.properties);
}
static
|
Config
|
java
|
quarkusio__quarkus
|
integration-tests/oidc-code-flow/src/test/java/io/quarkus/it/keycloak/TestSecurityLazyAuthTest.java
|
{
"start": 707,
"end": 1599
}
|
class ____ {
@Test
@TestSecurity(user = "user1", roles = "viewer")
public void testWithDummyUser() {
RestAssured.when().get("test-security").then()
.body(is("user1:user1:user1:user1"));
}
@Test
@TestSecurityMetaAnnotation
public void testJwtWithDummyUser() {
RestAssured.when().get("test-security-oidc").then()
.body(is("userOidc:userOidc:userOidc:userOidc:viewer:user@gmail.com:subject:aud"));
}
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.METHOD })
@TestSecurity(user = "userOidc", roles = "viewer")
@OidcSecurity(claims = {
@Claim(key = "email", value = "user@gmail.com")
}, userinfo = {
@UserInfo(key = "sub", value = "subject")
}, config = {
@ConfigMetadata(key = "audience", value = "aud")
})
public @
|
TestSecurityLazyAuthTest
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/error/ncd/NoClassDefFoundErrorWrapExceptionTest.java
|
{
"start": 1557,
"end": 3342
}
|
class ____ extends AbstractJMSTest {

    @Order(2)
    @RegisterExtension
    public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
    protected CamelContext context;
    protected ProducerTemplate template;
    protected ConsumerTemplate consumer;

    // With transferException=true the server-side error must travel back to the
    // client; the stack trace is expected to carry the original LinkageError,
    // its message and the processor that threw it.
    @Test
    public void testNoClassDef() {
        try {
            template.requestBody("activemq:NoClassDefFoundErrorWrapExceptionTest?transferException=true", "Hello World");
            fail("Should throw exception");
        } catch (Exception e) {
            final String s = ExceptionHelper.stackTraceToString(e);
            assertTrue(s.contains("java.lang.LinkageError"));
            assertTrue(s.contains("Cannot do this"));
            assertTrue(s.contains("org.apache.camel.component.jms.issues.error.ncd.ProcessorFail.process"));
        }
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                // ProcessorFail (last in the chain) throws; the preceding
                // processors must have run before the error propagates.
                from("activemq:NoClassDefFoundErrorWrapExceptionTest?transferException=true")
                        .process(new ProcessorA())
                        .process(new ProcessorB())
                        .process(new ProcessorFail());
            }
        };
    }

    @Override
    protected String getComponentName() {
        return "activemq";
    }

    @Override
    public CamelContextExtension getCamelContextExtension() {
        return camelContextExtension;
    }

    // Pull the shared context/templates out of the extension before each test.
    @BeforeEach
    void setUpRequirements() {
        context = camelContextExtension.getContext();
        template = camelContextExtension.getProducerTemplate();
        consumer = camelContextExtension.getConsumerTemplate();
    }
}
|
NoClassDefFoundErrorWrapExceptionTest
|
java
|
spring-projects__spring-boot
|
buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/build/Builder.java
|
{
"start": 13376,
"end": 14319
}
|
class ____ implements DockerLog {

    // Target build log that receives every docker log line.
    private final AbstractBuildLog log;

    private BuildLogAdapter(AbstractBuildLog log) {
        this.log = log;
    }

    // Forward each docker message straight to the wrapped build log.
    @Override
    public void log(String message) {
        this.log.log(message);
    }

    /**
     * Creates {@link DockerLog} instance based on the provided {@link BuildLog}.
     * <p>
     * If the provided {@link BuildLog} instance is an {@link AbstractBuildLog}, the
     * method returns a {@link BuildLogAdapter}, otherwise it returns a default
     * {@link DockerLog#toSystemOut()}.
     * @param log the {@link BuildLog} instance to delegate
     * @return a {@link DockerLog} instance for logging
     */
    static DockerLog get(BuildLog log) {
        if (log instanceof AbstractBuildLog abstractBuildLog) {
            return new BuildLogAdapter(abstractBuildLog);
        }
        return DockerLog.toSystemOut();
    }
}
/**
* {@link BuildpackResolverContext} implementation for the {@link Builder}.
*/
private
|
BuildLogAdapter
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_containsAll_Test.java
|
{
"start": 890,
"end": 1293
}
|
class ____ extends AtomicReferenceArrayAssertBaseTest {

    // Invokes the API under test; the base test framework pairs this call with
    // the delegation check below.
    @Override
    protected AtomicReferenceArrayAssert<Object> invoke_api_method() {
        return assertions.containsAll(newArrayList("Yoda", "Luke"));
    }

    // containsAll must delegate to the internal arrays object with the same iterable.
    @Override
    protected void verify_internal_effects() {
        verify(arrays).assertContainsAll(info(), internalArray(), newArrayList("Yoda", "Luke"));
    }
}
|
AtomicReferenceArrayAssert_containsAll_Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/AssertSameIncompatibleTest.java
|
{
"start": 2093,
"end": 2492
}
|
class ____ {
void f() {
// BUG: Diagnostic contains:
assertSame("foo", 1L);
}
}
""")
.doTest();
}
@Test
public void assertThat_compatible() {
helper
.addSourceLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
|
Test
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/cli/CommandLineParser.java
|
{
"start": 1334,
"end": 9285
}
|
class ____ implements CommandLine.Builder<CommandLineParser> {

    private static final String DEFAULT_PADDING = " ";

    // Never reassigned, only mutated -> final.
    private final Map<String, Option> declaredOptions = new HashMap<>();
    private int longestOptionNameLength = 0;
    // Header of the most recently generated help message; kept as a field by design.
    private String usageMessage;

    /**
     * Adds a declared option.
     *
     * @param name The name of the option
     * @param description The description
     */
    @Override
    public CommandLineParser addOption(String name, String description) {
        int length = name.length();
        if (length > longestOptionNameLength) {
            longestOptionNameLength = length;
        }
        declaredOptions.put(name, new Option(name, description));
        return this;
    }

    @Override
    public CommandLine parseString(String string) {
        // Steal ants implementation for argument splitting. Handles quoted arguments with " or '.
        // Doesn't handle escape sequences with \
        return parse(translateCommandline(string));
    }

    @Override
    public CommandLine parse(String... args) {
        DefaultCommandLine cl = createCommandLine();
        return parse(cl, args);
    }

    /**
     * Parse the command line entry.
     * @param cl commandLine
     * @param args args passed in
     * @return commandLine
     */
    CommandLine parse(DefaultCommandLine cl, String[] args) {
        parseInternal(cl, args);
        return cl;
    }

    /**
     * Build the options message, aligning descriptions to the longest option name.
     * @return message
     */
    public String getOptionsHelpMessage() {
        // System.lineSeparator() is the idiomatic form of
        // System.getProperty("line.separator") and cannot return null.
        String ls = System.lineSeparator();
        usageMessage = "Available options:";
        StringBuilder sb = new StringBuilder(usageMessage);
        sb.append(ls);
        for (Option option : declaredOptions.values()) {
            String name = option.getName();
            int extraPadding = longestOptionNameLength - name.length();
            sb.append(" -").append(name);
            for (int i = 0; i < extraPadding; i++) {
                sb.append(' ');
            }
            sb.append(DEFAULT_PADDING).append(option.getDescription()).append(ls);
        }
        return sb.toString();
    }

    // Walks the raw arguments: option tokens (leading '-') are handled by
    // processOption; a bare token following a value-less option becomes that
    // option's value, otherwise it is a remaining argument.
    private void parseInternal(DefaultCommandLine cl, String[] args) {
        cl.setRawArguments(args);
        String lastOptionName = null;
        for (String arg : args) {
            if (arg == null) {
                continue;
            }
            String trimmed = arg.trim();
            if (StringUtils.isNotEmpty(trimmed)) {
                // Strip one pair of surrounding double quotes, if present.
                if (trimmed.charAt(0) == '"' && trimmed.charAt(trimmed.length() - 1) == '"') {
                    trimmed = trimmed.substring(1, trimmed.length() - 1);
                }
                if (trimmed.charAt(0) == '-') {
                    lastOptionName = processOption(cl, trimmed);
                } else {
                    if (lastOptionName != null) {
                        Option opt = declaredOptions.get(lastOptionName);
                        if (opt != null) {
                            cl.addDeclaredOption(opt, trimmed);
                        } else {
                            cl.addUndeclaredOption(lastOptionName, trimmed);
                        }
                        lastOptionName = null;
                    } else {
                        cl.addRemainingArg(trimmed);
                    }
                }
            }
        }
    }

    /**
     * Create a default command line.
     * @return commandLine
     */
    protected DefaultCommandLine createCommandLine() {
        return new DefaultCommandLine();
    }

    /**
     * Process the passed in options.
     * @param cl cl
     * @param arg arg
     * @return the option name when it still awaits a value, otherwise {@code null}
     */
    protected String processOption(DefaultCommandLine cl, String arg) {
        if (arg.length() < 2) {
            return null;
        }
        // -Dname=value is a system property, not an option.
        if (arg.charAt(1) == 'D' && arg.contains("=")) {
            processSystemArg(cl, arg);
            return null;
        }
        // Strip the leading "-" or "--" prefix (single-arg substring is equivalent
        // to the redundant substring(i, arg.length()) form).
        arg = (arg.charAt(1) == '-' ? arg.substring(2) : arg.substring(1)).trim();
        if (arg.contains("=")) {
            String[] split = arg.split("=", 2);
            String name = split[0].trim();
            validateOptionName(name);
            String value = split.length > 1 ? split[1].trim() : "";
            if (declaredOptions.containsKey(name)) {
                cl.addDeclaredOption(declaredOptions.get(name), value);
            } else {
                cl.addUndeclaredOption(name, value);
            }
            return null;
        }
        validateOptionName(arg);
        if (declaredOptions.containsKey(arg)) {
            cl.addDeclaredOption(declaredOptions.get(arg));
        } else {
            cl.addUndeclaredOption(arg);
        }
        return arg;
    }

    /**
     * Process System property arg of the form {@code -Dname=value}.
     * @param cl cl
     * @param arg system arg
     */
    protected void processSystemArg(DefaultCommandLine cl, String arg) {
        int i = arg.indexOf('=');
        String name = arg.substring(2, i);
        String value = arg.substring(i + 1);
        cl.addSystemProperty(name, value);
    }

    private void validateOptionName(String name) {
        if (name.contains(" ")) {
            throw new ParseException("Invalid argument: " + name);
        }
    }

    /**
     * Crack a command line.
     *
     * @param toProcess the command line to process.
     * @return the command line broken into strings.
     * An empty or null toProcess parameter results in a zero sized array.
     */
    static String[] translateCommandline(String toProcess) {
        if (toProcess == null || toProcess.isEmpty()) {
            //no command? no string
            return EMPTY_STRING_ARRAY;
        }
        // parse with a simple finite state machine
        final int normal = 0;
        final int inQuote = 1;
        final int inDoubleQuote = 2;
        int state = normal;
        final StringTokenizer tok = new StringTokenizer(toProcess, "\"\' ", true);
        final ArrayList<String> result = new ArrayList<>();
        final StringBuilder current = new StringBuilder();
        boolean lastTokenHasBeenQuoted = false;
        while (tok.hasMoreTokens()) {
            String nextTok = tok.nextToken();
            switch (state) {
                case inQuote:
                    if ("\'".equals(nextTok)) {
                        lastTokenHasBeenQuoted = true;
                        state = normal;
                    } else {
                        current.append(nextTok);
                    }
                    break;
                case inDoubleQuote:
                    if ("\"".equals(nextTok)) {
                        lastTokenHasBeenQuoted = true;
                        state = normal;
                    } else {
                        current.append(nextTok);
                    }
                    break;
                default:
                    if ("\'".equals(nextTok)) {
                        state = inQuote;
                    } else if ("\"".equals(nextTok)) {
                        state = inDoubleQuote;
                    } else if (" ".equals(nextTok)) {
                        // Whitespace terminates the current token (quoted empty
                        // strings are preserved via lastTokenHasBeenQuoted).
                        if (lastTokenHasBeenQuoted || !current.isEmpty()) {
                            result.add(current.toString());
                            current.setLength(0);
                        }
                    } else {
                        current.append(nextTok);
                    }
                    lastTokenHasBeenQuoted = false;
                    break;
            }
        }
        if (lastTokenHasBeenQuoted || !current.isEmpty()) {
            result.add(current.toString());
        }
        if (state == inQuote || state == inDoubleQuote) {
            throw new ParseException("unbalanced quotes in " + toProcess);
        }
        return result.toArray(EMPTY_STRING_ARRAY);
    }
}
|
CommandLineParser
|
java
|
apache__camel
|
test-infra/camel-test-infra-kafka/src/main/java/org/apache/camel/test/infra/kafka/common/KafkaProperties.java
|
{
"start": 868,
"end": 1480
}
|
class ____ {

    // System property keys used by the Kafka test infrastructure to expose
    // broker/zookeeper connection details to tests.
    public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
    public static final String KAFKA_ZOOKEEPER_ADDRESS = "kafka.zookeeper.address";

    // System property keys naming the container image for each supported
    // Kafka-compatible flavor.
    public static final String CONFLUENT_CONTAINER = "confluent.container.image";
    public static final String KAFKA_CONTAINER = "kafka.container";
    public static final String KAFKA3_CONTAINER = "kafka3.container";
    public static final String REDPANDA_CONTAINER = "redpanda.container.image";
    public static final String STRIMZI_CONTAINER = "strimzi.container.image";

    // Constants-only holder; not meant to be instantiated.
    private KafkaProperties() {
    }
}
|
KafkaProperties
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/CoordinatedOperatorFactory.java
|
{
"start": 1008,
"end": 1161
}
|
class ____ the {@link StreamOperator}s implementing {@link
* org.apache.flink.runtime.operators.coordination.OperatorEventHandler}.
*/
@Internal
public
|
for
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cascade/circle/identity/AbstractEntity.java
|
{
"start": 593,
"end": 1879
}
|
class ____ implements Serializable {

    // NOTE(review): the strategy is IDENTITY, under which JPA ignores the named
    // generator; the SequenceGenerator/"TIGER_GEN" pairing looks like leftover
    // configuration — confirm it is intentional for this test package.
    @Id
    @SequenceGenerator(name = "TIGER_GEN", sequenceName = "TIGER_SEQ")
    @GeneratedValue(strategy = GenerationType.IDENTITY, generator = "TIGER_GEN")
    private Long id;

    // Immutable business key assigned at construction; equals/hashCode are based
    // solely on this value so entity identity is stable across persist/merge.
    @Basic
    @Column(unique = true, updatable = false, length = 36, columnDefinition = "char(36)")
    private String uuid;

    // Creation timestamp captured at construction; never updated afterwards.
    @Column(updatable = false)
    private Date created;

    public AbstractEntity() {
        super();
        uuid = SafeRandomUUIDGenerator.safeRandomUUIDAsString();
        created = new Date();
    }

    public Long getId() {
        return id;
    }

    public String getUuid() {
        return uuid;
    }

    public Date getCreated() {
        return created;
    }

    // Based on uuid only, consistent with equals below.
    @Override
    public int hashCode() {
        return uuid == null ? 0 : uuid.hashCode();
    }

    // Two entities are equal iff their uuids are equal (null-safe).
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (!(obj instanceof AbstractEntity ))
            return false;
        final AbstractEntity other = (AbstractEntity) obj;
        if (uuid == null) {
            if (other.uuid != null)
                return false;
        } else if (!uuid.equals(other.uuid))
            return false;
        return true;
    }

    public String toString() {
        if (id != null) {
            return "id: '" + id + "' uuid: '" + uuid + "'";
        } else {
            return "id: 'transient entity' " + " uuid: '" + uuid + "'";
        }
    }
}
|
AbstractEntity
|
java
|
quarkusio__quarkus
|
independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/PostConstructPreDestroyInvocationContext.java
|
{
"start": 236,
"end": 984
}
|
class ____ extends LifecycleCallbackInvocationContext {

    // Action to run once the interceptor chain has fully completed; may be null
    // when there is nothing to forward to.
    private final Runnable forward;

    PostConstructPreDestroyInvocationContext(Object target, Object[] parameters,
            Set<Annotation> bindings, List<InterceptorInvocation> chain, Runnable forward) {
        super(target, parameters, bindings, chain);
        this.forward = forward;
    }

    @Override
    protected void interceptorChainCompleted() {
        if (forward != null) {
            forward.run();
        }
    }

    // Parameter access is not supported for this kind of interception; both
    // accessors deliberately fail fast.
    @Override
    public Object[] getParameters() {
        throw new IllegalStateException();
    }

    @Override
    public void setParameters(Object[] params) {
        throw new IllegalStateException();
    }
}
|
PostConstructPreDestroyInvocationContext
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java
|
{
"start": 2348,
"end": 8914
}
|
class ____ extends ESTestCase {
private static RawRequestMockWebServer webServer = new RawRequestMockWebServer();
private static final Logger logger = LogManager.getLogger(HttpClientRequestTests.class);
@BeforeClass
public static void init() throws Exception {
webServer.start();
}
@AfterClass
public static void cleanup() {
try {
webServer.close();
} finally {
webServer = null;
}
}
public void testBinaryRequestForCLIEnabled() throws URISyntaxException {
assertBinaryRequestForCLI(XContentType.CBOR);
}
public void testBinaryRequestForCLIDisabled() throws URISyntaxException {
assertBinaryRequestForCLI(XContentType.JSON);
}
public void testBinaryRequestForDriversEnabled() throws URISyntaxException {
assertBinaryRequestForDrivers(XContentType.CBOR);
}
public void testBinaryRequestForDriversDisabled() throws URISyntaxException {
assertBinaryRequestForDrivers(XContentType.JSON);
}
/**
 * Verifies that a CLI-mode client, configured for the given content type, sends both
 * its initial query and its next-page request with the expected Content-Type header
 * and request-body fields (mode, binary flag, timeouts, etc.).
 */
private void assertBinaryRequestForCLI(XContentType xContentType) throws URISyntaxException {
    // CBOR content type corresponds to binary transport; JSON to text.
    boolean binary = XContentType.CBOR == xContentType;
    String serverUrl = "http://" + webServer.getHostName() + ":" + webServer.getPort();
    String query = randomAlphaOfLength(256);
    int fetchSize = randomIntBetween(1, 100);

    Properties props = new Properties();
    props.setProperty(ConnectionConfiguration.BINARY_COMMUNICATION, Boolean.toString(binary));
    ConnectionConfiguration config = new ConnectionConfiguration(new URI(serverUrl), serverUrl, props);
    HttpClient client = new HttpClient(config);

    // --- initial query ---
    prepareMockResponse();
    try {
        client.basicQuery(query, fetchSize, randomBoolean(), randomBoolean());
    } catch (SQLException e) {
        logger.info("Ignored SQLException", e);
    }
    assertEquals(1, webServer.requests().size());
    RawRequest recorded = webServer.takeRequest();
    assertEquals(xContentType.mediaTypeWithoutParameters(), recorded.getHeader("Content-Type"));
    assertEquals("POST", recorded.getMethod());
    Map<String, Object> body = XContentHelper.convertToMap(recorded.getBodyAsBytes(), false, xContentType).v2();
    assertTrue(((String) body.get(MODE_NAME)).equalsIgnoreCase(Mode.CLI.toString()));
    assertEquals(binary, body.get(BINARY_FORMAT_NAME));
    assertEquals(Boolean.FALSE, body.get(COLUMNAR_NAME));
    assertEquals(fetchSize, body.get(FETCH_SIZE_NAME));
    assertEquals(query, body.get(QUERY_NAME));
    assertEquals("90000ms", body.get(REQUEST_TIMEOUT_NAME));
    assertEquals("45000ms", body.get(PAGE_TIMEOUT_NAME));
    assertEquals("Z", body.get(TIME_ZONE_NAME));

    // --- next page ---
    prepareMockResponse();
    try {
        // we don't care what the cursor is, because the ES node that will actually handle the request (as in running an ES search)
        // will not see/have access to the "binary_format" response, which is the concern of the first node getting the request
        client.nextPage("");
    } catch (SQLException e) {
        logger.info("Ignored SQLException", e);
    }
    assertEquals(1, webServer.requests().size());
    recorded = webServer.takeRequest();
    assertEquals(xContentType.mediaTypeWithoutParameters(), recorded.getHeader("Content-Type"));
    assertEquals("POST", recorded.getMethod());
    body = XContentHelper.convertToMap(recorded.getBodyAsBytes(), false, xContentType).v2();
    assertTrue(((String) body.get(MODE_NAME)).equalsIgnoreCase(Mode.CLI.toString()));
    assertEquals(binary, body.get(BINARY_FORMAT_NAME));
    assertEquals("90000ms", body.get(REQUEST_TIMEOUT_NAME));
    assertEquals("45000ms", body.get(PAGE_TIMEOUT_NAME));
}
/**
 * Verifies that a driver-mode (JDBC/ODBC) client, configured for the given content
 * type, sends its query with the expected Content-Type header and body fields.
 */
private void assertBinaryRequestForDrivers(XContentType xContentType) throws URISyntaxException {
    // CBOR content type corresponds to binary transport; JSON to text.
    boolean binary = XContentType.CBOR == xContentType;
    String serverUrl = "http://" + webServer.getHostName() + ":" + webServer.getPort();
    String query = randomAlphaOfLength(256);

    Properties props = new Properties();
    props.setProperty(ConnectionConfiguration.BINARY_COMMUNICATION, Boolean.toString(binary));
    ConnectionConfiguration config = new ConnectionConfiguration(new URI(serverUrl), serverUrl, props);
    HttpClient client = new HttpClient(config);

    Mode mode = randomFrom(Mode.JDBC, Mode.ODBC);
    SqlQueryRequest sqlRequest = new SqlQueryRequest(
        query,
        null,
        ZoneId.of("Z"),
        randomAlphaOfLength(10),
        randomIntBetween(1, 100),
        TimeValue.timeValueMillis(randomNonNegativeLong()),
        TimeValue.timeValueMillis(randomNonNegativeLong()),
        randomBoolean(),
        randomAlphaOfLength(128),
        new RequestInfo(mode, ClientVersion.CURRENT),
        randomBoolean(),
        randomBoolean(),
        binary,
        randomBoolean()
    );

    prepareMockResponse();
    try {
        client.query(sqlRequest);
    } catch (SQLException e) {
        logger.info("Ignored SQLException", e);
    }

    assertEquals(1, webServer.requests().size());
    RawRequest recorded = webServer.takeRequest();
    assertEquals(xContentType.mediaTypeWithoutParameters(), recorded.getHeader("Content-Type"));
    assertEquals("POST", recorded.getMethod());
    Map<String, Object> body = XContentHelper.convertToMap(recorded.getBodyAsBytes(), false, xContentType).v2();
    assertTrue(((String) body.get(MODE_NAME)).equalsIgnoreCase(mode.toString()));
    assertEquals(binary, body.get(BINARY_FORMAT_NAME));
    assertEquals(query, body.get(QUERY_NAME));
    assertEquals("Z", body.get(TIME_ZONE_NAME));
}
// Queues a minimal successful response (HTTP 200, JSON, empty row set) on the mock web server.
private void prepareMockResponse() {
    webServer.enqueue(new Response().setResponseCode(200).addHeader("Content-Type", "application/json").setBody("{\"rows\":[]}"));
}
@SuppressForbidden(reason = "use http server")
private static
|
HttpClientRequestTests
|
java
|
apache__camel
|
components/camel-telemetry/src/main/java/org/apache/camel/telemetry/Span.java
|
{
"start": 875,
"end": 1026
}
|
interface ____ the Span model used along the tracing component. Each technology has to provide the
* implementation specific details.
*/
public
|
represent
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/deser/jdk/MapDeserializer.java
|
{
"start": 38019,
"end": 38729
}
|
class ____ extends Referring {
private final MapReferringAccumulator _parent;
public final Map<Object, Object> next = new LinkedHashMap<Object, Object>();
public final Object key;
MapReferring(MapReferringAccumulator parent, UnresolvedForwardReference ref,
Class<?> valueType, Object key)
{
super(ref, valueType);
_parent = parent;
this.key = key;
}
@Override
public void handleResolvedForwardReference(DeserializationContext ctxt, Object id, Object value)
throws JacksonException
{
_parent.resolveForwardReference(ctxt, id, value);
}
}
}
|
MapReferring
|
java
|
quarkusio__quarkus
|
extensions/smallrye-metrics/deployment/src/test/java/io/quarkus/smallrye/metrics/registration/DefaultMethodTest.java
|
{
"start": 2585,
"end": 2724
}
|
// Specializes A by supplying a no-op default implementation of foo().
interface ____ extends A {
    default void foo() {
    }
}
@Dependent
@Counted(name = "Y", absolute = true)
static
|
X
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/util/KafkaBasedLog.java
|
{
"start": 3210,
"end": 3302
}
|
class ____ with either single- or multi-partition topics.
* </p>
* <p>
* Since this
|
works
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/test/groovy/io/micronaut/http/server/netty/websocket/PojoChatClientWebSocket.java
|
{
"start": 1202,
"end": 2105
}
|
// Test-side WebSocket chat client: captures the topic and username supplied at
// connection time and collects every Message received from the server so tests
// can assert on the conversation afterwards.
// NOTE(review): this type declares abstract members, so the (partially masked)
// declaration is presumably an abstract class -- confirm against the original.
class ____ implements AutoCloseable {
    private String topic;
    private String username;
    // Concurrent queue: messages may arrive on the event loop while the test thread reads.
    private Collection<Message> replies = new ConcurrentLinkedQueue<>();

    // Invoked on session open; parameters are bound from the connection URI.
    @OnOpen
    public void onOpen(String topic, String username) {
        this.topic = topic;
        this.username = username;
    }

    public String getTopic() {
        return topic;
    }

    public String getUsername() {
        return username;
    }

    // Messages received so far, in arrival order.
    public Collection<Message> getReplies() {
        return replies;
    }

    // Records each inbound message for later assertions.
    @OnMessage
    public void onMessage(
            Message message) {
        System.out.println("Client received message = " + message);
        replies.add(message);
    }

    // Sending variants (blocking, Future-based, reactive); implementations are generated.
    public abstract void send(Message message);

    public abstract Future<Message> sendAsync(Message message);

    @SingleResult
    public abstract Publisher<Message> sendRx(Message message);
}
|
PojoChatClientWebSocket
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/callbacks/Invocation.java
|
{
"start": 257,
"end": 1819
}
|
class ____ {
private final String methodName;
private final String arguments;
public Invocation(String methodName, Object... arguments) {
this.methodName = methodName;
this.arguments = Arrays.toString( arguments );
}
public String getMethodName() {
return methodName;
}
public String getArguments() {
return arguments;
}
@Override
public String toString() {
return "Invocation [methodName=" + methodName + ", arguments=" + arguments + "]";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ( ( arguments == null ) ? 0 : arguments.hashCode() );
result = prime * result + ( ( methodName == null ) ? 0 : methodName.hashCode() );
return result;
}
@Override
public boolean equals(Object obj) {
if ( this == obj ) {
return true;
}
if ( obj == null ) {
return false;
}
if ( getClass() != obj.getClass() ) {
return false;
}
Invocation other = (Invocation) obj;
if ( arguments == null ) {
if ( other.arguments != null ) {
return false;
}
}
else if ( !arguments.equals( other.arguments ) ) {
return false;
}
if ( methodName == null ) {
return other.methodName == null;
}
else {
return methodName.equals( other.methodName );
}
}
}
|
Invocation
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxMapFuseable.java
|
{
"start": 2328,
"end": 5376
}
|
class ____<T, R>
implements InnerOperator<T, R>,
QueueSubscription<R> {
final CoreSubscriber<? super R> actual;
final Function<? super T, ? extends @Nullable R> mapper;
boolean done;
@SuppressWarnings("NotNullFieldNotInitialized") // initialized in onSubscribe
QueueSubscription<T> s;
int sourceMode;
MapFuseableSubscriber(CoreSubscriber<? super R> actual,
Function<? super T, ? extends R> mapper) {
this.actual = actual;
this.mapper = mapper;
}
@SuppressWarnings("unchecked")
@Override
public void onSubscribe(Subscription s) {
if (Operators.validate(this.s, s)) {
this.s = (QueueSubscription<T>) s;
actual.onSubscribe(this);
}
}
@SuppressWarnings("DataFlowIssue") // fusion passes nulls via onNext
@Override
public void onNext(T t) {
if (sourceMode == ASYNC) {
actual.onNext(null);
}
else {
if (done) {
Operators.onNextDropped(t, actual.currentContext());
return;
}
R v;
try {
v = mapper.apply(t);
if (v == null) {
throw new NullPointerException("The mapper [" + mapper.getClass().getName() + "] returned a null value.");
}
}
catch (Throwable e) {
Throwable e_ = Operators.onNextError(t, e, actual.currentContext(), s);
if (e_ != null) {
onError(e_);
}
else {
s.request(1);
}
return;
}
actual.onNext(v);
}
}
@Override
public void onError(Throwable t) {
if (done) {
Operators.onErrorDropped(t, actual.currentContext());
return;
}
done = true;
actual.onError(t);
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
actual.onComplete();
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return s;
if (key == Attr.TERMINATED) return done;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return InnerOperator.super.scanUnsafe(key);
}
@Override
public CoreSubscriber<? super R> actual() {
return actual;
}
@Override
public void request(long n) {
s.request(n);
}
@Override
public void cancel() {
s.cancel();
}
@Override
public @Nullable R poll() {
for(;;) {
T v = s.poll();
if (v != null) {
try {
return Objects.requireNonNull(mapper.apply(v));
}
catch (Throwable t) {
RuntimeException e_ = Operators.onNextPollError(v, t, currentContext());
if (e_ != null) {
throw e_;
}
else {
continue;
}
}
}
return null;
}
}
@Override
public boolean isEmpty() {
return s.isEmpty();
}
@Override
public void clear() {
s.clear();
}
@Override
public int requestFusion(int requestedMode) {
int m;
if ((requestedMode & Fuseable.THREAD_BARRIER) != 0) {
return Fuseable.NONE;
}
else {
m = s.requestFusion(requestedMode);
}
sourceMode = m;
return m;
}
@Override
public int size() {
return s.size();
}
}
static final
|
MapFuseableSubscriber
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkFilterJoinRule.java
|
{
"start": 21623,
"end": 23571
}
|
/** Immutables-based configuration for {@code FlinkFilterIntoJoinRule}. */
interface ____ extends FlinkFilterJoinRule.Config {
    // Default config: matches a Filter whose single input is a Join, accepts
    // every predicate (the (join, joinType, exp) -> true lambda), and enables
    // "smart" pushdown.
    FlinkFilterIntoJoinRuleConfig DEFAULT =
            ImmutableFlinkFilterIntoJoinRuleConfig.of((join, joinType, exp) -> true)
                    .withOperandSupplier(
                            b0 ->
                                    b0.operand(Filter.class)
                                            .oneInput(
                                                    b1 ->
                                                            b1.operand(Join.class)
                                                                    .anyInputs()))
                    .withSmart(true);

    @Override
    default FlinkFilterIntoJoinRule toRule() {
        return new FlinkFilterIntoJoinRule(this);
    }
}
}
/**
 * Returns true if the given join condition contains an initial row-time
 * temporal table join condition anywhere in the expression tree.
 */
protected boolean isEventTimeTemporalJoin(RexNode joinCondition) {
    // Visitor that aborts traversal via Util.FoundOne as soon as a matching
    // call is encountered (standard Calcite short-circuit idiom).
    RexVisitor<Void> finder =
            new RexVisitorImpl<Void>(true) {
                @Override
                public Void visitCall(RexCall call) {
                    boolean initialRowTimeTemporal =
                            call.getOperator() == TemporalJoinUtil.INITIAL_TEMPORAL_JOIN_CONDITION()
                                    && TemporalJoinUtil.isInitialRowTimeTemporalTableJoin(call);
                    if (initialRowTimeTemporal) {
                        throw new Util.FoundOne(call);
                    }
                    return super.visitCall(call);
                }
            };
    try {
        joinCondition.accept(finder);
        return false; // traversal completed without finding a match
    } catch (Util.FoundOne found) {
        return true;
    }
}
/**
* Predicate that returns whether a filter is valid in the ON clause of a join for this
* particular kind of join. If not, Calcite will push it back to above the join.
*/
@FunctionalInterface
public
|
FlinkFilterIntoJoinRuleConfig
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.