language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/InFragment.java | {
"start": 367,
"end": 2778
} | class ____ {
public static final String NULL = "null";
public static final String NOT_NULL = "not null";
protected String columnName;
protected List<Object> values = new ArrayList<>();
/**
* @param value an SQL literal, NULL, or NOT_NULL
*
* @return {@code this}, for method chaining
*/
public InFragment addValue(Object value) {
values.add( value );
return this;
}
public InFragment addValues(Object[] values) {
Collections.addAll( this.values, values );
return this;
}
public InFragment setColumn(String columnName) {
this.columnName = columnName;
return this;
}
public InFragment setColumn(String alias, String columnName) {
this.columnName = StringHelper.qualify( alias, columnName );
return setColumn( this.columnName );
}
public InFragment setFormula(String alias, String formulaTemplate) {
this.columnName = StringHelper.replace( formulaTemplate, Template.TEMPLATE, alias );
return setColumn( this.columnName );
}
public List<Object> getValues() {
return values;
}
public String toFragmentString() {
final StringBuilder buf = new StringBuilder( values.size() * 5 );
switch ( values.size() ) {
case 0: {
return "0=1";
}
case 1: {
Object value = values.get( 0 );
buf.append( columnName );
if ( NULL.equals( value ) ) {
buf.append( " is null" );
}
else {
if ( NOT_NULL.equals( value ) ) {
buf.append( " is not null" );
}
else {
buf.append( '=' ).append( value );
}
}
return buf.toString();
}
default: {
boolean allowNull = false;
for ( Object value : values ) {
if ( NULL.equals( value ) ) {
allowNull = true;
}
else {
if ( NOT_NULL.equals( value ) ) {
throw new IllegalArgumentException( "not null makes no sense for in expression" );
}
}
}
if ( allowNull ) {
buf.append( '(' )
.append( columnName )
.append( " is null or " )
.append( columnName )
.append( " in (" );
}
else {
buf.append( columnName ).append( " in (" );
}
for ( Object value : values ) {
if ( !NULL.equals( value ) ) {
buf.append( value );
buf.append( ", " );
}
}
buf.setLength( buf.length() - 2 );
if ( allowNull ) {
buf.append( "))" );
}
else {
buf.append( ')' );
}
return buf.toString();
}
}
}
}
| InFragment |
java | google__guice | core/test/com/google/inject/internal/OptionalBinderTest.java | {
"start": 45878,
"end": 47865
} | class ____ {
@Inject
JitInjectable() {}
}
public void testOptionalBinderAndProviderLookup() {
Optional<JitInjectable> optional =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
OptionalBinder.newOptionalBinder(binder(), JitInjectable.class);
getProvider(JitInjectable.class);
}
})
.getInstance(new Key<Optional<JitInjectable>>() {});
assertThat(optional).isPresent();
}
/**
* Tests that an OptionalBinder that depends on a Multibinder resolves initialization order
* correctly to an optimized provider.
*/
public void testOptionalBinderDependsOnMultibinder() {
Key<Set<String>> key = new Key<Set<String>>() {};
Optional<Provider<Set<String>>> e =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
OptionalBinder.newOptionalBinder(binder(), key);
Multibinder.newSetBinder(binder(), String.class);
}
})
.getInstance(new Key<Optional<Provider<Set<String>>>>() {});
// Check that we are using the optimized path.
assertThat(e.get()).isInstanceOf(InternalFactory.InstanceProvider.class);
e =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
Multibinder.newSetBinder(binder(), String.class);
OptionalBinder.newOptionalBinder(binder(), key);
}
})
.getInstance(new Key<Optional<Provider<Set<String>>>>() {});
assertThat(e.get()).isInstanceOf(InternalFactory.InstanceProvider.class);
}
/**
* Returns the short name for a module instance. Used to get the name of the anoymous | JitInjectable |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDeleteRace.java | {
"start": 6117,
"end": 6913
} | class ____ extends SubjectInheritingThread {
private FileSystem fs;
private Path path;
DeleteThread(FileSystem fs, Path path) {
this.fs = fs;
this.path = path;
}
@Override
public void work() {
try {
Thread.sleep(1000);
LOG.info("Deleting" + path);
final FSDirectory fsdir = cluster.getNamesystem().dir;
INode fileINode = fsdir.getINode4Write(path.toString());
INodeMap inodeMap = (INodeMap) Whitebox.getInternalState(fsdir,
"inodeMap");
fs.delete(path, false);
// after deletion, add the inode back to the inodeMap
inodeMap.put(fileINode);
LOG.info("Deleted" + path);
} catch (Exception e) {
LOG.info(e.toString());
}
}
}
private | DeleteThread |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/ref/RefTest21.java | {
"start": 236,
"end": 595
} | class ____ extends TestCase {
public void test_ref() throws Exception {
String jsonTest = "{\"details\":{\"type\":{\"items\":{\"allOf\":[{\"$ref\":\"title\",\"required\":[\"iconImg\"]}]}}}}";
JSONObject object = JSON.parseObject(jsonTest, Feature.DisableSpecialKeyDetect);
System.out.println( object.get( "details"));
}
}
| RefTest21 |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/disabled/DisabledInterceptorInStrictModeTest.java | {
"start": 1322,
"end": 1480
} | interface ____ {
}
@MyInterceptorBinding
@Interceptor
// no @Priority, the interceptor is disabled in strict mode
static | MyInterceptorBinding |
java | quarkusio__quarkus | integration-tests/hibernate-search-standalone-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/standalone/elasticsearch/analysis/Analysis0TestingEntity.java | {
"start": 354,
"end": 649
} | class ____ extends AnalysisTestingEntityBase {
public Analysis0TestingEntity(long id, String text) {
super(id, text);
}
@FullTextField(analyzer = "backend-level-analyzer")
@Override
public String getText() {
return super.getText();
}
}
| Analysis0TestingEntity |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/metrics/MetricsTrackingStateTestBase.java | {
"start": 2245,
"end": 6468
} | class ____<K> {
protected static final int SAMPLE_INTERVAL = 10;
protected AbstractKeyedStateBackend<K> createKeyedBackend(TypeSerializer<K> keySerializer)
throws Exception {
Environment env = new DummyEnvironment();
KeyGroupRange keyGroupRange = new KeyGroupRange(0, 127);
int numberOfKeyGroups = keyGroupRange.getNumberOfKeyGroups();
Configuration configuration = new Configuration();
configuration.set(StateLatencyTrackOptions.LATENCY_TRACK_ENABLED, true);
configuration.set(StateSizeTrackOptions.SIZE_TRACK_ENABLED, true);
configuration.set(StateLatencyTrackOptions.LATENCY_TRACK_SAMPLE_INTERVAL, SAMPLE_INTERVAL);
configuration.set(StateSizeTrackOptions.SIZE_TRACK_SAMPLE_INTERVAL, SAMPLE_INTERVAL);
// use a very large value to not let metrics data overridden.
int historySize = 1000_000;
configuration.set(StateLatencyTrackOptions.LATENCY_TRACK_HISTORY_SIZE, historySize);
configuration.set(StateSizeTrackOptions.SIZE_TRACK_HISTORY_SIZE, historySize);
HashMapStateBackend stateBackend =
new HashMapStateBackend()
.configure(configuration, Thread.currentThread().getContextClassLoader());
JobID jobID = new JobID();
TaskKvStateRegistry kvStateRegistry = env.getTaskKvStateRegistry();
CloseableRegistry cancelStreamRegistry = new CloseableRegistry();
return stateBackend.createKeyedStateBackend(
new KeyedStateBackendParametersImpl<>(
env,
jobID,
"test_op",
keySerializer,
numberOfKeyGroups,
keyGroupRange,
kvStateRegistry,
TtlTimeProvider.DEFAULT,
new UnregisteredMetricsGroup(),
Collections.emptyList(),
cancelStreamRegistry));
}
@SuppressWarnings("unchecked")
protected <
N,
V,
S extends InternalKvState<K, N, V>,
S2 extends State,
LSM extends StateMetricBase>
AbstractMetricsTrackState<K, N, V, S, LSM> createMetricsTrackingState(
AbstractKeyedStateBackend<K> keyedBackend,
StateDescriptor<S2, V> stateDescriptor)
throws Exception {
S2 keyedState =
keyedBackend.getOrCreateKeyedState(
VoidNamespaceSerializer.INSTANCE, stateDescriptor);
Preconditions.checkState(keyedState instanceof AbstractMetricsTrackState);
return (AbstractMetricsTrackState<K, N, V, S, LSM>) keyedState;
}
abstract <V, S extends State> StateDescriptor<S, V> getStateDescriptor();
abstract TypeSerializer<K> getKeySerializer();
abstract void setCurrentKey(AbstractKeyedStateBackend<K> keyedBackend);
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
void testLatencyTrackingStateClear() throws Exception {
AbstractKeyedStateBackend<K> keyedBackend = createKeyedBackend(getKeySerializer());
try {
AbstractMetricsTrackState latencyTrackingState =
createMetricsTrackingState(keyedBackend, getStateDescriptor());
latencyTrackingState.setCurrentNamespace(VoidNamespace.INSTANCE);
StateMetricBase latencyTrackingStateMetric =
latencyTrackingState.getLatencyTrackingStateMetric();
assertThat(latencyTrackingStateMetric.getClearCount()).isZero();
setCurrentKey(keyedBackend);
for (int index = 1; index <= SAMPLE_INTERVAL; index++) {
int expectedResult = index == SAMPLE_INTERVAL ? 0 : index;
latencyTrackingState.clear();
assertThat(latencyTrackingStateMetric.getClearCount()).isEqualTo(expectedResult);
}
} finally {
if (keyedBackend != null) {
keyedBackend.close();
keyedBackend.dispose();
}
}
}
}
| MetricsTrackingStateTestBase |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/framework/AbstractAopProxyTests.java | {
"start": 58055,
"end": 58652
} | class ____ extends MethodCounter implements MethodBeforeAdvice,
AfterReturningAdvice, ThrowsAdvice {
@Override
public void before(Method m, Object[] args, @Nullable Object target) {
count(m);
}
@Override
public void afterReturning(@Nullable Object o, Method m, Object[] args, @Nullable Object target) {
count(m);
}
public void afterThrowing(IOException ex) {
count(IOException.class.getName());
}
public void afterThrowing(UncheckedException ex) {
count(UncheckedException.class.getName());
}
}
@SuppressWarnings("serial")
public static | CountingMultiAdvice |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/redisnode/RedisSlave.java | {
"start": 729,
"end": 789
} | interface ____ extends RedisNode, RedisSlaveAsync {
}
| RedisSlave |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/naming/pojo/healthcheck/AbstractHealthChecker.java | {
"start": 2365,
"end": 2768
} | class ____ extends AbstractHealthChecker {
public static final String TYPE = "NONE";
private static final long serialVersionUID = -760631831097384737L;
public None() {
super(TYPE);
}
@Override
public AbstractHealthChecker clone() throws CloneNotSupportedException {
return new None();
}
}
}
| None |
java | elastic__elasticsearch | client/rest/src/main/java/org/elasticsearch/client/RestClient.java | {
"start": 38744,
"end": 41722
} | class ____ {
private final Response response;
private final ResponseException responseException;
ResponseOrResponseException(Response response) {
this.response = Objects.requireNonNull(response);
this.responseException = null;
}
ResponseOrResponseException(ResponseException responseException) {
this.responseException = Objects.requireNonNull(responseException);
this.response = null;
}
}
/**
* Wrap the exception so the caller's signature shows up in the stack trace, taking care to copy the original type and message
* where possible so async and sync code don't have to check different exceptions.
*/
private static Exception extractAndWrapCause(Exception exception) {
if (exception instanceof InterruptedException) {
Thread.currentThread().interrupt();
throw new RuntimeException("thread waiting for the response was interrupted", exception);
}
if (exception instanceof ExecutionException) {
ExecutionException executionException = (ExecutionException) exception;
Throwable t = executionException.getCause() == null ? executionException : executionException.getCause();
if (t instanceof Error) {
throw (Error) t;
}
exception = (Exception) t;
}
if (exception instanceof ConnectTimeoutException) {
ConnectTimeoutException e = new ConnectTimeoutException(exception.getMessage());
e.initCause(exception);
return e;
}
if (exception instanceof SocketTimeoutException) {
SocketTimeoutException e = new SocketTimeoutException(exception.getMessage());
e.initCause(exception);
return e;
}
if (exception instanceof ConnectionClosedException) {
ConnectionClosedException e = new ConnectionClosedException(exception.getMessage());
e.initCause(exception);
return e;
}
if (exception instanceof SSLHandshakeException) {
SSLHandshakeException e = new SSLHandshakeException(exception.getMessage());
e.initCause(exception);
return e;
}
if (exception instanceof ConnectException) {
ConnectException e = new ConnectException(exception.getMessage());
e.initCause(exception);
return e;
}
if (exception instanceof IOException) {
return new IOException(exception.getMessage(), exception);
}
if (exception instanceof RuntimeException) {
return new RuntimeException(exception.getMessage(), exception);
}
return new RuntimeException("error while performing request", exception);
}
/**
* A gzip compressing entity that also implements {@code getContent()}.
*/
public static | ResponseOrResponseException |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/ReachabilityChecker.java | {
"start": 3551,
"end": 5447
} | class ____ {
private final String description;
private final PhantomReference<?> phantomReference;
private final ReferenceQueue<?> referenceQueue;
Registered(String description, PhantomReference<?> phantomReference, ReferenceQueue<?> referenceQueue) {
this.description = description;
this.phantomReference = phantomReference;
this.referenceQueue = referenceQueue;
}
/**
* Attempts to trigger the GC repeatedly until the {@link ReferenceQueue} yields a reference.
*/
public void assertReferenceEnqueuedForCollection(MemoryMXBean memoryMXBean, long timeoutMillis) {
try {
final var timeoutAt = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMillis);
while (true) {
memoryMXBean.gc();
final var ref = referenceQueue.remove(500);
if (ref != null) {
ref.clear();
return;
}
assertTrue("still reachable: " + description, System.nanoTime() < timeoutAt);
assertNull(phantomReference.get()); // always succeeds, we're just doing this to use the phantomReference for something
}
} catch (Exception e) {
ESTestCase.fail(e);
}
}
/**
* Attempts to trigger the GC and verifies that the {@link ReferenceQueue} does not yield a reference.
*/
public void assertReferenceNotEnqueuedForCollection(MemoryMXBean memoryMXBean) {
try {
memoryMXBean.gc();
assertNull("became unreachable: " + description, referenceQueue.remove(100));
} catch (Exception e) {
ESTestCase.fail(e);
}
}
}
}
| Registered |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java | {
"start": 2627,
"end": 15163
} | class ____ extends ESTestCase {
private void assertHighlightOneDoc(
String fieldName,
String[] inputs,
Analyzer analyzer,
Query query,
Locale locale,
BreakIterator breakIterator,
int noMatchSize,
String[] expectedPassages
) throws Exception {
assertHighlightOneDoc(
fieldName,
inputs,
analyzer,
query,
locale,
breakIterator,
noMatchSize,
expectedPassages,
Integer.MAX_VALUE,
null
);
}
private void assertHighlightOneDoc(
String fieldName,
String[] inputs,
Analyzer analyzer,
Query query,
Locale locale,
BreakIterator breakIterator,
int noMatchSize,
String[] expectedPassages,
int maxAnalyzedOffset,
Integer queryMaxAnalyzedOffset
) throws Exception {
assertHighlightOneDoc(
fieldName,
inputs,
analyzer,
query,
locale,
breakIterator,
noMatchSize,
expectedPassages,
maxAnalyzedOffset,
queryMaxAnalyzedOffset,
UnifiedHighlighter.OffsetSource.ANALYSIS
);
}
private void assertHighlightOneDoc(
String fieldName,
String[] inputs,
Analyzer analyzer,
Query query,
Locale locale,
BreakIterator breakIterator,
int noMatchSize,
String[] expectedPassages,
int maxAnalyzedOffset,
Integer queryMaxAnalyzedOffset,
UnifiedHighlighter.OffsetSource offsetSource
) throws Exception {
try (Directory dir = newDirectory()) {
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
iwc.setMergePolicy(newTieredMergePolicy(random()));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType ft = new FieldType(TextField.TYPE_STORED);
ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
ft.freeze();
Document doc = new Document();
for (String input : inputs) {
Field field = new Field(fieldName, "", ft);
field.setStringValue(input);
doc.add(field);
}
iw.addDocument(doc);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
iw.close();
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER);
assertThat(topDocs.totalHits.value(), equalTo(1L));
String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR));
UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, analyzer);
builder.withBreakIterator(() -> breakIterator);
builder.withFieldMatcher(name -> "text".equals(name));
builder.withFormatter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder(), 3));
CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(
builder,
offsetSource,
locale,
"index",
"text",
query,
noMatchSize,
expectedPassages.length,
maxAnalyzedOffset,
QueryMaxAnalyzedOffset.create(queryMaxAnalyzedOffset, maxAnalyzedOffset),
true,
true
);
final Snippet[] snippets = highlighter.highlightField(getOnlyLeafReader(reader), topDocs.scoreDocs[0].doc, () -> rawValue);
assertEquals(expectedPassages.length, snippets.length);
for (int i = 0; i < snippets.length; i++) {
assertEquals(expectedPassages[i], snippets[i].getText());
}
}
}
}
public void testSimple() throws Exception {
final String[] inputs = {
"This is a test. Just a test1 highlighting from unified highlighter.",
"This is the second highlighting value to perform highlighting on a longer text that gets scored lower.",
"This is highlighting the third short highlighting value.",
"Just a test4 highlighting from unified highlighter." };
String[] expectedPassages = {
"Just a test1 <b>highlighting</b> from unified highlighter.",
"This is the second <b>highlighting</b> value to perform <b>highlighting</b> on a" + " longer text that gets scored lower.",
"This is <b>highlighting</b> the third short <b>highlighting</b> value.",
"Just a test4 <b>highlighting</b> from unified highlighter." };
Query query = new TermQuery(new Term("text", "highlighting"));
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BreakIterator.getSentenceInstance(Locale.ROOT),
0,
expectedPassages
);
}
public void testNoMatchSize() throws Exception {
final String[] inputs = { "This is a test. Just a test highlighting from unified. Feel free to ignore." };
Query query = new TermQuery(new Term("body", "highlighting"));
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BreakIterator.getSentenceInstance(Locale.ROOT),
100,
inputs
);
}
public void testMultiPhrasePrefixQuerySingleTerm() throws Exception {
final String[] inputs = { "The quick brown fox." };
final String[] outputs = { "The quick <b>brown</b> fox." };
MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("text");
query.add(new Term("text", "bro"));
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BreakIterator.getSentenceInstance(Locale.ROOT),
0,
outputs
);
}
public void testMultiPhrasePrefixQuery() throws Exception {
final String[] inputs = { "The quick brown fox." };
final String[] outputs = { "The <b>quick brown fox</b>." };
MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("text");
query.add(new Term("text", "quick"));
query.add(new Term("text", "brown"));
query.add(new Term("text", "fo"));
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BreakIterator.getSentenceInstance(Locale.ROOT),
0,
outputs
);
}
public void testSentenceBoundedBreakIterator() throws Exception {
final String[] inputs = {
"The quick brown fox in a long sentence with another quick brown fox. " + "Another sentence with brown fox." };
final String[] outputs = {
"The <b>quick</b> <b>brown</b>",
"<b>fox</b> in a long",
"another <b>quick</b>",
"<b>brown</b> <b>fox</b>.",
"sentence with <b>brown</b>",
"<b>fox</b>.", };
BooleanQuery query = new BooleanQuery.Builder().add(new TermQuery(new Term("text", "quick")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "brown")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "fox")), BooleanClause.Occur.SHOULD)
.build();
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 10),
0,
outputs
);
}
public void testSmallSentenceBoundedBreakIterator() throws Exception {
final String[] inputs = { "A short sentence. Followed by a bigger sentence that should be truncated. And a last short sentence." };
final String[] outputs = { "A short <b>sentence</b>.", "Followed by a bigger <b>sentence</b>", "And a last short <b>sentence</b>" };
TermQuery query = new TermQuery(new Term("text", "sentence"));
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 20),
0,
outputs
);
}
public void testRepeatTerm() throws Exception {
final String[] inputs = { "Fun fun fun fun fun fun fun fun fun fun" };
final String[] outputs = {
"<b>Fun</b> <b>fun</b> <b>fun</b>",
"<b>fun</b> <b>fun</b>",
"<b>fun</b> <b>fun</b> <b>fun</b>",
"<b>fun</b> <b>fun</b>" };
Query query = new TermQuery(new Term("text", "fun"));
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 10),
0,
outputs
);
}
public void testRepeatPhrase() throws Exception {
final String[] inputs = { "Fun fun fun fun fun fun fun fun fun fun" };
final String[] outputs = { "<b>Fun fun fun</b>", "<b>fun fun </b>", "<b>fun fun fun</b>", "<b>fun fun</b>" };
Query query = new PhraseQuery.Builder().add(new Term("text", "fun")).add(new Term("text", "fun")).build();
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 10),
0,
outputs
);
}
public void testGroupSentences() throws Exception {
final String[] inputs = { "Two words. Followed by many words in a big sentence. One. Two. Three. And more words." };
final String[] outputs = {
"<b>Two</b> <b>words</b>.",
"Followed by many <b>words</b>",
"<b>One</b>. <b>Two</b>. <b>Three</b>.",
"And more <b>words</b>.", };
BooleanQuery query = new BooleanQuery.Builder().add(new TermQuery(new Term("text", "one")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "two")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "three")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "words")), BooleanClause.Occur.SHOULD)
.build();
assertHighlightOneDoc(
"text",
inputs,
new StandardAnalyzer(),
query,
Locale.ROOT,
BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 20),
0,
outputs
);
}
public void testOverlappingTerms() throws Exception {
final String[] inputs = { "bro", "brown", "brownie", "browser" };
final String[] outputs = { "<b>bro</b>", "<b>brown</b>", "<b>browni</b>e", "<b>browser</b>" };
BooleanQuery query = new BooleanQuery.Builder().add(new FuzzyQuery(new Term("text", "brow")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "b")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "br")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "bro")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "brown")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "browni")), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term("text", "browser")), BooleanClause.Occur.SHOULD)
.build();
Analyzer analyzer = CustomAnalyzer.builder()
.withTokenizer(EdgeNGramTokenizerFactory.class, "minGramSize", "1", "maxGramSize", "7")
.build();
assertHighlightOneDoc("text", inputs, analyzer, query, Locale.ROOT, BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs);
}
public static | CustomUnifiedHighlighterTests |
java | apache__camel | components/camel-ignite/src/main/java/org/apache/camel/component/ignite/ClusterGroupExpressions.java | {
"start": 1017,
"end": 2766
} | class ____ {
public static final ClusterGroupExpression FOR_CLIENTS = new ClusterGroupExpression() {
@Override
public ClusterGroup getClusterGroup(Ignite ignite) {
return ignite.cluster().forClients();
}
};
public static final ClusterGroupExpression FOR_LOCAL = new ClusterGroupExpression() {
@Override
public ClusterGroup getClusterGroup(Ignite ignite) {
return ignite.cluster().forLocal();
}
};
public static final ClusterGroupExpression FOR_OLDEST = new ClusterGroupExpression() {
@Override
public ClusterGroup getClusterGroup(Ignite ignite) {
return ignite.cluster().forOldest();
}
};
public static final ClusterGroupExpression FOR_YOUNGEST = new ClusterGroupExpression() {
@Override
public ClusterGroup getClusterGroup(Ignite ignite) {
return ignite.cluster().forYoungest();
}
};
public static final ClusterGroupExpression FOR_RANDOM = new ClusterGroupExpression() {
@Override
public ClusterGroup getClusterGroup(Ignite ignite) {
return ignite.cluster().forRandom();
}
};
public static final ClusterGroupExpression FOR_REMOTES = new ClusterGroupExpression() {
@Override
public ClusterGroup getClusterGroup(Ignite ignite) {
return ignite.cluster().forRemotes();
}
};
public static final ClusterGroupExpression FOR_SERVERS = new ClusterGroupExpression() {
@Override
public ClusterGroup getClusterGroup(Ignite ignite) {
return ignite.cluster().forServers();
}
};
private ClusterGroupExpressions() {
}
}
| ClusterGroupExpressions |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/LogManager.java | {
"start": 28754,
"end": 29194
} | class ____ should be used as the Logger name. If null the name of the calling class
* will be used as the logger name.
* @param messageFactory The message factory is used only when creating a logger, subsequent use does not change the
* logger but will log a warning if mismatched.
* @return The Logger.
* @throws UnsupportedOperationException if {@code value} is {@code null} and the calling | name |
java | apache__camel | components/camel-as2/camel-as2-api/src/test/java/org/apache/camel/component/as2/api/entity/TextPlainEntityTest.java | {
"start": 1095,
"end": 1838
} | class ____ {
@Test
void test_parse() throws IOException, HttpException {
String parsedMimeMessage;
try (MimeEntity mimeEntity = EntityParser.parseEntity(MESSAGE.getBytes(StandardCharsets.US_ASCII))) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
mimeEntity.writeTo(out);
parsedMimeMessage = new String(out.toByteArray(), StandardCharsets.US_ASCII);
}
Assertions.assertEquals(parsedMimeMessage, MESSAGE);
}
String MESSAGE = """
Content-Type: text/plain; charset=US-ASCII\r
Content-Transfer-Encoding: binary\r
\r
<root>
\t<item/>
</root>
""";
}
| TextPlainEntityTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/ComponentsBuilderFactory.java | {
"start": 33683,
"end": 166778
} | interface ____ triggering events at times specified through the
* Unix cron syntax.
*
* Category: scheduling
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-cron
*
* @return the dsl builder
*/
static CronComponentBuilderFactory.CronComponentBuilder cron() {
return CronComponentBuilderFactory.cron();
}
/**
* Crypto (JCE) (camel-crypto)
* Sign and verify exchanges using the Signature Service of the Java
* Cryptographic Extension (JCE).
*
* Category: security,transformation
* Since: 2.3
* Maven coordinates: org.apache.camel:camel-crypto
*
* @return the dsl builder
*/
static CryptoComponentBuilderFactory.CryptoComponentBuilder crypto() {
return CryptoComponentBuilderFactory.crypto();
}
/**
* CXF (camel-cxf-soap)
* Expose SOAP WebServices using Apache CXF or connect to external
* WebServices using CXF WS client.
*
* Category: http,webservice
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-cxf-soap
*
* @return the dsl builder
*/
static CxfComponentBuilderFactory.CxfComponentBuilder cxf() {
return CxfComponentBuilderFactory.cxf();
}
/**
* CXF-RS (camel-cxf-rest)
* Expose JAX-RS REST services using Apache CXF or connect to external REST
* services using CXF REST client.
*
* Category: rest
* Since: 2.0
* Maven coordinates: org.apache.camel:camel-cxf-rest
*
* @return the dsl builder
*/
static CxfrsComponentBuilderFactory.CxfrsComponentBuilder cxfrs() {
return CxfrsComponentBuilderFactory.cxfrs();
}
/**
* CyberArk Vault (camel-cyberark-vault)
* Retrieve secrets from CyberArk Conjur Vault.
*
* Category: cloud,security
* Since: 4.17
* Maven coordinates: org.apache.camel:camel-cyberark-vault
*
* @return the dsl builder
*/
static CyberarkVaultComponentBuilderFactory.CyberarkVaultComponentBuilder cyberarkVault() {
return CyberarkVaultComponentBuilderFactory.cyberarkVault();
}
/**
* Dapr (camel-dapr)
* Dapr component which interfaces with Dapr Building Blocks.
*
* Category: cloud,saas
* Since: 4.12
* Maven coordinates: org.apache.camel:camel-dapr
*
* @return the dsl builder
*/
static DaprComponentBuilderFactory.DaprComponentBuilder dapr() {
return DaprComponentBuilderFactory.dapr();
}
/**
* Data Format (camel-dataformat)
* Use a Camel Data Format as a regular Camel Component.
*
* Category: core,transformation
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-dataformat
*
* @return the dsl builder
*/
static DataformatComponentBuilderFactory.DataformatComponentBuilder dataformat() {
return DataformatComponentBuilderFactory.dataformat();
}
/**
* Dataset (camel-dataset)
* Provide data for load and soak testing of your Camel application.
*
* Category: core,testing
* Since: 1.3
* Maven coordinates: org.apache.camel:camel-dataset
*
* @return the dsl builder
*/
static DatasetComponentBuilderFactory.DatasetComponentBuilder dataset() {
return DatasetComponentBuilderFactory.dataset();
}
/**
* DataSet Test (camel-dataset)
* Extends the mock component by pulling messages from another endpoint on
* startup to set the expected message bodies.
*
* Category: core,testing
* Since: 1.3
* Maven coordinates: org.apache.camel:camel-dataset
*
* @return the dsl builder
*/
static DatasetTestComponentBuilderFactory.DatasetTestComponentBuilder datasetTest() {
return DatasetTestComponentBuilderFactory.datasetTest();
}
/**
* Debezium DB2 Connector (camel-debezium-db2)
* Capture changes from a DB2 database.
*
* Category: database
* Since: 3.17
* Maven coordinates: org.apache.camel:camel-debezium-db2
*
* @return the dsl builder
*/
static DebeziumDb2ComponentBuilderFactory.DebeziumDb2ComponentBuilder debeziumDb2() {
return DebeziumDb2ComponentBuilderFactory.debeziumDb2();
}
/**
* Debezium MongoDB Connector (camel-debezium-mongodb)
* Capture changes from a MongoDB database.
*
* Category: database
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-debezium-mongodb
*
* @return the dsl builder
*/
static DebeziumMongodbComponentBuilderFactory.DebeziumMongodbComponentBuilder debeziumMongodb() {
return DebeziumMongodbComponentBuilderFactory.debeziumMongodb();
}
/**
* Debezium MySQL Connector (camel-debezium-mysql)
* Capture changes from a MySQL database.
*
* Category: database
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-debezium-mysql
*
* @return the dsl builder
*/
static DebeziumMysqlComponentBuilderFactory.DebeziumMysqlComponentBuilder debeziumMysql() {
return DebeziumMysqlComponentBuilderFactory.debeziumMysql();
}
/**
* Debezium Oracle Connector (camel-debezium-oracle)
* Capture changes from an Oracle database.
*
* Category: database
* Since: 3.17
* Maven coordinates: org.apache.camel:camel-debezium-oracle
*
* @return the dsl builder
*/
static DebeziumOracleComponentBuilderFactory.DebeziumOracleComponentBuilder debeziumOracle() {
return DebeziumOracleComponentBuilderFactory.debeziumOracle();
}
/**
 * Debezium PostgreSQL Connector (camel-debezium-postgres)
 * Capture changes from a PostgreSQL database.
*
* Category: database
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-debezium-postgres
*
* @return the dsl builder
*/
static DebeziumPostgresComponentBuilderFactory.DebeziumPostgresComponentBuilder debeziumPostgres() {
return DebeziumPostgresComponentBuilderFactory.debeziumPostgres();
}
/**
* Debezium SQL Server Connector (camel-debezium-sqlserver)
* Capture changes from an SQL Server database.
*
* Category: database
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-debezium-sqlserver
*
* @return the dsl builder
*/
static DebeziumSqlserverComponentBuilderFactory.DebeziumSqlserverComponentBuilder debeziumSqlserver() {
return DebeziumSqlserverComponentBuilderFactory.debeziumSqlserver();
}
/**
* DFDL (camel-dfdl)
* Transforms fixed format data such as EDI message from/to XML using a Data
* Format Description Language (DFDL).
*
* Category: transformation
* Since: 4.11
* Maven coordinates: org.apache.camel:camel-dfdl
*
* @return the dsl builder
*/
static DfdlComponentBuilderFactory.DfdlComponentBuilder dfdl() {
return DfdlComponentBuilderFactory.dfdl();
}
/**
* DHIS2 (camel-dhis2)
* Leverages the DHIS2 Java SDK to integrate Apache Camel with the DHIS2 Web
* API.
*
* Category: api
* Since: 4.0
* Maven coordinates: org.apache.camel:camel-dhis2
*
* @return the dsl builder
*/
static Dhis2ComponentBuilderFactory.Dhis2ComponentBuilder dhis2() {
return Dhis2ComponentBuilderFactory.dhis2();
}
/**
* DigitalOcean (camel-digitalocean)
* Manage Droplets and resources within the DigitalOcean cloud.
*
* Category: cloud,management
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-digitalocean
*
* @return the dsl builder
*/
static DigitaloceanComponentBuilderFactory.DigitaloceanComponentBuilder digitalocean() {
return DigitaloceanComponentBuilderFactory.digitalocean();
}
/**
* Direct (camel-direct)
* Call another endpoint from the same Camel Context synchronously.
*
* Category: core,messaging
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-direct
*
* @return the dsl builder
*/
static DirectComponentBuilderFactory.DirectComponentBuilder direct() {
return DirectComponentBuilderFactory.direct();
}
/**
* Disruptor (camel-disruptor)
* Provides asynchronous SEDA behavior using LMAX Disruptor.
*
* Category: messaging
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-disruptor
*
* @return the dsl builder
*/
static DisruptorComponentBuilderFactory.DisruptorComponentBuilder disruptor() {
return DisruptorComponentBuilderFactory.disruptor();
}
/**
* Disruptor VM (camel-disruptor)
* Provides asynchronous SEDA behavior using LMAX Disruptor.
*
* Category: messaging
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-disruptor
*
* @return the dsl builder
*/
static DisruptorVmComponentBuilderFactory.DisruptorVmComponentBuilder disruptorVm() {
return DisruptorVmComponentBuilderFactory.disruptorVm();
}
/**
* Deep Java Library (camel-djl)
* Infer Deep Learning models from message exchanges data using Deep Java
* Library (DJL).
*
* Category: ai
* Since: 3.3
* Maven coordinates: org.apache.camel:camel-djl
*
* @return the dsl builder
*/
static DjlComponentBuilderFactory.DjlComponentBuilder djl() {
return DjlComponentBuilderFactory.djl();
}
/**
* DNS (camel-dns)
* Perform DNS queries using DNSJava.
*
* Category: networking
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-dns
*
* @return the dsl builder
*/
static DnsComponentBuilderFactory.DnsComponentBuilder dns() {
return DnsComponentBuilderFactory.dns();
}
/**
* Docker (camel-docker)
* Manage Docker containers.
*
* Category: cloud,container
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-docker
*
* @return the dsl builder
*/
static DockerComponentBuilderFactory.DockerComponentBuilder docker() {
return DockerComponentBuilderFactory.docker();
}
/**
* Docling (camel-docling)
* Process documents using Docling library for parsing and conversion.
*
* Category: transformation,ai
* Since: 4.15
* Maven coordinates: org.apache.camel:camel-docling
*
* @return the dsl builder
*/
static DoclingComponentBuilderFactory.DoclingComponentBuilder docling() {
return DoclingComponentBuilderFactory.docling();
}
/**
* Drill (camel-drill)
* Perform queries against an Apache Drill cluster.
*
* Category: database,bigdata
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-drill
*
* @return the dsl builder
*/
static DrillComponentBuilderFactory.DrillComponentBuilder drill() {
return DrillComponentBuilderFactory.drill();
}
/**
* Dropbox (camel-dropbox)
* Upload, download and manage files, folders, groups, collaborations, etc
* on Dropbox.
*
* Category: cloud,file,api
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-dropbox
*
* @return the dsl builder
*/
static DropboxComponentBuilderFactory.DropboxComponentBuilder dropbox() {
return DropboxComponentBuilderFactory.dropbox();
}
/**
* Dynamic Router (camel-dynamic-router)
* The Dynamic Router component routes exchanges to recipients, and the
* recipients (and their rules) may change at runtime.
*
* Category: messaging,core
* Since: 3.15
* Maven coordinates: org.apache.camel:camel-dynamic-router
*
* @return the dsl builder
*/
static DynamicRouterComponentBuilderFactory.DynamicRouterComponentBuilder dynamicRouter() {
return DynamicRouterComponentBuilderFactory.dynamicRouter();
}
/**
* Dynamic Router Control (camel-dynamic-router)
* The Dynamic Router control endpoint for operations that allow routing
* participants to subscribe or unsubscribe to participate in dynamic
* message routing.
*
* Category: messaging
* Since: 4.4
* Maven coordinates: org.apache.camel:camel-dynamic-router
*
* @return the dsl builder
*/
static DynamicRouterControlComponentBuilderFactory.DynamicRouterControlComponentBuilder dynamicRouterControl() {
return DynamicRouterControlComponentBuilderFactory.dynamicRouterControl();
}
/**
* Ehcache (camel-ehcache)
* Perform caching operations using Ehcache.
*
* Category: cache,clustering
* Since: 2.18
* Maven coordinates: org.apache.camel:camel-ehcache
*
* @return the dsl builder
*/
static EhcacheComponentBuilderFactory.EhcacheComponentBuilder ehcache() {
return EhcacheComponentBuilderFactory.ehcache();
}
/**
* Elasticsearch (camel-elasticsearch)
* Send requests to ElasticSearch via Java Client API.
*
* Category: search,monitoring
* Since: 3.19
* Maven coordinates: org.apache.camel:camel-elasticsearch
*
* @return the dsl builder
*/
static ElasticsearchComponentBuilderFactory.ElasticsearchComponentBuilder elasticsearch() {
return ElasticsearchComponentBuilderFactory.elasticsearch();
}
/**
* Elasticsearch Low level Rest Client (camel-elasticsearch-rest-client)
* Perform queries and other operations on Elasticsearch or OpenSearch (uses
* low-level client).
*
* Category: search
* Since: 4.3
* Maven coordinates: org.apache.camel:camel-elasticsearch-rest-client
*
* @return the dsl builder
*/
static ElasticsearchRestClientComponentBuilderFactory.ElasticsearchRestClientComponentBuilder elasticsearchRestClient() {
return ElasticsearchRestClientComponentBuilderFactory.elasticsearchRestClient();
}
/**
* Exec (camel-exec)
* Execute commands on the underlying operating system.
*
* Category: core
* Since: 2.3
* Maven coordinates: org.apache.camel:camel-exec
*
* @return the dsl builder
*/
static ExecComponentBuilderFactory.ExecComponentBuilder exec() {
return ExecComponentBuilderFactory.exec();
}
/**
* FHIR (camel-fhir)
* Exchange information in the healthcare domain using the FHIR (Fast
* Healthcare Interoperability Resources) standard.
*
* Category: api
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-fhir
*
* @return the dsl builder
*/
static FhirComponentBuilderFactory.FhirComponentBuilder fhir() {
return FhirComponentBuilderFactory.fhir();
}
/**
* File (camel-file)
* Read and write files.
*
* Category: file,core
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-file
*
* @return the dsl builder
*/
static FileComponentBuilderFactory.FileComponentBuilder file() {
return FileComponentBuilderFactory.file();
}
/**
* File Watch (camel-file-watch)
* Get notified about file events in a directory using
* java.nio.file.WatchService.
*
* Category: file
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-file-watch
*
* @return the dsl builder
*/
static FileWatchComponentBuilderFactory.FileWatchComponentBuilder fileWatch() {
return FileWatchComponentBuilderFactory.fileWatch();
}
/**
* Flatpack (camel-flatpack)
* Parse fixed width and delimited files using the FlatPack library.
*
* Category: transformation
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-flatpack
*
* @return the dsl builder
*/
static FlatpackComponentBuilderFactory.FlatpackComponentBuilder flatpack() {
return FlatpackComponentBuilderFactory.flatpack();
}
/**
* Flink (camel-flink)
* Send DataSet jobs to an Apache Flink cluster.
*
* Category: transformation,bigdata
* Since: 2.18
* Maven coordinates: org.apache.camel:camel-flink
*
* @return the dsl builder
*/
static FlinkComponentBuilderFactory.FlinkComponentBuilder flink() {
return FlinkComponentBuilderFactory.flink();
}
/**
* Flowable (camel-flowable)
* Send and receive messages from the Flowable BPMN and CMMN engines.
*
* Category: workflow
* Since: 4.9
* Maven coordinates: org.apache.camel:camel-flowable
*
* @return the dsl builder
*/
static FlowableComponentBuilderFactory.FlowableComponentBuilder flowable() {
return FlowableComponentBuilderFactory.flowable();
}
/**
* FOP (camel-fop)
* Render messages into PDF and other output formats supported by Apache
* FOP.
*
* Category: file,transformation
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-fop
*
* @return the dsl builder
*/
static FopComponentBuilderFactory.FopComponentBuilder fop() {
return FopComponentBuilderFactory.fop();
}
/**
* Freemarker (camel-freemarker)
* Transform messages using FreeMarker templates.
*
* Category: transformation
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-freemarker
*
* @return the dsl builder
*/
static FreemarkerComponentBuilderFactory.FreemarkerComponentBuilder freemarker() {
return FreemarkerComponentBuilderFactory.freemarker();
}
/**
* FTP (camel-ftp)
* Upload and download files to/from FTP servers.
*
* Category: file
* Since: 1.1
* Maven coordinates: org.apache.camel:camel-ftp
*
* @return the dsl builder
*/
static FtpComponentBuilderFactory.FtpComponentBuilder ftp() {
return FtpComponentBuilderFactory.ftp();
}
/**
* FTPS (camel-ftp)
* Upload and download files to/from FTP servers supporting the FTPS
* protocol.
*
* Category: file
* Since: 2.2
* Maven coordinates: org.apache.camel:camel-ftp
*
* @return the dsl builder
*/
static FtpsComponentBuilderFactory.FtpsComponentBuilder ftps() {
return FtpsComponentBuilderFactory.ftps();
}
/**
* Geocoder (camel-geocoder)
* Find geocodes (latitude and longitude) for a given address or the other
* way round.
*
* Category: api,search
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-geocoder
*
* @return the dsl builder
*/
static GeocoderComponentBuilderFactory.GeocoderComponentBuilder geocoder() {
return GeocoderComponentBuilderFactory.geocoder();
}
/**
* Git (camel-git)
* Perform operations on git repositories.
*
* Category: file
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-git
*
* @return the dsl builder
*/
static GitComponentBuilderFactory.GitComponentBuilder git() {
return GitComponentBuilderFactory.git();
}
/**
* GitHub (camel-github)
* Interact with the GitHub API.
*
* Category: file,cloud,api
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-github
*
* @return the dsl builder
*/
static GithubComponentBuilderFactory.GithubComponentBuilder github() {
return GithubComponentBuilderFactory.github();
}
/**
* Google BigQuery (camel-google-bigquery)
* Google BigQuery data warehouse for analytics.
*
* Category: cloud,bigdata
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-google-bigquery
*
* @return the dsl builder
*/
static GoogleBigqueryComponentBuilderFactory.GoogleBigqueryComponentBuilder googleBigquery() {
return GoogleBigqueryComponentBuilderFactory.googleBigquery();
}
/**
* Google BigQuery Standard SQL (camel-google-bigquery)
* Access Google Cloud BigQuery service using SQL queries.
*
* Category: cloud,messaging
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-google-bigquery
*
* @return the dsl builder
*/
static GoogleBigquerySqlComponentBuilderFactory.GoogleBigquerySqlComponentBuilder googleBigquerySql() {
return GoogleBigquerySqlComponentBuilderFactory.googleBigquerySql();
}
/**
* Google Calendar (camel-google-calendar)
* Perform various operations on a Google Calendar.
*
* Category: api,cloud
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-google-calendar
*
* @return the dsl builder
*/
static GoogleCalendarComponentBuilderFactory.GoogleCalendarComponentBuilder googleCalendar() {
return GoogleCalendarComponentBuilderFactory.googleCalendar();
}
/**
* Google Calendar Stream (camel-google-calendar)
* Poll for changes in a Google Calendar.
*
* Category: cloud
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-google-calendar
*
* @return the dsl builder
*/
static GoogleCalendarStreamComponentBuilderFactory.GoogleCalendarStreamComponentBuilder googleCalendarStream() {
return GoogleCalendarStreamComponentBuilderFactory.googleCalendarStream();
}
/**
* Google Drive (camel-google-drive)
* Manage files in Google Drive.
*
* Category: file,cloud,api
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-google-drive
*
* @return the dsl builder
*/
static GoogleDriveComponentBuilderFactory.GoogleDriveComponentBuilder googleDrive() {
return GoogleDriveComponentBuilderFactory.googleDrive();
}
/**
* Google Cloud Functions (camel-google-functions)
* Manage and invoke Google Cloud Functions
*
* Category: cloud
* Since: 3.9
* Maven coordinates: org.apache.camel:camel-google-functions
*
* @return the dsl builder
*/
static GoogleFunctionsComponentBuilderFactory.GoogleFunctionsComponentBuilder googleFunctions() {
return GoogleFunctionsComponentBuilderFactory.googleFunctions();
}
/**
* Google Mail (camel-google-mail)
* Manage messages in Google Mail.
*
* Category: cloud,api,mail
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-google-mail
*
* @return the dsl builder
*/
static GoogleMailComponentBuilderFactory.GoogleMailComponentBuilder googleMail() {
return GoogleMailComponentBuilderFactory.googleMail();
}
/**
* Google Mail Stream (camel-google-mail)
* Poll for incoming messages in Google Mail.
*
* Category: cloud,mail
* Since: 2.22
* Maven coordinates: org.apache.camel:camel-google-mail
*
* @return the dsl builder
*/
static GoogleMailStreamComponentBuilderFactory.GoogleMailStreamComponentBuilder googleMailStream() {
return GoogleMailStreamComponentBuilderFactory.googleMailStream();
}
/**
* Google Pubsub (camel-google-pubsub)
* Send and receive messages to/from Google Cloud Platform PubSub Service.
*
* Category: cloud,messaging
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-google-pubsub
*
* @return the dsl builder
*/
static GooglePubsubComponentBuilderFactory.GooglePubsubComponentBuilder googlePubsub() {
return GooglePubsubComponentBuilderFactory.googlePubsub();
}
/**
* Google PubSub Lite (camel-google-pubsub-lite)
* Send and receive messages to/from Google Cloud Platform PubSub Lite
* Service.
*
* Category: cloud,messaging
* Since: 4.6
* Maven coordinates: org.apache.camel:camel-google-pubsub-lite
*
* @return the dsl builder
*/
static GooglePubsubLiteComponentBuilderFactory.GooglePubsubLiteComponentBuilder googlePubsubLite() {
return GooglePubsubLiteComponentBuilderFactory.googlePubsubLite();
}
/**
* Google Secret Manager (camel-google-secret-manager)
* Manage Google Secret Manager Secrets
*
* Category: cloud
* Since: 3.16
* Maven coordinates: org.apache.camel:camel-google-secret-manager
*
* @return the dsl builder
*/
static GoogleSecretManagerComponentBuilderFactory.GoogleSecretManagerComponentBuilder googleSecretManager() {
return GoogleSecretManagerComponentBuilderFactory.googleSecretManager();
}
/**
* Google Sheets (camel-google-sheets)
* Manage spreadsheets in Google Sheets.
*
* Category: cloud,document
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-google-sheets
*
* @return the dsl builder
*/
static GoogleSheetsComponentBuilderFactory.GoogleSheetsComponentBuilder googleSheets() {
return GoogleSheetsComponentBuilderFactory.googleSheets();
}
/**
* Google Sheets Stream (camel-google-sheets)
* Poll for changes in Google Sheets.
*
* Category: cloud,document
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-google-sheets
*
* @return the dsl builder
*/
static GoogleSheetsStreamComponentBuilderFactory.GoogleSheetsStreamComponentBuilder googleSheetsStream() {
return GoogleSheetsStreamComponentBuilderFactory.googleSheetsStream();
}
/**
* Google Storage (camel-google-storage)
* Store and retrieve objects from Google Cloud Storage Service using the
* google-cloud-storage library.
*
* Category: cloud
* Since: 3.9
* Maven coordinates: org.apache.camel:camel-google-storage
*
* @return the dsl builder
*/
static GoogleStorageComponentBuilderFactory.GoogleStorageComponentBuilder googleStorage() {
return GoogleStorageComponentBuilderFactory.googleStorage();
}
/**
* Grape (camel-grape)
* Fetch, load and manage additional jars dynamically after Camel Context
* was started.
*
* Category: management
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-grape
*
* @return the dsl builder
*/
static GrapeComponentBuilderFactory.GrapeComponentBuilder grape() {
return GrapeComponentBuilderFactory.grape();
}
/**
* GraphQL (camel-graphql)
* Send GraphQL queries and mutations to external systems.
*
* Category: api
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-graphql
*
* @return the dsl builder
*/
static GraphqlComponentBuilderFactory.GraphqlComponentBuilder graphql() {
return GraphqlComponentBuilderFactory.graphql();
}
/**
* gRPC (camel-grpc)
* Expose gRPC endpoints and access external gRPC endpoints.
*
* Category: rpc
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-grpc
*
* @return the dsl builder
*/
static GrpcComponentBuilderFactory.GrpcComponentBuilder grpc() {
return GrpcComponentBuilderFactory.grpc();
}
/**
* Guava EventBus (camel-guava-eventbus)
* Send and receive messages to/from Guava EventBus.
*
* Category: messaging
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-guava-eventbus
*
* @return the dsl builder
*/
static GuavaEventbusComponentBuilderFactory.GuavaEventbusComponentBuilder guavaEventbus() {
return GuavaEventbusComponentBuilderFactory.guavaEventbus();
}
/**
* Hashicorp Vault (camel-hashicorp-vault)
* Manage secrets in Hashicorp Vault Service
*
 * Category: cloud
* Since: 3.18
* Maven coordinates: org.apache.camel:camel-hashicorp-vault
*
* @return the dsl builder
*/
static HashicorpVaultComponentBuilderFactory.HashicorpVaultComponentBuilder hashicorpVault() {
return HashicorpVaultComponentBuilderFactory.hashicorpVault();
}
/**
* Hazelcast Atomic Number (camel-hazelcast)
* Increment, decrement, set, etc. Hazelcast atomic number (a grid wide
* number).
*
* Category: cache,clustering
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastAtomicvalueComponentBuilderFactory.HazelcastAtomicvalueComponentBuilder hazelcastAtomicvalue() {
return HazelcastAtomicvalueComponentBuilderFactory.hazelcastAtomicvalue();
}
/**
* Hazelcast Instance (camel-hazelcast)
* Consume join/leave events of a cache instance in a Hazelcast cluster.
*
* Category: cache,clustering
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastInstanceComponentBuilderFactory.HazelcastInstanceComponentBuilder hazelcastInstance() {
return HazelcastInstanceComponentBuilderFactory.hazelcastInstance();
}
/**
* Hazelcast List (camel-hazelcast)
* Perform operations on Hazelcast distributed list.
*
* Category: cache,clustering
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastListComponentBuilderFactory.HazelcastListComponentBuilder hazelcastList() {
return HazelcastListComponentBuilderFactory.hazelcastList();
}
/**
* Hazelcast Map (camel-hazelcast)
* Perform operations on Hazelcast distributed map.
*
* Category: cache,clustering
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastMapComponentBuilderFactory.HazelcastMapComponentBuilder hazelcastMap() {
return HazelcastMapComponentBuilderFactory.hazelcastMap();
}
/**
* Hazelcast Multimap (camel-hazelcast)
* Perform operations on Hazelcast distributed multimap.
*
* Category: cache,clustering
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastMultimapComponentBuilderFactory.HazelcastMultimapComponentBuilder hazelcastMultimap() {
return HazelcastMultimapComponentBuilderFactory.hazelcastMultimap();
}
/**
* Hazelcast Queue (camel-hazelcast)
* Perform operations on Hazelcast distributed queue.
*
* Category: cache,clustering,messaging
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastQueueComponentBuilderFactory.HazelcastQueueComponentBuilder hazelcastQueue() {
return HazelcastQueueComponentBuilderFactory.hazelcastQueue();
}
/**
* Hazelcast Replicated Map (camel-hazelcast)
* Perform operations on Hazelcast replicated map.
*
* Category: cache,clustering
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastReplicatedmapComponentBuilderFactory.HazelcastReplicatedmapComponentBuilder hazelcastReplicatedmap() {
return HazelcastReplicatedmapComponentBuilderFactory.hazelcastReplicatedmap();
}
/**
* Hazelcast Ringbuffer (camel-hazelcast)
* Perform operations on Hazelcast distributed ringbuffer.
*
* Category: cache,clustering
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastRingbufferComponentBuilderFactory.HazelcastRingbufferComponentBuilder hazelcastRingbuffer() {
return HazelcastRingbufferComponentBuilderFactory.hazelcastRingbuffer();
}
/**
* Hazelcast SEDA (camel-hazelcast)
* Asynchronously send/receive Exchanges between Camel routes running on
* potentially distinct JVMs/hosts backed by Hazelcast BlockingQueue.
*
* Category: cache,clustering
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastSedaComponentBuilderFactory.HazelcastSedaComponentBuilder hazelcastSeda() {
return HazelcastSedaComponentBuilderFactory.hazelcastSeda();
}
/**
* Hazelcast Set (camel-hazelcast)
* Perform operations on Hazelcast distributed set.
*
* Category: cache,clustering
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastSetComponentBuilderFactory.HazelcastSetComponentBuilder hazelcastSet() {
return HazelcastSetComponentBuilderFactory.hazelcastSet();
}
/**
* Hazelcast Topic (camel-hazelcast)
* Send and receive messages to/from Hazelcast distributed topic.
*
* Category: cache,clustering
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* @return the dsl builder
*/
static HazelcastTopicComponentBuilderFactory.HazelcastTopicComponentBuilder hazelcastTopic() {
return HazelcastTopicComponentBuilderFactory.hazelcastTopic();
}
/**
* HTTP (camel-http)
* Send requests to external HTTP servers using Apache HTTP Client 5.x.
*
* Category: http
* Since: 2.3
* Maven coordinates: org.apache.camel:camel-http
*
* @return the dsl builder
*/
static HttpComponentBuilderFactory.HttpComponentBuilder http() {
return HttpComponentBuilderFactory.http();
}
/**
* HTTPS (Secure) (camel-http)
* Send requests to external HTTP servers using Apache HTTP Client 5.x.
*
* Category: http
* Since: 2.3
* Maven coordinates: org.apache.camel:camel-http
*
* @return the dsl builder
*/
static HttpsComponentBuilderFactory.HttpsComponentBuilder https() {
return HttpsComponentBuilderFactory.https();
}
/**
* Huawei Distributed Message Service (DMS) (camel-huaweicloud-dms)
* To integrate with a fully managed, high-performance message queuing
* service on Huawei Cloud
*
* Category: cloud
* Since: 3.12
* Maven coordinates: org.apache.camel:camel-huaweicloud-dms
*
* @return the dsl builder
*/
static HwcloudDmsComponentBuilderFactory.HwcloudDmsComponentBuilder hwcloudDms() {
return HwcloudDmsComponentBuilderFactory.hwcloudDms();
}
/**
* Huawei Cloud Face Recognition Service (FRS) (camel-huaweicloud-frs)
* Face Recognition Service (FRS) is an intelligent service that uses
* computers to process, analyze, and understand facial images based on
* human facial features.
*
* Category: cloud,messaging
* Since: 3.15
* Maven coordinates: org.apache.camel:camel-huaweicloud-frs
*
* @return the dsl builder
*/
static HwcloudFrsComponentBuilderFactory.HwcloudFrsComponentBuilder hwcloudFrs() {
return HwcloudFrsComponentBuilderFactory.hwcloudFrs();
}
/**
* Huawei FunctionGraph (camel-huaweicloud-functiongraph)
* To call serverless functions on Huawei Cloud
*
* Category: cloud,serverless
* Since: 3.11
* Maven coordinates: org.apache.camel:camel-huaweicloud-functiongraph
*
* @return the dsl builder
*/
static HwcloudFunctiongraphComponentBuilderFactory.HwcloudFunctiongraphComponentBuilder hwcloudFunctiongraph() {
return HwcloudFunctiongraphComponentBuilderFactory.hwcloudFunctiongraph();
}
/**
* Huawei Identity and Access Management (IAM) (camel-huaweicloud-iam)
* To securely manage users on Huawei Cloud
*
* Category: cloud
* Since: 3.11
* Maven coordinates: org.apache.camel:camel-huaweicloud-iam
*
* @return the dsl builder
*/
static HwcloudIamComponentBuilderFactory.HwcloudIamComponentBuilder hwcloudIam() {
return HwcloudIamComponentBuilderFactory.hwcloudIam();
}
/**
* Huawei Cloud Image Recognition (camel-huaweicloud-imagerecognition)
* To identify objects, scenes, and concepts in images on Huawei Cloud
*
* Category: cloud,messaging
* Since: 3.12
* Maven coordinates: org.apache.camel:camel-huaweicloud-imagerecognition
*
* @return the dsl builder
*/
static HwcloudImagerecognitionComponentBuilderFactory.HwcloudImagerecognitionComponentBuilder hwcloudImagerecognition() {
return HwcloudImagerecognitionComponentBuilderFactory.hwcloudImagerecognition();
}
/**
* Huawei Object Storage Service (OBS) (camel-huaweicloud-obs)
* To provide stable, secure, efficient, and easy-to-use cloud storage
* service on Huawei Cloud
*
* Category: cloud
* Since: 3.12
* Maven coordinates: org.apache.camel:camel-huaweicloud-obs
*
* @return the dsl builder
*/
static HwcloudObsComponentBuilderFactory.HwcloudObsComponentBuilder hwcloudObs() {
return HwcloudObsComponentBuilderFactory.hwcloudObs();
}
/**
* Huawei Simple Message Notification (SMN) (camel-huaweicloud-smn)
* To broadcast messages and connect cloud services through notifications on
* Huawei Cloud
*
* Category: cloud,messaging
* Since: 3.8
* Maven coordinates: org.apache.camel:camel-huaweicloud-smn
*
* @return the dsl builder
*/
static HwcloudSmnComponentBuilderFactory.HwcloudSmnComponentBuilder hwcloudSmn() {
return HwcloudSmnComponentBuilderFactory.hwcloudSmn();
}
/**
* IBM Cloud Object Storage (camel-ibm-cos)
* Store and retrieve objects from IBM Cloud Object Storage.
*
* Category: cloud,file
* Since: 4.16
* Maven coordinates: org.apache.camel:camel-ibm-cos
*
* @return the dsl builder
*/
static IbmCosComponentBuilderFactory.IbmCosComponentBuilder ibmCos() {
return IbmCosComponentBuilderFactory.ibmCos();
}
/**
* IBM Secrets Manager (camel-ibm-secrets-manager)
* Manage secrets in IBM Secrets Manager Service
*
 * Category: cloud
* Since: 4.11
* Maven coordinates: org.apache.camel:camel-ibm-secrets-manager
*
* @return the dsl builder
*/
static IbmSecretsManagerComponentBuilderFactory.IbmSecretsManagerComponentBuilder ibmSecretsManager() {
return IbmSecretsManagerComponentBuilderFactory.ibmSecretsManager();
}
/**
* IBM Watson Discovery (camel-ibm-watson-discovery)
* Perform document understanding and search using IBM Watson Discovery
*
* Category: ai,cloud
* Since: 4.16
* Maven coordinates: org.apache.camel:camel-ibm-watson-discovery
*
* @return the dsl builder
*/
static IbmWatsonDiscoveryComponentBuilderFactory.IbmWatsonDiscoveryComponentBuilder ibmWatsonDiscovery() {
return IbmWatsonDiscoveryComponentBuilderFactory.ibmWatsonDiscovery();
}
/**
* IBM Watson Language (camel-ibm-watson-language)
* Perform natural language processing using IBM Watson Natural Language
* Understanding
*
* Category: ai,cloud
* Since: 4.16
* Maven coordinates: org.apache.camel:camel-ibm-watson-language
*
* @return the dsl builder
*/
static IbmWatsonLanguageComponentBuilderFactory.IbmWatsonLanguageComponentBuilder ibmWatsonLanguage() {
return IbmWatsonLanguageComponentBuilderFactory.ibmWatsonLanguage();
}
/**
* IBM Watson Speech to Text (camel-ibm-watson-speech-to-text)
* Convert speech audio to text using IBM Watson Speech to Text
*
* Category: ai,cloud
* Since: 4.17
* Maven coordinates: org.apache.camel:camel-ibm-watson-speech-to-text
*
* @return the dsl builder
*/
static IbmWatsonSpeechToTextComponentBuilderFactory.IbmWatsonSpeechToTextComponentBuilder ibmWatsonSpeechToText() {
return IbmWatsonSpeechToTextComponentBuilderFactory.ibmWatsonSpeechToText();
}
/**
* IBM Watson Text to Speech (camel-ibm-watson-text-to-speech)
* Convert text to natural-sounding speech using IBM Watson Text to Speech
*
* Category: ai,cloud
* Since: 4.17
* Maven coordinates: org.apache.camel:camel-ibm-watson-text-to-speech
*
* @return the dsl builder
*/
static IbmWatsonTextToSpeechComponentBuilderFactory.IbmWatsonTextToSpeechComponentBuilder ibmWatsonTextToSpeech() {
return IbmWatsonTextToSpeechComponentBuilderFactory.ibmWatsonTextToSpeech();
}
/**
* IEC 60870 Client (camel-iec60870)
* IEC 60870 supervisory control and data acquisition (SCADA) client using
* NeoSCADA implementation.
*
* Category: iot
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-iec60870
*
* @return the dsl builder
*/
static Iec60870ClientComponentBuilderFactory.Iec60870ClientComponentBuilder iec60870Client() {
return Iec60870ClientComponentBuilderFactory.iec60870Client();
}
/**
* IEC 60870 Server (camel-iec60870)
* IEC 60870 supervisory control and data acquisition (SCADA) server using
* NeoSCADA implementation.
*
* Category: iot
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-iec60870
*
* @return the dsl builder
*/
static Iec60870ServerComponentBuilderFactory.Iec60870ServerComponentBuilder iec60870Server() {
return Iec60870ServerComponentBuilderFactory.iec60870Server();
}
/**
* Ignite Cache (camel-ignite)
* Perform cache operations on an Ignite cache or consume changes from a
* continuous query.
*
* Category: cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder
*/
static IgniteCacheComponentBuilderFactory.IgniteCacheComponentBuilder igniteCache() {
return IgniteCacheComponentBuilderFactory.igniteCache();
}
/**
* Ignite Compute (camel-ignite)
* Run compute operations on an Ignite cluster.
*
* Category: cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder
*/
static IgniteComputeComponentBuilderFactory.IgniteComputeComponentBuilder igniteCompute() {
return IgniteComputeComponentBuilderFactory.igniteCompute();
}
/**
* Ignite Events (camel-ignite)
* Receive events from an Ignite cluster by creating a local event listener.
*
* Category: messaging,cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder
*/
static IgniteEventsComponentBuilderFactory.IgniteEventsComponentBuilder igniteEvents() {
return IgniteEventsComponentBuilderFactory.igniteEvents();
}
/**
* Ignite ID Generator (camel-ignite)
     * Interact with Ignite Atomic Sequences and ID Generators.
*
* Category: cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder
*/
static IgniteIdgenComponentBuilderFactory.IgniteIdgenComponentBuilder igniteIdgen() {
return IgniteIdgenComponentBuilderFactory.igniteIdgen();
}
/**
* Ignite Messaging (camel-ignite)
* Send and receive messages from an Ignite topic.
*
* Category: messaging
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder
*/
static IgniteMessagingComponentBuilderFactory.IgniteMessagingComponentBuilder igniteMessaging() {
return IgniteMessagingComponentBuilderFactory.igniteMessaging();
}
/**
* Ignite Queues (camel-ignite)
* Interact with Ignite Queue data structures.
*
* Category: cache,clustering,messaging
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder
*/
static IgniteQueueComponentBuilderFactory.IgniteQueueComponentBuilder igniteQueue() {
return IgniteQueueComponentBuilderFactory.igniteQueue();
}
/**
* Ignite Sets (camel-ignite)
* Interact with Ignite Set data structures.
*
* Category: cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder
*/
static IgniteSetComponentBuilderFactory.IgniteSetComponentBuilder igniteSet() {
return IgniteSetComponentBuilderFactory.igniteSet();
}
/**
* IMAP (camel-mail)
* Send and receive emails using imap, pop3 and smtp protocols.
*
* Category: mail
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-mail
*
* @return the dsl builder
*/
static ImapComponentBuilderFactory.ImapComponentBuilder imap() {
return ImapComponentBuilderFactory.imap();
}
/**
* IMAPS (Secure) (camel-mail)
* Send and receive emails using imap, pop3 and smtp protocols.
*
* Category: mail
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-mail
*
* @return the dsl builder
*/
static ImapsComponentBuilderFactory.ImapsComponentBuilder imaps() {
return ImapsComponentBuilderFactory.imaps();
}
/**
* Infinispan (camel-infinispan)
* Read and write from/to Infinispan distributed key/value store and data
* grid.
*
* Category: cache,clustering
* Since: 2.13
* Maven coordinates: org.apache.camel:camel-infinispan
*
* @return the dsl builder
*/
static InfinispanComponentBuilderFactory.InfinispanComponentBuilder infinispan() {
return InfinispanComponentBuilderFactory.infinispan();
}
/**
* Infinispan Embedded (camel-infinispan-embedded)
* Read and write from/to Infinispan distributed key/value store and data
* grid.
*
* Category: cache,clustering
* Since: 2.13
* Maven coordinates: org.apache.camel:camel-infinispan-embedded
*
* @return the dsl builder
*/
static InfinispanEmbeddedComponentBuilderFactory.InfinispanEmbeddedComponentBuilder infinispanEmbedded() {
return InfinispanEmbeddedComponentBuilderFactory.infinispanEmbedded();
}
/**
* InfluxDB (camel-influxdb)
* Interact with InfluxDB v1, a time series database.
*
* Category: database
* Since: 2.18
* Maven coordinates: org.apache.camel:camel-influxdb
*
* @return the dsl builder
*/
static InfluxdbComponentBuilderFactory.InfluxdbComponentBuilder influxdb() {
return InfluxdbComponentBuilderFactory.influxdb();
}
/**
* InfluxDB2 (camel-influxdb2)
* Interact with InfluxDB v2, a time series database.
*
* Category: database
* Since: 3.20
* Maven coordinates: org.apache.camel:camel-influxdb2
*
* @return the dsl builder
*/
static Influxdb2ComponentBuilderFactory.Influxdb2ComponentBuilder influxdb2() {
return Influxdb2ComponentBuilderFactory.influxdb2();
}
/**
* IRC (camel-irc)
     * Send and receive messages to/from an IRC chat.
*
* Category: chat
* Since: 1.1
* Maven coordinates: org.apache.camel:camel-irc
*
* @return the dsl builder
*/
static IrcComponentBuilderFactory.IrcComponentBuilder irc() {
return IrcComponentBuilderFactory.irc();
}
/**
* IronMQ (camel-ironmq)
     * Send and receive messages to/from IronMQ, an elastic and durable hosted
* message queue as a service.
*
* Category: cloud,messaging
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ironmq
*
* @return the dsl builder
*/
static IronmqComponentBuilderFactory.IronmqComponentBuilder ironmq() {
return IronmqComponentBuilderFactory.ironmq();
}
/**
* JCache (camel-jcache)
* Perform caching operations against JSR107/JCache.
*
* Category: cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-jcache
*
* @return the dsl builder
*/
static JcacheComponentBuilderFactory.JcacheComponentBuilder jcache() {
return JcacheComponentBuilderFactory.jcache();
}
/**
* JCR (camel-jcr)
* Read and write nodes to/from a JCR compliant content repository.
*
* Category: database,cms
* Since: 1.3
* Maven coordinates: org.apache.camel:camel-jcr
*
* @return the dsl builder
*/
static JcrComponentBuilderFactory.JcrComponentBuilder jcr() {
return JcrComponentBuilderFactory.jcr();
}
/**
* JDBC (camel-jdbc)
* Access databases through SQL and JDBC.
*
* Category: database
* Since: 1.2
* Maven coordinates: org.apache.camel:camel-jdbc
*
* @return the dsl builder
*/
static JdbcComponentBuilderFactory.JdbcComponentBuilder jdbc() {
return JdbcComponentBuilderFactory.jdbc();
}
/**
* Jetty (camel-jetty)
* Expose HTTP endpoints using Jetty 12.
*
* Category: http
* Since: 1.2
* Maven coordinates: org.apache.camel:camel-jetty
*
* @return the dsl builder
*/
static JettyComponentBuilderFactory.JettyComponentBuilder jetty() {
return JettyComponentBuilderFactory.jetty();
}
/**
* JGroups (camel-jgroups)
* Exchange messages with JGroups clusters.
*
* Category: clustering,messaging
* Since: 2.13
* Maven coordinates: org.apache.camel:camel-jgroups
*
* @return the dsl builder
*/
static JgroupsComponentBuilderFactory.JgroupsComponentBuilder jgroups() {
return JgroupsComponentBuilderFactory.jgroups();
}
/**
* JGroups raft (camel-jgroups-raft)
* Exchange messages with JGroups-raft clusters.
*
* Category: clustering,messaging
* Since: 2.24
* Maven coordinates: org.apache.camel:camel-jgroups-raft
*
* @return the dsl builder
*/
static JgroupsRaftComponentBuilderFactory.JgroupsRaftComponentBuilder jgroupsRaft() {
return JgroupsRaftComponentBuilderFactory.jgroupsRaft();
}
/**
* Jira (camel-jira)
* Interact with JIRA issue tracker.
*
* Category: document
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-jira
*
* @return the dsl builder
*/
static JiraComponentBuilderFactory.JiraComponentBuilder jira() {
return JiraComponentBuilderFactory.jira();
}
/**
* JMS (camel-jms)
* Send and receive messages to/from JMS message brokers.
*
* Category: messaging
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-jms
*
* @return the dsl builder
*/
static JmsComponentBuilderFactory.JmsComponentBuilder jms() {
return JmsComponentBuilderFactory.jms();
}
/**
* JMX (camel-jmx)
* Receive JMX notifications.
*
* Category: monitoring
* Since: 2.6
* Maven coordinates: org.apache.camel:camel-jmx
*
* @return the dsl builder
*/
static JmxComponentBuilderFactory.JmxComponentBuilder jmx() {
return JmxComponentBuilderFactory.jmx();
}
/**
* JOLT (camel-jolt)
* JSON to JSON transformation using JOLT.
*
* Category: transformation
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-jolt
*
* @return the dsl builder
*/
static JoltComponentBuilderFactory.JoltComponentBuilder jolt() {
return JoltComponentBuilderFactory.jolt();
}
/**
* JOOQ (camel-jooq)
* Store and retrieve Java objects from an SQL database using JOOQ.
*
* Category: database
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-jooq
*
* @return the dsl builder
*/
static JooqComponentBuilderFactory.JooqComponentBuilder jooq() {
return JooqComponentBuilderFactory.jooq();
}
/**
* JPA (camel-jpa)
* Store and retrieve Java objects from databases using Java Persistence API
* (JPA).
*
* Category: database
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-jpa
*
* @return the dsl builder
*/
static JpaComponentBuilderFactory.JpaComponentBuilder jpa() {
return JpaComponentBuilderFactory.jpa();
}
/**
* JSLT (camel-jslt)
* Query or transform JSON payloads using JSLT.
*
* Category: transformation
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-jslt
*
* @return the dsl builder
*/
static JsltComponentBuilderFactory.JsltComponentBuilder jslt() {
return JsltComponentBuilderFactory.jslt();
}
/**
* JsonPatch (camel-json-patch)
* Transforms JSON using JSON patch (RFC 6902).
*
* Category: transformation
* Since: 3.12
* Maven coordinates: org.apache.camel:camel-json-patch
*
* @return the dsl builder
*/
static JsonPatchComponentBuilderFactory.JsonPatchComponentBuilder jsonPatch() {
return JsonPatchComponentBuilderFactory.jsonPatch();
}
/**
* JSON Schema Validator (camel-json-validator)
* Validate JSON payloads using NetworkNT JSON Schema.
*
* Category: validation
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-json-validator
*
* @return the dsl builder
*/
static JsonValidatorComponentBuilderFactory.JsonValidatorComponentBuilder jsonValidator() {
return JsonValidatorComponentBuilderFactory.jsonValidator();
}
/**
* JSONata (camel-jsonata)
* Transforms JSON payload using JSONata transformation.
*
* Category: transformation
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-jsonata
*
* @return the dsl builder
*/
static JsonataComponentBuilderFactory.JsonataComponentBuilder jsonata() {
return JsonataComponentBuilderFactory.jsonata();
}
/**
* JT400 (camel-jt400)
* Exchanges messages with an IBM i system using data queues, message
* queues, or program call. IBM i is the replacement for AS/400 and iSeries
* servers.
*
* Category: messaging
* Since: 1.5
* Maven coordinates: org.apache.camel:camel-jt400
*
* @return the dsl builder
*/
static Jt400ComponentBuilderFactory.Jt400ComponentBuilder jt400() {
return Jt400ComponentBuilderFactory.jt400();
}
/**
* JTE (camel-jte)
* Transform messages using a Java based template engine (JTE).
*
* Category: transformation
* Since: 4.4
* Maven coordinates: org.apache.camel:camel-jte
*
* @return the dsl builder
*/
static JteComponentBuilderFactory.JteComponentBuilder jte() {
return JteComponentBuilderFactory.jte();
}
/**
* Kafka (camel-kafka)
* Send and receive messages to/from an Apache Kafka broker.
*
* Category: messaging
* Since: 2.13
* Maven coordinates: org.apache.camel:camel-kafka
*
* @return the dsl builder
*/
static KafkaComponentBuilderFactory.KafkaComponentBuilder kafka() {
return KafkaComponentBuilderFactory.kafka();
}
/**
* Kamelet (camel-kamelet)
     * To call Kamelets.
*
* Category: core
* Since: 3.8
* Maven coordinates: org.apache.camel:camel-kamelet
*
* @return the dsl builder
*/
static KameletComponentBuilderFactory.KameletComponentBuilder kamelet() {
return KameletComponentBuilderFactory.kamelet();
}
/**
* Keycloak (camel-keycloak)
* Manage Keycloak instances via Admin API.
*
* Category: security,management
* Since: 4.15
* Maven coordinates: org.apache.camel:camel-keycloak
*
* @return the dsl builder
*/
static KeycloakComponentBuilderFactory.KeycloakComponentBuilder keycloak() {
return KeycloakComponentBuilderFactory.keycloak();
}
/**
* Knative (camel-knative)
* Send and receive events from Knative.
*
* Category: cloud
* Since: 3.15
* Maven coordinates: org.apache.camel:camel-knative
*
* @return the dsl builder
*/
static KnativeComponentBuilderFactory.KnativeComponentBuilder knative() {
return KnativeComponentBuilderFactory.knative();
}
/**
* KServe (camel-kserve)
* Provide access to AI model servers with the KServe standard to run
* inference with remote models
*
* Category: ai
* Since: 4.10
* Maven coordinates: org.apache.camel:camel-kserve
*
* @return the dsl builder
*/
static KserveComponentBuilderFactory.KserveComponentBuilder kserve() {
return KserveComponentBuilderFactory.kserve();
}
/**
* Kubernetes ConfigMap (camel-kubernetes)
* Perform operations on Kubernetes ConfigMaps and get notified on
* ConfigMaps changes.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesConfigMapsComponentBuilderFactory.KubernetesConfigMapsComponentBuilder kubernetesConfigMaps() {
return KubernetesConfigMapsComponentBuilderFactory.kubernetesConfigMaps();
}
/**
* Kubernetes Cronjob (camel-kubernetes)
* Perform operations on Kubernetes CronJob.
*
* Category: container,cloud
* Since: 4.3
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesCronjobComponentBuilderFactory.KubernetesCronjobComponentBuilder kubernetesCronjob() {
return KubernetesCronjobComponentBuilderFactory.kubernetesCronjob();
}
/**
* Kubernetes Custom Resources (camel-kubernetes)
* Perform operations on Kubernetes Custom Resources and get notified on
* Deployment changes.
*
* Category: container,cloud
* Since: 3.7
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesCustomResourcesComponentBuilderFactory.KubernetesCustomResourcesComponentBuilder kubernetesCustomResources() {
return KubernetesCustomResourcesComponentBuilderFactory.kubernetesCustomResources();
}
/**
* Kubernetes Deployments (camel-kubernetes)
* Perform operations on Kubernetes Deployments and get notified on
* Deployment changes.
*
* Category: container,cloud
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesDeploymentsComponentBuilderFactory.KubernetesDeploymentsComponentBuilder kubernetesDeployments() {
return KubernetesDeploymentsComponentBuilderFactory.kubernetesDeployments();
}
/**
* Kubernetes Event (camel-kubernetes)
* Perform operations on Kubernetes Events and get notified on Events
* changes.
*
* Category: container,cloud
* Since: 3.20
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesEventsComponentBuilderFactory.KubernetesEventsComponentBuilder kubernetesEvents() {
return KubernetesEventsComponentBuilderFactory.kubernetesEvents();
}
/**
* Kubernetes HPA (camel-kubernetes)
* Perform operations on Kubernetes Horizontal Pod Autoscalers (HPA) and get
* notified on HPA changes.
*
* Category: container,cloud
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesHpaComponentBuilderFactory.KubernetesHpaComponentBuilder kubernetesHpa() {
return KubernetesHpaComponentBuilderFactory.kubernetesHpa();
}
/**
* Kubernetes Job (camel-kubernetes)
* Perform operations on Kubernetes Jobs.
*
* Category: container,cloud
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesJobComponentBuilderFactory.KubernetesJobComponentBuilder kubernetesJob() {
return KubernetesJobComponentBuilderFactory.kubernetesJob();
}
/**
* Kubernetes Namespaces (camel-kubernetes)
* Perform operations on Kubernetes Namespaces and get notified on Namespace
* changes.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesNamespacesComponentBuilderFactory.KubernetesNamespacesComponentBuilder kubernetesNamespaces() {
return KubernetesNamespacesComponentBuilderFactory.kubernetesNamespaces();
}
/**
* Kubernetes Nodes (camel-kubernetes)
* Perform operations on Kubernetes Nodes and get notified on Node changes.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesNodesComponentBuilderFactory.KubernetesNodesComponentBuilder kubernetesNodes() {
return KubernetesNodesComponentBuilderFactory.kubernetesNodes();
}
/**
* Kubernetes Persistent Volume (camel-kubernetes)
* Perform operations on Kubernetes Persistent Volumes and get notified on
* Persistent Volume changes.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesPersistentVolumesComponentBuilderFactory.KubernetesPersistentVolumesComponentBuilder kubernetesPersistentVolumes() {
return KubernetesPersistentVolumesComponentBuilderFactory.kubernetesPersistentVolumes();
}
/**
* Kubernetes Persistent Volume Claim (camel-kubernetes)
* Perform operations on Kubernetes Persistent Volumes Claims and get
* notified on Persistent Volumes Claim changes.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesPersistentVolumesClaimsComponentBuilderFactory.KubernetesPersistentVolumesClaimsComponentBuilder kubernetesPersistentVolumesClaims() {
return KubernetesPersistentVolumesClaimsComponentBuilderFactory.kubernetesPersistentVolumesClaims();
}
/**
* Kubernetes Pods (camel-kubernetes)
* Perform operations on Kubernetes Pods and get notified on Pod changes.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesPodsComponentBuilderFactory.KubernetesPodsComponentBuilder kubernetesPods() {
return KubernetesPodsComponentBuilderFactory.kubernetesPods();
}
/**
* Kubernetes Replication Controller (camel-kubernetes)
* Perform operations on Kubernetes Replication Controllers and get notified
* on Replication Controllers changes.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesReplicationControllersComponentBuilderFactory.KubernetesReplicationControllersComponentBuilder kubernetesReplicationControllers() {
return KubernetesReplicationControllersComponentBuilderFactory.kubernetesReplicationControllers();
}
/**
* Kubernetes Resources Quota (camel-kubernetes)
* Perform operations on Kubernetes Resources Quotas.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesResourcesQuotaComponentBuilderFactory.KubernetesResourcesQuotaComponentBuilder kubernetesResourcesQuota() {
return KubernetesResourcesQuotaComponentBuilderFactory.kubernetesResourcesQuota();
}
/**
* Kubernetes Secrets (camel-kubernetes)
* Perform operations on Kubernetes Secrets.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesSecretsComponentBuilderFactory.KubernetesSecretsComponentBuilder kubernetesSecrets() {
return KubernetesSecretsComponentBuilderFactory.kubernetesSecrets();
}
/**
* Kubernetes Service Account (camel-kubernetes)
* Perform operations on Kubernetes Service Accounts.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesServiceAccountsComponentBuilderFactory.KubernetesServiceAccountsComponentBuilder kubernetesServiceAccounts() {
return KubernetesServiceAccountsComponentBuilderFactory.kubernetesServiceAccounts();
}
/**
* Kubernetes Services (camel-kubernetes)
* Perform operations on Kubernetes Services and get notified on Service
* changes.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesServicesComponentBuilderFactory.KubernetesServicesComponentBuilder kubernetesServices() {
return KubernetesServicesComponentBuilderFactory.kubernetesServices();
}
/**
* Kudu (camel-kudu)
* Interact with Apache Kudu, a free and open source column-oriented data
* store of the Apache Hadoop ecosystem.
*
* Category: database,iot,cloud
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-kudu
*
* @return the dsl builder
*/
static KuduComponentBuilderFactory.KuduComponentBuilder kudu() {
return KuduComponentBuilderFactory.kudu();
}
/**
* LangChain4j Agent (camel-langchain4j-agent)
* LangChain4j Agent component
*
* Category: ai
* Since: 4.14
* Maven coordinates: org.apache.camel:camel-langchain4j-agent
*
* @return the dsl builder
*/
static Langchain4jAgentComponentBuilderFactory.Langchain4jAgentComponentBuilder langchain4jAgent() {
return Langchain4jAgentComponentBuilderFactory.langchain4jAgent();
}
/**
* LangChain4j Chat (camel-langchain4j-chat)
* LangChain4j Chat component
*
* Category: ai
* Since: 4.5
* Maven coordinates: org.apache.camel:camel-langchain4j-chat
*
* @return the dsl builder
*/
static Langchain4jChatComponentBuilderFactory.Langchain4jChatComponentBuilder langchain4jChat() {
return Langchain4jChatComponentBuilderFactory.langchain4jChat();
}
/**
* LangChain4j Embeddings (camel-langchain4j-embeddings)
* LangChain4j Embeddings
*
* Category: ai
* Since: 4.5
* Maven coordinates: org.apache.camel:camel-langchain4j-embeddings
*
* @return the dsl builder
*/
static Langchain4jEmbeddingsComponentBuilderFactory.Langchain4jEmbeddingsComponentBuilder langchain4jEmbeddings() {
return Langchain4jEmbeddingsComponentBuilderFactory.langchain4jEmbeddings();
}
/**
* LangChain4j Embedding Store (camel-langchain4j-embeddingstore)
* Perform operations on the Langchain4jEmbeddingStores.
*
* Category: database,ai
* Since: 4.14
* Maven coordinates: org.apache.camel:camel-langchain4j-embeddingstore
*
* @return the dsl builder
*/
static Langchain4jEmbeddingstoreComponentBuilderFactory.Langchain4jEmbeddingstoreComponentBuilder langchain4jEmbeddingstore() {
return Langchain4jEmbeddingstoreComponentBuilderFactory.langchain4jEmbeddingstore();
}
/**
* LangChain4j Tools (camel-langchain4j-tools)
* LangChain4j Tools and Function Calling Features
*
* Category: ai
* Since: 4.8
* Maven coordinates: org.apache.camel:camel-langchain4j-tools
*
* @return the dsl builder
*/
static Langchain4jToolsComponentBuilderFactory.Langchain4jToolsComponentBuilder langchain4jTools() {
return Langchain4jToolsComponentBuilderFactory.langchain4jTools();
}
/**
* LangChain4j Web Search (camel-langchain4j-web-search)
* LangChain4j Web Search Engine
*
* Category: ai
* Since: 4.8
* Maven coordinates: org.apache.camel:camel-langchain4j-web-search
*
* @return the dsl builder
*/
static Langchain4jWebSearchComponentBuilderFactory.Langchain4jWebSearchComponentBuilder langchain4jWebSearch() {
return Langchain4jWebSearchComponentBuilderFactory.langchain4jWebSearch();
}
/**
* Language (camel-language)
* Execute scripts in any of the languages supported by Camel.
*
* Category: core,script
* Since: 2.5
* Maven coordinates: org.apache.camel:camel-language
*
* @return the dsl builder
*/
static LanguageComponentBuilderFactory.LanguageComponentBuilder language() {
return LanguageComponentBuilderFactory.language();
}
/**
* LDAP (camel-ldap)
* Perform searches on LDAP servers.
*
* Category: database,security
* Since: 1.5
* Maven coordinates: org.apache.camel:camel-ldap
*
* @return the dsl builder
*/
static LdapComponentBuilderFactory.LdapComponentBuilder ldap() {
return LdapComponentBuilderFactory.ldap();
}
/**
* LDIF (camel-ldif)
* Perform updates on an LDAP server from an LDIF body content.
*
* Category: database,security
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-ldif
*
* @return the dsl builder
*/
static LdifComponentBuilderFactory.LdifComponentBuilder ldif() {
return LdifComponentBuilderFactory.ldif();
}
/**
* Log Data (camel-log)
     * Prints data from the routed message (such as body and headers) to the
* logger.
*
* Category: core,monitoring
* Since: 1.1
* Maven coordinates: org.apache.camel:camel-log
*
* @return the dsl builder
*/
static LogComponentBuilderFactory.LogComponentBuilder log() {
return LogComponentBuilderFactory.log();
}
/**
* Printer (camel-printer)
* Send print jobs to printers.
*
* Category: document
* Since: 2.1
* Maven coordinates: org.apache.camel:camel-printer
*
* @return the dsl builder
*/
static LprComponentBuilderFactory.LprComponentBuilder lpr() {
return LprComponentBuilderFactory.lpr();
}
/**
* Lucene (camel-lucene)
* Perform inserts or queries against Apache Lucene databases.
*
* Category: database,search
* Since: 2.2
* Maven coordinates: org.apache.camel:camel-lucene
*
* @return the dsl builder
*/
static LuceneComponentBuilderFactory.LuceneComponentBuilder lucene() {
return LuceneComponentBuilderFactory.lucene();
}
/**
* Lumberjack (camel-lumberjack)
* Receive logs messages using the Lumberjack protocol.
*
* Category: monitoring
* Since: 2.18
* Maven coordinates: org.apache.camel:camel-lumberjack
*
* @return the dsl builder
*/
static LumberjackComponentBuilderFactory.LumberjackComponentBuilder lumberjack() {
return LumberjackComponentBuilderFactory.lumberjack();
}
/**
* MapStruct (camel-mapstruct)
* Type Conversion using Mapstruct
*
* Category: transformation
* Since: 3.19
* Maven coordinates: org.apache.camel:camel-mapstruct
*
* @return the dsl builder
*/
static MapstructComponentBuilderFactory.MapstructComponentBuilder mapstruct() {
return MapstructComponentBuilderFactory.mapstruct();
}
/**
* Master (camel-master)
* Have only a single consumer in a cluster consuming from a given endpoint;
* with automatic failover if the JVM dies.
*
* Category: clustering
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-master
*
* @return the dsl builder
*/
static MasterComponentBuilderFactory.MasterComponentBuilder master() {
return MasterComponentBuilderFactory.master();
}
/**
* Metrics (camel-metrics)
* Collect various metrics directly from Camel routes using the DropWizard
* metrics library.
*
* Category: monitoring
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-metrics
*
* @return the dsl builder
*/
static MetricsComponentBuilderFactory.MetricsComponentBuilder metrics() {
return MetricsComponentBuilderFactory.metrics();
}
/**
* Micrometer (camel-micrometer)
* Collect various metrics directly from Camel routes using the Micrometer
* library.
*
* Category: monitoring
* Since: 2.22
* Maven coordinates: org.apache.camel:camel-micrometer
*
* @return the dsl builder
*/
static MicrometerComponentBuilderFactory.MicrometerComponentBuilder micrometer() {
return MicrometerComponentBuilderFactory.micrometer();
}
/**
* OPC UA Browser (camel-milo)
* Connect to OPC UA servers using the binary protocol for browsing the node
* tree.
*
* Category: iot
* Since: 3.15
* Maven coordinates: org.apache.camel:camel-milo
*
* @return the dsl builder
*/
static MiloBrowseComponentBuilderFactory.MiloBrowseComponentBuilder miloBrowse() {
return MiloBrowseComponentBuilderFactory.miloBrowse();
}
/**
* OPC UA Client (camel-milo)
* Connect to OPC UA servers using the binary protocol for acquiring
* telemetry data.
*
* Category: iot
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-milo
*
* @return the dsl builder
*/
static MiloClientComponentBuilderFactory.MiloClientComponentBuilder miloClient() {
return MiloClientComponentBuilderFactory.miloClient();
}
/**
* OPC UA Server (camel-milo)
* Make telemetry data available as an OPC UA server.
*
* Category: iot
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-milo
*
* @return the dsl builder
*/
static MiloServerComponentBuilderFactory.MiloServerComponentBuilder miloServer() {
return MiloServerComponentBuilderFactory.miloServer();
}
/**
* Milvus (camel-milvus)
* Perform operations on the Milvus Vector Database.
*
* Category: database,ai
* Since: 4.5
* Maven coordinates: org.apache.camel:camel-milvus
*
* @return the dsl builder
*/
static MilvusComponentBuilderFactory.MilvusComponentBuilder milvus() {
return MilvusComponentBuilderFactory.milvus();
}
/**
* Mina (camel-mina)
* Socket level networking using TCP or UDP with Apache Mina 2.x.
*
* Category: networking
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-mina
*
* @return the dsl builder
*/
static MinaComponentBuilderFactory.MinaComponentBuilder mina() {
return MinaComponentBuilderFactory.mina();
}
/**
* Minio (camel-minio)
* Store and retrieve objects from Minio Storage Service using Minio SDK.
*
* Category: cloud,file
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-minio
*
* @return the dsl builder
*/
static MinioComponentBuilderFactory.MinioComponentBuilder minio() {
return MinioComponentBuilderFactory.minio();
}
/**
* MLLP (camel-mllp)
* Communicate with external systems using the MLLP protocol.
*
* Category: health
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-mllp
*
* @return the dsl builder
*/
static MllpComponentBuilderFactory.MllpComponentBuilder mllp() {
return MllpComponentBuilderFactory.mllp();
}
/**
* Mock (camel-mock)
* Test routes and mediation rules using mocks.
*
* Category: core,testing
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-mock
*
* @return the dsl builder
*/
static MockComponentBuilderFactory.MockComponentBuilder mock() {
return MockComponentBuilderFactory.mock();
}
/**
* MongoDB (camel-mongodb)
* Perform operations on MongoDB documents and collections.
*
* Category: database
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-mongodb
*
* @return the dsl builder
*/
static MongodbComponentBuilderFactory.MongodbComponentBuilder mongodb() {
return MongodbComponentBuilderFactory.mongodb();
}
/**
* MongoDB GridFS (camel-mongodb-gridfs)
* Interact with MongoDB GridFS.
*
* Category: database,file
* Since: 2.18
* Maven coordinates: org.apache.camel:camel-mongodb-gridfs
*
* @return the dsl builder
*/
static MongodbGridfsComponentBuilderFactory.MongodbGridfsComponentBuilder mongodbGridfs() {
return MongodbGridfsComponentBuilderFactory.mongodbGridfs();
}
/**
* Mustache (camel-mustache)
* Transform messages using a Mustache template.
*
* Category: transformation
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-mustache
*
* @return the dsl builder
*/
static MustacheComponentBuilderFactory.MustacheComponentBuilder mustache() {
return MustacheComponentBuilderFactory.mustache();
}
/**
* MVEL (camel-mvel)
* Transform messages using an MVEL template.
*
* Category: transformation,script
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-mvel
*
* @return the dsl builder
*/
static MvelComponentBuilderFactory.MvelComponentBuilder mvel() {
return MvelComponentBuilderFactory.mvel();
}
/**
* MyBatis (camel-mybatis)
* Performs a query, poll, insert, update or delete in a relational database
* using MyBatis.
*
* Category: database
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-mybatis
*
* @return the dsl builder
*/
static MybatisComponentBuilderFactory.MybatisComponentBuilder mybatis() {
return MybatisComponentBuilderFactory.mybatis();
}
/**
* MyBatis Bean (camel-mybatis)
* Perform queries, inserts, updates or deletes in a relational database
* using MyBatis.
*
* Category: database
* Since: 2.22
* Maven coordinates: org.apache.camel:camel-mybatis
*
* @return the dsl builder
*/
static MybatisBeanComponentBuilderFactory.MybatisBeanComponentBuilder mybatisBean() {
return MybatisBeanComponentBuilderFactory.mybatisBean();
}
/**
* Nats (camel-nats)
* Send and receive messages from NATS messaging system.
*
* Category: messaging
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-nats
*
* @return the dsl builder
*/
static NatsComponentBuilderFactory.NatsComponentBuilder nats() {
return NatsComponentBuilderFactory.nats();
}
/**
* Neo4j (camel-neo4j)
* Perform operations on the Neo4j Graph Database
*
* Category: database,ai
* Since: 4.10
* Maven coordinates: org.apache.camel:camel-neo4j
*
* @return the dsl builder
*/
static Neo4jComponentBuilderFactory.Neo4jComponentBuilder neo4j() {
return Neo4jComponentBuilderFactory.neo4j();
}
/**
* Netty (camel-netty)
* Socket level networking using TCP or UDP with Netty 4.x.
*
* Category: networking
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-netty
*
* @return the dsl builder
*/
static NettyComponentBuilderFactory.NettyComponentBuilder netty() {
return NettyComponentBuilderFactory.netty();
}
/**
* Netty HTTP (camel-netty-http)
* Netty HTTP server and client using the Netty 4.x.
*
* Category: networking,http
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-netty-http
*
* @return the dsl builder
*/
static NettyHttpComponentBuilderFactory.NettyHttpComponentBuilder nettyHttp() {
return NettyHttpComponentBuilderFactory.nettyHttp();
}
/**
* Nitrite (camel-nitrite)
* Access Nitrite databases.
*
* Category: database
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-nitrite
*
* @return the dsl builder
*/
static NitriteComponentBuilderFactory.NitriteComponentBuilder nitrite() {
return NitriteComponentBuilderFactory.nitrite();
}
/**
* OAI-PMH (camel-oaipmh)
* Harvest metadata using OAI-PMH protocol
*
* Category: search
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-oaipmh
*
* @return the dsl builder
*/
static OaipmhComponentBuilderFactory.OaipmhComponentBuilder oaipmh() {
return OaipmhComponentBuilderFactory.oaipmh();
}
/**
* Olingo2 (camel-olingo2)
* Communicate with OData 2.0 services using Apache Olingo.
*
* Category: cloud
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-olingo2
*
* @return the dsl builder
*/
static Olingo2ComponentBuilderFactory.Olingo2ComponentBuilder olingo2() {
return Olingo2ComponentBuilderFactory.olingo2();
}
/**
* Olingo4 (camel-olingo4)
* Communicate with OData 4.0 services using Apache Olingo OData API.
*
* Category: cloud
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-olingo4
*
* @return the dsl builder
*/
static Olingo4ComponentBuilderFactory.Olingo4ComponentBuilder olingo4() {
return Olingo4ComponentBuilderFactory.olingo4();
}
/**
* OpenSearch (camel-opensearch)
* Send requests to OpenSearch via Java Client API.
*
* Category: search,monitoring
* Since: 4.0
* Maven coordinates: org.apache.camel:camel-opensearch
*
* @return the dsl builder
*/
static OpensearchComponentBuilderFactory.OpensearchComponentBuilder opensearch() {
return OpensearchComponentBuilderFactory.opensearch();
}
/**
* OpenShift Build Config (camel-kubernetes)
* Perform operations on OpenShift Build Configs.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static OpenshiftBuildConfigsComponentBuilderFactory.OpenshiftBuildConfigsComponentBuilder openshiftBuildConfigs() {
return OpenshiftBuildConfigsComponentBuilderFactory.openshiftBuildConfigs();
}
/**
* OpenShift Builds (camel-kubernetes)
* Perform operations on OpenShift Builds.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static OpenshiftBuildsComponentBuilderFactory.OpenshiftBuildsComponentBuilder openshiftBuilds() {
return OpenshiftBuildsComponentBuilderFactory.openshiftBuilds();
}
/**
* OpenShift Deployment Configs (camel-kubernetes)
* Perform operations on OpenShift Deployment Configs and get notified on
* Deployment Config changes.
*
* Category: container,cloud
* Since: 3.18
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static OpenshiftDeploymentconfigsComponentBuilderFactory.OpenshiftDeploymentconfigsComponentBuilder openshiftDeploymentconfigs() {
return OpenshiftDeploymentconfigsComponentBuilderFactory.openshiftDeploymentconfigs();
}
/**
* OpenStack Cinder (camel-openstack)
* Access data in OpenStack Cinder block storage.
*
* Category: container
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-openstack
*
* @return the dsl builder
*/
static OpenstackCinderComponentBuilderFactory.OpenstackCinderComponentBuilder openstackCinder() {
return OpenstackCinderComponentBuilderFactory.openstackCinder();
}
/**
* OpenStack Glance (camel-openstack)
* Manage VM images and metadata definitions in OpenStack Glance.
*
* Category: container
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-openstack
*
* @return the dsl builder
*/
static OpenstackGlanceComponentBuilderFactory.OpenstackGlanceComponentBuilder openstackGlance() {
return OpenstackGlanceComponentBuilderFactory.openstackGlance();
}
/**
* OpenStack Keystone (camel-openstack)
* Access OpenStack Keystone for API client authentication, service
* discovery and distributed multi-tenant authorization.
*
* Category: container
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-openstack
*
* @return the dsl builder
*/
static OpenstackKeystoneComponentBuilderFactory.OpenstackKeystoneComponentBuilder openstackKeystone() {
return OpenstackKeystoneComponentBuilderFactory.openstackKeystone();
}
/**
* OpenStack Neutron (camel-openstack)
* Access OpenStack Neutron for network services.
*
* Category: container
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-openstack
*
* @return the dsl builder
*/
static OpenstackNeutronComponentBuilderFactory.OpenstackNeutronComponentBuilder openstackNeutron() {
return OpenstackNeutronComponentBuilderFactory.openstackNeutron();
}
/**
* OpenStack Nova (camel-openstack)
* Access OpenStack to manage compute resources.
*
* Category: container
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-openstack
*
* @return the dsl builder
*/
static OpenstackNovaComponentBuilderFactory.OpenstackNovaComponentBuilder openstackNova() {
return OpenstackNovaComponentBuilderFactory.openstackNova();
}
/**
* OpenStack Swift (camel-openstack)
* Access OpenStack Swift object/blob store.
*
* Category: container
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-openstack
*
* @return the dsl builder
*/
static OpenstackSwiftComponentBuilderFactory.OpenstackSwiftComponentBuilder openstackSwift() {
return OpenstackSwiftComponentBuilderFactory.openstackSwift();
}
/**
* OpenTelemetry Metrics (camel-opentelemetry-metrics)
* Camel metrics based on the Camel Telemetry spec
*
* Category: monitoring
* Since: 4.17
* Maven coordinates: org.apache.camel:camel-opentelemetry-metrics
*
* @return the dsl builder
*/
static OpentelemetryMetricsComponentBuilderFactory.OpentelemetryMetricsComponentBuilder opentelemetryMetrics() {
return OpentelemetryMetricsComponentBuilderFactory.opentelemetryMetrics();
}
/**
* OptaPlanner (camel-optaplanner)
* Solve planning problems with OptaPlanner.
*
* Category: workflow
* Since: 2.13
* Maven coordinates: org.apache.camel:camel-optaplanner
*
* @return the dsl builder
*/
static OptaplannerComponentBuilderFactory.OptaplannerComponentBuilder optaplanner() {
return OptaplannerComponentBuilderFactory.optaplanner();
}
/**
* Paho (camel-paho)
* Communicate with MQTT message brokers using Eclipse Paho MQTT Client.
*
* Category: messaging,iot
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-paho
*
* @return the dsl builder
*/
static PahoComponentBuilderFactory.PahoComponentBuilder paho() {
return PahoComponentBuilderFactory.paho();
}
/**
* Paho MQTT 5 (camel-paho-mqtt5)
* Communicate with MQTT message brokers using Eclipse Paho MQTT v5 Client.
*
* Category: messaging,iot
* Since: 3.8
* Maven coordinates: org.apache.camel:camel-paho-mqtt5
*
* @return the dsl builder
*/
static PahoMqtt5ComponentBuilderFactory.PahoMqtt5ComponentBuilder pahoMqtt5() {
return PahoMqtt5ComponentBuilderFactory.pahoMqtt5();
}
/**
* PDF (camel-pdf)
* Create, modify or extract content from PDF documents.
*
* Category: document,transformation
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-pdf
*
* @return the dsl builder
*/
static PdfComponentBuilderFactory.PdfComponentBuilder pdf() {
return PdfComponentBuilderFactory.pdf();
}
/**
 * PostgreSQL Replication Slot (camel-pg-replication-slot)
 * Poll for PostgreSQL Write-Ahead Log (WAL) records using Streaming
 * Replication Slots.
 *
 * Category: database
 * Since: 3.0
 * Maven coordinates: org.apache.camel:camel-pg-replication-slot
 *
 * @return the dsl builder
 */
static PgReplicationSlotComponentBuilderFactory.PgReplicationSlotComponentBuilder pgReplicationSlot() {
return PgReplicationSlotComponentBuilderFactory.pgReplicationSlot();
}
/**
 * PostgreSQL Event (camel-pgevent)
 * Send and receive PostgreSQL events via LISTEN and NOTIFY commands.
 *
 * Category: database
 * Since: 2.15
 * Maven coordinates: org.apache.camel:camel-pgevent
 *
 * @return the dsl builder
 */
static PgeventComponentBuilderFactory.PgeventComponentBuilder pgevent() {
return PgeventComponentBuilderFactory.pgevent();
}
/**
* Pinecone (camel-pinecone)
* Perform operations on the Pinecone Vector Database.
*
* Category: database,ai
* Since: 4.6
* Maven coordinates: org.apache.camel:camel-pinecone
*
* @return the dsl builder
*/
static PineconeComponentBuilderFactory.PineconeComponentBuilder pinecone() {
return PineconeComponentBuilderFactory.pinecone();
}
/**
* Platform HTTP (camel-platform-http)
* Expose HTTP endpoints using the HTTP server available in the current
* platform.
*
* Category: http
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-platform-http
*
* @return the dsl builder
*/
static PlatformHttpComponentBuilderFactory.PlatformHttpComponentBuilder platformHttp() {
return PlatformHttpComponentBuilderFactory.platformHttp();
}
/**
* PLC4X (camel-plc4x)
* Read and write to PLC devices
*
* Category: iot
* Since: 3.20
* Maven coordinates: org.apache.camel:camel-plc4x
*
* @return the dsl builder
*/
static Plc4xComponentBuilderFactory.Plc4xComponentBuilder plc4x() {
return Plc4xComponentBuilderFactory.plc4x();
}
/**
* POP3 (camel-mail)
* Send and receive emails using imap, pop3 and smtp protocols.
*
* Category: mail
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-mail
*
* @return the dsl builder
*/
static Pop3ComponentBuilderFactory.Pop3ComponentBuilder pop3() {
return Pop3ComponentBuilderFactory.pop3();
}
/**
* POP3S (camel-mail)
* Send and receive emails using imap, pop3 and smtp protocols.
*
* Category: mail
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-mail
*
* @return the dsl builder
*/
static Pop3sComponentBuilderFactory.Pop3sComponentBuilder pop3s() {
return Pop3sComponentBuilderFactory.pop3s();
}
/**
* PQC Algorithms (camel-pqc)
* Post Quantum Cryptography Signature and Verification component.
*
* Category: security
* Since: 4.12
* Maven coordinates: org.apache.camel:camel-pqc
*
* @return the dsl builder
*/
static PqcComponentBuilderFactory.PqcComponentBuilder pqc() {
return PqcComponentBuilderFactory.pqc();
}
/**
* PubNub (camel-pubnub)
* Send and receive messages to/from PubNub data stream network for
* connected devices.
*
* Category: cloud,iot,messaging
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-pubnub
*
* @return the dsl builder
*/
static PubnubComponentBuilderFactory.PubnubComponentBuilder pubnub() {
return PubnubComponentBuilderFactory.pubnub();
}
/**
* Pulsar (camel-pulsar)
* Send and receive messages from/to Apache Pulsar messaging system.
*
* Category: messaging
* Since: 2.24
* Maven coordinates: org.apache.camel:camel-pulsar
*
* @return the dsl builder
*/
static PulsarComponentBuilderFactory.PulsarComponentBuilder pulsar() {
return PulsarComponentBuilderFactory.pulsar();
}
/**
* Qdrant (camel-qdrant)
* Perform operations on the Qdrant Vector Database.
*
* Category: database,ai
* Since: 4.5
* Maven coordinates: org.apache.camel:camel-qdrant
*
* @return the dsl builder
*/
static QdrantComponentBuilderFactory.QdrantComponentBuilder qdrant() {
return QdrantComponentBuilderFactory.qdrant();
}
/**
* Quartz (camel-quartz)
* Schedule sending of messages using the Quartz 2.x scheduler.
*
* Category: scheduling
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-quartz
*
* @return the dsl builder
*/
static QuartzComponentBuilderFactory.QuartzComponentBuilder quartz() {
return QuartzComponentBuilderFactory.quartz();
}
/**
* QuickFix (camel-quickfix)
* Open a Financial Interchange (FIX) session using an embedded QuickFix/J
* engine.
*
* Category: messaging
* Since: 2.1
* Maven coordinates: org.apache.camel:camel-quickfix
*
* @return the dsl builder
*/
static QuickfixComponentBuilderFactory.QuickfixComponentBuilder quickfix() {
return QuickfixComponentBuilderFactory.quickfix();
}
/**
* Reactive Streams (camel-reactive-streams)
* Exchange messages with reactive stream processing libraries compatible
* with the reactive streams standard.
*
* Category: messaging
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-reactive-streams
*
* @return the dsl builder
*/
static ReactiveStreamsComponentBuilderFactory.ReactiveStreamsComponentBuilder reactiveStreams() {
return ReactiveStreamsComponentBuilderFactory.reactiveStreams();
}
/**
* Ref (camel-ref)
* Route messages to an endpoint looked up dynamically by name in the Camel
* Registry.
*
* Category: core
* Since: 1.2
* Maven coordinates: org.apache.camel:camel-ref
*
* @return the dsl builder
*/
static RefComponentBuilderFactory.RefComponentBuilder ref() {
return RefComponentBuilderFactory.ref();
}
/**
* REST (camel-rest)
* Expose REST services or call external REST services.
*
* Category: core,rest
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-rest
*
* @return the dsl builder
*/
static RestEndpointComponentBuilderFactory.RestEndpointComponentBuilder restEndpoint() {
return RestEndpointComponentBuilderFactory.restEndpoint();
}
/**
* REST API (camel-rest)
* Expose OpenAPI Specification of the REST services defined using Camel
* REST DSL.
*
* Category: core,rest
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-rest
*
* @return the dsl builder
*/
static RestApiComponentBuilderFactory.RestApiComponentBuilder restApi() {
return RestApiComponentBuilderFactory.restApi();
}
/**
* REST OpenApi (camel-rest-openapi)
* To call REST services using OpenAPI specification as contract.
*
* Category: rest,api
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-rest-openapi
*
* @return the dsl builder
*/
static RestOpenapiComponentBuilderFactory.RestOpenapiComponentBuilder restOpenapi() {
return RestOpenapiComponentBuilderFactory.restOpenapi();
}
/**
* Robot Framework (camel-robotframework)
* Pass camel exchanges to acceptance test written in Robot DSL.
*
* Category: testing
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-robotframework
*
* @return the dsl builder
*/
static RobotframeworkComponentBuilderFactory.RobotframeworkComponentBuilder robotframework() {
return RobotframeworkComponentBuilderFactory.robotframework();
}
/**
* RocketMQ (camel-rocketmq)
* Send and receive messages from RocketMQ cluster.
*
* Category: messaging
* Since: 3.20
* Maven coordinates: org.apache.camel:camel-rocketmq
*
* @return the dsl builder
*/
static RocketmqComponentBuilderFactory.RocketmqComponentBuilder rocketmq() {
return RocketmqComponentBuilderFactory.rocketmq();
}
/**
* RSS (camel-rss)
* Poll RSS feeds.
*
* Category: document
* Since: 2.0
* Maven coordinates: org.apache.camel:camel-rss
*
* @return the dsl builder
*/
static RssComponentBuilderFactory.RssComponentBuilder rss() {
return RssComponentBuilderFactory.rss();
}
/**
* Saga (camel-saga)
* Execute custom actions within a route using the Saga EIP.
*
* Category: clustering
* Since: 2.21
* Maven coordinates: org.apache.camel:camel-saga
*
* @return the dsl builder
*/
static SagaComponentBuilderFactory.SagaComponentBuilder saga() {
return SagaComponentBuilderFactory.saga();
}
/**
* Salesforce (camel-salesforce)
* Communicate with Salesforce using Java DTOs.
*
* Category: cloud,saas
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-salesforce
*
* @return the dsl builder
*/
static SalesforceComponentBuilderFactory.SalesforceComponentBuilder salesforce() {
return SalesforceComponentBuilderFactory.salesforce();
}
/**
* SAP NetWeaver (camel-sap-netweaver)
* Send requests to SAP NetWeaver Gateway using HTTP.
*
* Category: saas
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-sap-netweaver
*
* @return the dsl builder
*/
static SapNetweaverComponentBuilderFactory.SapNetweaverComponentBuilder sapNetweaver() {
return SapNetweaverComponentBuilderFactory.sapNetweaver();
}
/**
* Scheduler (camel-scheduler)
* Generate messages in specified intervals using
* java.util.concurrent.ScheduledExecutorService.
*
* Category: core,scheduling
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-scheduler
*
* @return the dsl builder
*/
static SchedulerComponentBuilderFactory.SchedulerComponentBuilder scheduler() {
return SchedulerComponentBuilderFactory.scheduler();
}
/**
* Schematron (camel-schematron)
* Validate XML payload using the Schematron Library.
*
* Category: validation
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-schematron
*
* @return the dsl builder
*/
static SchematronComponentBuilderFactory.SchematronComponentBuilder schematron() {
return SchematronComponentBuilderFactory.schematron();
}
/**
* SCP (camel-jsch)
* Copy files to/from remote hosts using the secure copy protocol (SCP).
*
* Category: file
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-jsch
*
* @return the dsl builder
*/
static ScpComponentBuilderFactory.ScpComponentBuilder scp() {
return ScpComponentBuilderFactory.scp();
}
/**
* SEDA (camel-seda)
* Asynchronously call another endpoint from any Camel Context in the same
* JVM.
*
* Category: core,messaging
* Since: 1.1
* Maven coordinates: org.apache.camel:camel-seda
*
* @return the dsl builder
*/
static SedaComponentBuilderFactory.SedaComponentBuilder seda() {
return SedaComponentBuilderFactory.seda();
}
/**
* Service (camel-service)
* Register a Camel endpoint to a Service Registry (such as Consul, Etcd)
* and delegate to it.
*
* Category: cloud
* Since: 2.22
* Maven coordinates: org.apache.camel:camel-service
*
* @return the dsl builder
*/
static ServiceComponentBuilderFactory.ServiceComponentBuilder service() {
return ServiceComponentBuilderFactory.service();
}
/**
* ServiceNow (camel-servicenow)
* Interact with ServiceNow via its REST API.
*
* Category: api,cloud,management
* Since: 2.18
* Maven coordinates: org.apache.camel:camel-servicenow
*
* @return the dsl builder
*/
static ServicenowComponentBuilderFactory.ServicenowComponentBuilder servicenow() {
return ServicenowComponentBuilderFactory.servicenow();
}
/**
* Servlet (camel-servlet)
* Serve HTTP requests by a Servlet.
*
* Category: http
* Since: 2.0
* Maven coordinates: org.apache.camel:camel-servlet
*
* @return the dsl builder
*/
static ServletComponentBuilderFactory.ServletComponentBuilder servlet() {
return ServletComponentBuilderFactory.servlet();
}
/**
* SFTP (camel-ftp)
* Upload and download files to/from SFTP servers.
*
* Category: file
* Since: 1.1
* Maven coordinates: org.apache.camel:camel-ftp
*
* @return the dsl builder
*/
static SftpComponentBuilderFactory.SftpComponentBuilder sftp() {
return SftpComponentBuilderFactory.sftp();
}
/**
* Simple JMS (camel-sjms)
* Send and receive messages to/from a JMS Queue or Topic using plain JMS
* 1.x API.
*
* Category: messaging
* Since: 2.11
* Maven coordinates: org.apache.camel:camel-sjms
*
* @return the dsl builder
*/
static SjmsComponentBuilderFactory.SjmsComponentBuilder sjms() {
return SjmsComponentBuilderFactory.sjms();
}
/**
* Simple JMS2 (camel-sjms2)
* Send and receive messages to/from a JMS Queue or Topic using plain JMS
* 2.x API.
*
* Category: messaging
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-sjms2
*
* @return the dsl builder
*/
static Sjms2ComponentBuilderFactory.Sjms2ComponentBuilder sjms2() {
return Sjms2ComponentBuilderFactory.sjms2();
}
/**
* Slack (camel-slack)
* Send and receive messages to/from Slack.
*
* Category: social
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-slack
*
* @return the dsl builder
*/
static SlackComponentBuilderFactory.SlackComponentBuilder slack() {
return SlackComponentBuilderFactory.slack();
}
/**
* SMB (camel-smb)
* Read and write files to Server Message Block (SMB) file shares.
*
* Category: file
* Since: 4.3
* Maven coordinates: org.apache.camel:camel-smb
*
* @return the dsl builder
*/
static SmbComponentBuilderFactory.SmbComponentBuilder smb() {
return SmbComponentBuilderFactory.smb();
}
/**
* Smooks (camel-smooks)
* Use Smooks to transform, route, and bind both XML and non-XML data,
* including EDI, CSV, JSON, and YAML.
*
* Category: transformation
* Since: 4.7
* Maven coordinates: org.apache.camel:camel-smooks
*
* @return the dsl builder
*/
static SmooksComponentBuilderFactory.SmooksComponentBuilder smooks() {
return SmooksComponentBuilderFactory.smooks();
}
/**
* SMPP (camel-smpp)
* Send and receive SMS messages using a SMSC (Short Message Service
* Center).
*
* Category: mobile
* Since: 2.2
* Maven coordinates: org.apache.camel:camel-smpp
*
* @return the dsl builder
*/
static SmppComponentBuilderFactory.SmppComponentBuilder smpp() {
return SmppComponentBuilderFactory.smpp();
}
/**
* SMPP (Secure) (camel-smpp)
* Send and receive SMS messages using a SMSC (Short Message Service
* Center).
*
* Category: mobile
* Since: 2.2
* Maven coordinates: org.apache.camel:camel-smpp
*
* @return the dsl builder
*/
static SmppsComponentBuilderFactory.SmppsComponentBuilder smpps() {
return SmppsComponentBuilderFactory.smpps();
}
/**
* SMTP (camel-mail)
* Send and receive emails using imap, pop3 and smtp protocols.
*
* Category: mail
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-mail
*
* @return the dsl builder
*/
static SmtpComponentBuilderFactory.SmtpComponentBuilder smtp() {
return SmtpComponentBuilderFactory.smtp();
}
/**
* SMTPS (camel-mail)
* Send and receive emails using imap, pop3 and smtp protocols.
*
* Category: mail
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-mail
*
* @return the dsl builder
*/
static SmtpsComponentBuilderFactory.SmtpsComponentBuilder smtps() {
return SmtpsComponentBuilderFactory.smtps();
}
/**
* SNMP (camel-snmp)
* Receive traps and poll SNMP (Simple Network Management Protocol) capable
* devices.
*
* Category: monitoring
* Since: 2.1
* Maven coordinates: org.apache.camel:camel-snmp
*
* @return the dsl builder
*/
static SnmpComponentBuilderFactory.SnmpComponentBuilder snmp() {
return SnmpComponentBuilderFactory.snmp();
}
/**
* Solr (camel-solr)
* Perform operations against Apache Lucene Solr.
*
* Category: search,monitoring
* Since: 4.8
* Maven coordinates: org.apache.camel:camel-solr
*
* @return the dsl builder
*/
static SolrComponentBuilderFactory.SolrComponentBuilder solr() {
return SolrComponentBuilderFactory.solr();
}
/**
* Splunk (camel-splunk)
* Publish or search for events in Splunk.
*
* Category: iot,monitoring
* Since: 2.13
* Maven coordinates: org.apache.camel:camel-splunk
*
* @return the dsl builder
*/
static SplunkComponentBuilderFactory.SplunkComponentBuilder splunk() {
return SplunkComponentBuilderFactory.splunk();
}
/**
* Splunk HEC (camel-splunk-hec)
* The splunk component allows publishing events in Splunk using the HTTP
* Event Collector.
*
* Category: monitoring
* Since: 3.3
* Maven coordinates: org.apache.camel:camel-splunk-hec
*
* @return the dsl builder
*/
static SplunkHecComponentBuilderFactory.SplunkHecComponentBuilder splunkHec() {
return SplunkHecComponentBuilderFactory.splunkHec();
}
/**
* Spring AI Chat (camel-spring-ai-chat)
* Perform chat operations using Spring AI.
*
* Category: ai
* Since: 4.17
* Maven coordinates: org.apache.camel:camel-spring-ai-chat
*
* @return the dsl builder
*/
static SpringAiChatComponentBuilderFactory.SpringAiChatComponentBuilder springAiChat() {
return SpringAiChatComponentBuilderFactory.springAiChat();
}
/**
* Spring AI Embeddings (camel-spring-ai-embeddings)
* Spring AI Embeddings
*
* Category: ai
* Since: 4.17
* Maven coordinates: org.apache.camel:camel-spring-ai-embeddings
*
* @return the dsl builder
*/
static SpringAiEmbeddingsComponentBuilderFactory.SpringAiEmbeddingsComponentBuilder springAiEmbeddings() {
return SpringAiEmbeddingsComponentBuilderFactory.springAiEmbeddings();
}
/**
* Spring AI Tools (camel-spring-ai-tools)
* Spring AI Tools and Function Calling Features
*
* Category: ai
* Since: 4.17
* Maven coordinates: org.apache.camel:camel-spring-ai-tools
*
* @return the dsl builder
*/
static SpringAiToolsComponentBuilderFactory.SpringAiToolsComponentBuilder springAiTools() {
return SpringAiToolsComponentBuilderFactory.springAiTools();
}
/**
* Spring AI Vector Store (camel-spring-ai-vector-store)
* Spring AI Vector Store
*
* Category: ai
* Since: 4.17
* Maven coordinates: org.apache.camel:camel-spring-ai-vector-store
*
* @return the dsl builder
*/
static SpringAiVectorStoreComponentBuilderFactory.SpringAiVectorStoreComponentBuilder springAiVectorStore() {
return SpringAiVectorStoreComponentBuilderFactory.springAiVectorStore();
}
/**
* Spring Batch (camel-spring-batch)
* Send messages to Spring Batch for further processing.
*
* Category: workflow
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-spring-batch
*
* @return the dsl builder
*/
static SpringBatchComponentBuilderFactory.SpringBatchComponentBuilder springBatch() {
return SpringBatchComponentBuilderFactory.springBatch();
}
/**
* Spring Event (camel-spring)
* Listen for Spring Application Events.
*
* Category: messaging
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-spring
*
* @return the dsl builder
*/
static SpringEventComponentBuilderFactory.SpringEventComponentBuilder springEvent() {
return SpringEventComponentBuilderFactory.springEvent();
}
/**
* Spring JDBC (camel-spring-jdbc)
* Access databases through SQL and JDBC with Spring Transaction support.
*
* Category: database
* Since: 3.10
* Maven coordinates: org.apache.camel:camel-spring-jdbc
*
* @return the dsl builder
*/
static SpringJdbcComponentBuilderFactory.SpringJdbcComponentBuilder springJdbc() {
return SpringJdbcComponentBuilderFactory.springJdbc();
}
/**
* Spring LDAP (camel-spring-ldap)
* Perform searches in LDAP servers using filters as the message payload.
*
* Category: security
* Since: 2.11
* Maven coordinates: org.apache.camel:camel-spring-ldap
*
* @return the dsl builder
*/
static SpringLdapComponentBuilderFactory.SpringLdapComponentBuilder springLdap() {
return SpringLdapComponentBuilderFactory.springLdap();
}
/**
* Spring RabbitMQ (camel-spring-rabbitmq)
* Send and receive messages from RabbitMQ using the Spring RabbitMQ client.
*
* Category: messaging
* Since: 3.8
* Maven coordinates: org.apache.camel:camel-spring-rabbitmq
*
* @return the dsl builder
*/
static SpringRabbitmqComponentBuilderFactory.SpringRabbitmqComponentBuilder springRabbitmq() {
return SpringRabbitmqComponentBuilderFactory.springRabbitmq();
}
/**
* Spring Redis (camel-spring-redis)
* Send and receive messages from Redis.
*
* Category: cache
* Since: 2.11
* Maven coordinates: org.apache.camel:camel-spring-redis
*
* @return the dsl builder
*/
static SpringRedisComponentBuilderFactory.SpringRedisComponentBuilder springRedis() {
return SpringRedisComponentBuilderFactory.springRedis();
}
/**
* Spring WebService (camel-spring-ws)
* Access external web services as a client or expose your own web services.
*
* Category: webservice
* Since: 2.6
* Maven coordinates: org.apache.camel:camel-spring-ws
*
* @return the dsl builder
*/
static SpringWsComponentBuilderFactory.SpringWsComponentBuilder springWs() {
return SpringWsComponentBuilderFactory.springWs();
}
/**
* SQL (camel-sql)
* Perform SQL queries using Spring JDBC.
*
* Category: database
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-sql
*
* @return the dsl builder
*/
static SqlComponentBuilderFactory.SqlComponentBuilder sql() {
return SqlComponentBuilderFactory.sql();
}
/**
* SQL Stored Procedure (camel-sql)
* Perform SQL queries as a JDBC Stored Procedures using Spring JDBC.
*
* Category: database
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-sql
*
* @return the dsl builder
*/
static SqlStoredComponentBuilderFactory.SqlStoredComponentBuilder sqlStored() {
return SqlStoredComponentBuilderFactory.sqlStored();
}
/**
* SSH (camel-ssh)
* Execute commands on remote hosts using SSH.
*
* Category: file
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-ssh
*
* @return the dsl builder
*/
static SshComponentBuilderFactory.SshComponentBuilder ssh() {
return SshComponentBuilderFactory.ssh();
}
/**
* StAX (camel-stax)
* Process XML payloads by a SAX ContentHandler.
*
* Category: transformation
* Since: 2.9
* Maven coordinates: org.apache.camel:camel-stax
*
* @return the dsl builder
*/
static StaxComponentBuilderFactory.StaxComponentBuilder stax() {
return StaxComponentBuilderFactory.stax();
}
/**
* Stitch (camel-stitch)
* Stitch is a cloud ETL service that integrates various data sources into a
* central data warehouse through various integrations.
*
* Category: cloud,api,saas,bigdata
* Since: 3.8
* Maven coordinates: org.apache.camel:camel-stitch
*
* @return the dsl builder
*/
static StitchComponentBuilderFactory.StitchComponentBuilder stitch() {
return StitchComponentBuilderFactory.stitch();
}
/**
* Stomp (camel-stomp)
* Send and receive messages to/from STOMP (Simple Text Oriented Messaging
* Protocol) compliant message brokers.
*
* Category: messaging
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-stomp
*
* @return the dsl builder
*/
static StompComponentBuilderFactory.StompComponentBuilder stomp() {
return StompComponentBuilderFactory.stomp();
}
/**
* Stream (camel-stream)
* Read from system-in and write to system-out and system-err streams.
*
* Category: file,core
* Since: 1.3
* Maven coordinates: org.apache.camel:camel-stream
*
* @return the dsl builder
*/
static StreamComponentBuilderFactory.StreamComponentBuilder stream() {
return StreamComponentBuilderFactory.stream();
}
/**
* String Template (camel-stringtemplate)
* Transform messages using StringTemplate engine.
*
* Category: transformation,script
* Since: 1.2
* Maven coordinates: org.apache.camel:camel-stringtemplate
*
* @return the dsl builder
*/
static StringTemplateComponentBuilderFactory.StringTemplateComponentBuilder stringTemplate() {
return StringTemplateComponentBuilderFactory.stringTemplate();
}
/**
* Stub (camel-stub)
* Stub out any physical endpoints while in development or testing.
*
* Category: core,testing
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-stub
*
* @return the dsl builder
*/
static StubComponentBuilderFactory.StubComponentBuilder stub() {
return StubComponentBuilderFactory.stub();
}
/**
* Tahu Edge Node / Device (camel-tahu)
* Sparkplug B Edge Node and Device support over MQTT using Eclipse Tahu
*
* Category: messaging,iot,monitoring
* Since: 4.8
* Maven coordinates: org.apache.camel:camel-tahu
*
* @return the dsl builder
*/
static TahuEdgeComponentBuilderFactory.TahuEdgeComponentBuilder tahuEdge() {
return TahuEdgeComponentBuilderFactory.tahuEdge();
}
/**
* Tahu Host Application (camel-tahu)
* Sparkplug B Host Application support over MQTT using Eclipse Tahu
*
* Category: messaging,iot,monitoring
* Since: 4.8
* Maven coordinates: org.apache.camel:camel-tahu
*
* @return the dsl builder
*/
static TahuHostComponentBuilderFactory.TahuHostComponentBuilder tahuHost() {
return TahuHostComponentBuilderFactory.tahuHost();
}
/**
* Telegram (camel-telegram)
* Send and receive messages using the Telegram Bot API.
*
* Category: cloud,api,chat
* Since: 2.18
* Maven coordinates: org.apache.camel:camel-telegram
*
* @return the dsl builder
*/
static TelegramComponentBuilderFactory.TelegramComponentBuilder telegram() {
return TelegramComponentBuilderFactory.telegram();
}
/**
* TensorFlow Serving (camel-tensorflow-serving)
* Provide access to TensorFlow Serving model servers to run inference with
* TensorFlow saved models remotely
*
* Category: ai
* Since: 4.10
* Maven coordinates: org.apache.camel:camel-tensorflow-serving
*
* @return the dsl builder
*/
static TensorflowServingComponentBuilderFactory.TensorflowServingComponentBuilder tensorflowServing() {
return TensorflowServingComponentBuilderFactory.tensorflowServing();
}
/**
* Thrift (camel-thrift)
* Call and expose remote procedures (RPC) with Apache Thrift data format
* and serialization mechanism.
*
* Category: rpc,transformation
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-thrift
*
* @return the dsl builder
*/
static ThriftComponentBuilderFactory.ThriftComponentBuilder thrift() {
return ThriftComponentBuilderFactory.thrift();
}
/**
* Thymeleaf (camel-thymeleaf)
* Transform messages using a Thymeleaf template.
*
* Category: transformation
* Since: 4.1
* Maven coordinates: org.apache.camel:camel-thymeleaf
*
* @return the dsl builder
*/
static ThymeleafComponentBuilderFactory.ThymeleafComponentBuilder thymeleaf() {
return ThymeleafComponentBuilderFactory.thymeleaf();
}
/**
* Tika (camel-tika)
* Parse documents and extract metadata and text using Apache Tika.
*
* Category: document,transformation
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-tika
*
* @return the dsl builder
*/
static TikaComponentBuilderFactory.TikaComponentBuilder tika() {
return TikaComponentBuilderFactory.tika();
}
/**
* Timer (camel-timer)
* Generate messages in specified intervals using java.util.Timer.
*
* Category: core,scheduling
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-timer
*
* @return the dsl builder
*/
static TimerComponentBuilderFactory.TimerComponentBuilder timer() {
return TimerComponentBuilderFactory.timer();
}
/**
* TorchServe (camel-torchserve)
* Provide access to PyTorch TorchServe servers to run inference with
* PyTorch models remotely
*
* Category: ai
* Since: 4.9
* Maven coordinates: org.apache.camel:camel-torchserve
*
* @return the dsl builder
*/
static TorchserveComponentBuilderFactory.TorchserveComponentBuilder torchserve() {
return TorchserveComponentBuilderFactory.torchserve();
}
/**
* Twilio (camel-twilio)
* Interact with Twilio REST APIs using Twilio Java SDK.
*
* Category: api,messaging,cloud
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-twilio
*
* @return the dsl builder
*/
static TwilioComponentBuilderFactory.TwilioComponentBuilder twilio() {
return TwilioComponentBuilderFactory.twilio();
}
/**
* Twitter Direct Message (camel-twitter)
* Send and receive Twitter direct messages.
*
* Category: saas,social
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-twitter
*
* @return the dsl builder
*/
static TwitterDirectmessageComponentBuilderFactory.TwitterDirectmessageComponentBuilder twitterDirectmessage() {
return TwitterDirectmessageComponentBuilderFactory.twitterDirectmessage();
}
/**
* Twitter Search (camel-twitter)
* Access Twitter Search.
*
* Category: saas,social
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-twitter
*
* @return the dsl builder
*/
static TwitterSearchComponentBuilderFactory.TwitterSearchComponentBuilder twitterSearch() {
return TwitterSearchComponentBuilderFactory.twitterSearch();
}
/**
* Twitter Timeline (camel-twitter)
* Send tweets and receive tweets from user's timeline.
*
* Category: saas,social
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-twitter
*
* @return the dsl builder
*/
static TwitterTimelineComponentBuilderFactory.TwitterTimelineComponentBuilder twitterTimeline() {
return TwitterTimelineComponentBuilderFactory.twitterTimeline();
}
/**
* Undertow (camel-undertow)
* Expose HTTP and WebSocket endpoints and access external HTTP/WebSocket
* servers.
*
* Category: http,networking
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-undertow
*
* @return the dsl builder
*/
static UndertowComponentBuilderFactory.UndertowComponentBuilder undertow() {
return UndertowComponentBuilderFactory.undertow();
}
/**
* Validator (camel-validator)
* Validate the payload using XML Schema and JAXP Validation.
*
* Category: core,validation
* Since: 1.1
* Maven coordinates: org.apache.camel:camel-validator
*
* @return the dsl builder
*/
static ValidatorComponentBuilderFactory.ValidatorComponentBuilder validator() {
return ValidatorComponentBuilderFactory.validator();
}
/**
* Velocity (camel-velocity)
* Transform messages using a Velocity template.
*
* Category: transformation
* Since: 1.2
* Maven coordinates: org.apache.camel:camel-velocity
*
* @return the dsl builder
*/
static VelocityComponentBuilderFactory.VelocityComponentBuilder velocity() {
return VelocityComponentBuilderFactory.velocity();
}
/**
* Vert.x (camel-vertx)
* Send and receive messages to/from Vert.x Event Bus.
*
* Category: messaging
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-vertx
*
* @return the dsl builder
*/
static VertxComponentBuilderFactory.VertxComponentBuilder vertx() {
return VertxComponentBuilderFactory.vertx();
}
/**
* Vert.x HTTP Client (camel-vertx-http)
* Send requests to external HTTP servers using Vert.x
*
* Category: http
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-vertx-http
*
* @return the dsl builder
*/
static VertxHttpComponentBuilderFactory.VertxHttpComponentBuilder vertxHttp() {
return VertxHttpComponentBuilderFactory.vertxHttp();
}
/**
* Vert.x WebSocket (camel-vertx-websocket)
* Expose WebSocket endpoints and connect to remote WebSocket servers using
* Vert.x
*
* Category: http,networking
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-vertx-websocket
*
* @return the dsl builder
*/
static VertxWebsocketComponentBuilderFactory.VertxWebsocketComponentBuilder vertxWebsocket() {
return VertxWebsocketComponentBuilderFactory.vertxWebsocket();
}
/**
* Wasm (camel-wasm)
* Invoke Wasm functions.
*
* Category: core,script
* Since: 4.4
* Maven coordinates: org.apache.camel:camel-wasm
*
* @return the dsl builder
*/
static WasmComponentBuilderFactory.WasmComponentBuilder wasm() {
return WasmComponentBuilderFactory.wasm();
}
/**
* Weather (camel-weather)
* Poll the weather information from Open Weather Map.
*
* Category: api
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-weather
*
* @return the dsl builder
*/
static WeatherComponentBuilderFactory.WeatherComponentBuilder weather() {
return WeatherComponentBuilderFactory.weather();
}
/**
* weaviate (camel-weaviate)
* Perform operations on the Weaviate Vector Database.
*
* Category: database,ai
* Since: 4.12
* Maven coordinates: org.apache.camel:camel-weaviate
*
* @return the dsl builder
*/
static WeaviateComponentBuilderFactory.WeaviateComponentBuilder weaviate() {
return WeaviateComponentBuilderFactory.weaviate();
}
/**
* Web3j Ethereum Blockchain (camel-web3j)
* Interact with Ethereum nodes using web3j client API.
*
* Category: blockchain
* Since: 2.22
* Maven coordinates: org.apache.camel:camel-web3j
*
* @return the dsl builder
*/
static Web3jComponentBuilderFactory.Web3jComponentBuilder web3j() {
return Web3jComponentBuilderFactory.web3j();
}
/**
* Webhook (camel-webhook)
* Expose webhook endpoints to receive push notifications for other Camel
* components.
*
* Category: cloud
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-webhook
*
* @return the dsl builder
*/
static WebhookComponentBuilderFactory.WebhookComponentBuilder webhook() {
return WebhookComponentBuilderFactory.webhook();
}
/**
* WhatsApp (camel-whatsapp)
* Send messages to WhatsApp.
*
* Category: cloud,api,chat
* Since: 3.19
* Maven coordinates: org.apache.camel:camel-whatsapp
*
* @return the dsl builder
*/
static WhatsappComponentBuilderFactory.WhatsappComponentBuilder whatsapp() {
return WhatsappComponentBuilderFactory.whatsapp();
}
/**
* WordPress (camel-wordpress)
* Manage posts and users using the WordPress API.
*
* Category: cloud,api,cms
* Since: 2.21
* Maven coordinates: org.apache.camel:camel-wordpress
*
* @return the dsl builder
*/
static WordpressComponentBuilderFactory.WordpressComponentBuilder wordpress() {
return WordpressComponentBuilderFactory.wordpress();
}
/**
* Workday (camel-workday)
* Detect and parse documents using Workday.
*
* Category: cloud,api,saas
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-workday
*
* @return the dsl builder
*/
static WorkdayComponentBuilderFactory.WorkdayComponentBuilder workday() {
return WorkdayComponentBuilderFactory.workday();
}
/**
* XChange (camel-xchange)
* Access market data and trade on Bitcoin and Altcoin exchanges.
*
* Category: blockchain
* Since: 2.21
* Maven coordinates: org.apache.camel:camel-xchange
*
* @return the dsl builder
*/
static XchangeComponentBuilderFactory.XchangeComponentBuilder xchange() {
return XchangeComponentBuilderFactory.xchange();
}
/**
* XJ (camel-xj)
* Transform JSON and XML message using a XSLT.
*
* Category: transformation
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-xj
*
* @return the dsl builder
*/
static XjComponentBuilderFactory.XjComponentBuilder xj() {
return XjComponentBuilderFactory.xj();
}
/**
* XML Security Sign (camel-xmlsecurity)
* Sign XML payloads using the XML signature specification.
*
* Category: security,transformation
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-xmlsecurity
*
* @return the dsl builder
*/
static XmlsecuritySignComponentBuilderFactory.XmlsecuritySignComponentBuilder xmlsecuritySign() {
return XmlsecuritySignComponentBuilderFactory.xmlsecuritySign();
}
/**
* XML Security Verify (camel-xmlsecurity)
* Verify XML payloads using the XML signature specification.
*
* Category: security,transformation
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-xmlsecurity
*
* @return the dsl builder
*/
static XmlsecurityVerifyComponentBuilderFactory.XmlsecurityVerifyComponentBuilder xmlsecurityVerify() {
return XmlsecurityVerifyComponentBuilderFactory.xmlsecurityVerify();
}
/**
* XMPP (camel-xmpp)
* Send and receive messages to/from an XMPP chat server.
*
* Category: chat,messaging
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-xmpp
*
* @return the dsl builder
*/
static XmppComponentBuilderFactory.XmppComponentBuilder xmpp() {
return XmppComponentBuilderFactory.xmpp();
}
/**
* XQuery (camel-saxon)
* Query and/or transform XML payloads using XQuery and Saxon.
*
* Category: transformation
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-saxon
*
* @return the dsl builder
*/
static XqueryComponentBuilderFactory.XqueryComponentBuilder xquery() {
return XqueryComponentBuilderFactory.xquery();
}
/**
* XSLT (camel-xslt)
* Transforms XML payload using an XSLT template.
*
* Category: core,transformation
* Since: 1.3
* Maven coordinates: org.apache.camel:camel-xslt
*
* @return the dsl builder
*/
static XsltComponentBuilderFactory.XsltComponentBuilder xslt() {
return XsltComponentBuilderFactory.xslt();
}
/**
* XSLT Saxon (camel-xslt-saxon)
* Transform XML payloads using an XSLT template using Saxon.
*
* Category: core,transformation
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-xslt-saxon
*
* @return the dsl builder
*/
static XsltSaxonComponentBuilderFactory.XsltSaxonComponentBuilder xsltSaxon() {
return XsltSaxonComponentBuilderFactory.xsltSaxon();
}
/**
* Zeebe (camel-zeebe)
* Zeebe component which integrates with Camunda Zeebe to interact with the
* API.
*
* Category: workflow,saas
* Since: 3.21
* Maven coordinates: org.apache.camel:camel-zeebe
*
* @return the dsl builder
*/
static ZeebeComponentBuilderFactory.ZeebeComponentBuilder zeebe() {
return ZeebeComponentBuilderFactory.zeebe();
}
/**
* Zendesk (camel-zendesk)
* Manage Zendesk tickets, users, organizations, etc.
*
* Category: cloud,api,saas
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-zendesk
*
* @return the dsl builder
*/
static ZendeskComponentBuilderFactory.ZendeskComponentBuilder zendesk() {
return ZendeskComponentBuilderFactory.zendesk();
}
/**
* ZooKeeper (camel-zookeeper)
* Manage ZooKeeper clusters.
*
* Category: clustering,management,bigdata
* Since: 2.9
* Maven coordinates: org.apache.camel:camel-zookeeper
*
* @return the dsl builder
*/
static ZookeeperComponentBuilderFactory.ZookeeperComponentBuilder zookeeper() {
return ZookeeperComponentBuilderFactory.zookeeper();
}
/**
* ZooKeeper Master (camel-zookeeper-master)
* Have only a single consumer in a cluster consuming from a given endpoint;
* with automatic failover if the JVM dies.
*
* Category: clustering,management,bigdata
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-zookeeper-master
*
* @return the dsl builder
*/
static ZookeeperMasterComponentBuilderFactory.ZookeeperMasterComponentBuilder zookeeperMaster() {
return ZookeeperMasterComponentBuilderFactory.zookeeperMaster();
}
} | for |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/annotations/SchedulerSupport.java | {
"start": 776,
"end": 1335
} | class ____ method uses.
* <p>
* Constants are provided for instances from {@link Schedulers} as well as values for
* {@linkplain #NONE not using a scheduler} and {@linkplain #CUSTOM a manually-specified scheduler}.
* Libraries providing their own values should namespace them with their base package name followed
* by a colon ({@code :}) and then a human-readable name (e.g., {@code com.example:ui-thread}).
* @since 2.0
*/
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Target({ElementType.CONSTRUCTOR, ElementType.METHOD, ElementType.TYPE})
public @ | or |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/RemovedConfigPropertiesTest.java | {
"start": 1215,
"end": 1388
} | class ____ {
@ConfigProperty(name = "my.prop")
String prop;
public String getProp() {
return prop;
}
}
}
| RemovedConfigProperties |
java | google__guava | android/guava-tests/test/com/google/common/base/OptionalTest.java | {
"start": 1511,
"end": 10729
} | class ____ extends TestCase {
@SuppressWarnings("NullOptional")
public void testToJavaUtil_static() {
assertThat(Optional.toJavaUtil(null)).isNull();
assertEquals(java.util.Optional.empty(), Optional.toJavaUtil(Optional.absent()));
assertEquals(java.util.Optional.of("abc"), Optional.toJavaUtil(Optional.of("abc")));
}
public void testToJavaUtil_instance() {
assertEquals(java.util.Optional.empty(), Optional.absent().toJavaUtil());
assertEquals(java.util.Optional.of("abc"), Optional.of("abc").toJavaUtil());
}
@SuppressWarnings("NullOptional")
public void testFromJavaUtil() {
assertThat(Optional.fromJavaUtil(null)).isNull();
assertEquals(Optional.absent(), Optional.fromJavaUtil(java.util.Optional.empty()));
assertEquals(Optional.of("abc"), Optional.fromJavaUtil(java.util.Optional.of("abc")));
}
public void testAbsent() {
Optional<String> optionalName = Optional.absent();
assertFalse(optionalName.isPresent());
}
public void testOf() {
assertEquals("training", Optional.of("training").get());
}
public void testOf_null() {
assertThrows(NullPointerException.class, () -> Optional.of(null));
}
public void testFromNullable() {
Optional<String> optionalName = Optional.fromNullable("bob");
assertEquals("bob", optionalName.get());
}
public void testFromNullable_null() {
// not promised by spec, but easier to test
assertSame(Optional.absent(), Optional.fromNullable(null));
}
public void testIsPresent_no() {
assertFalse(Optional.absent().isPresent());
}
@SuppressWarnings("OptionalOfRedundantMethod") // Unit tests for Optional
public void testIsPresent_yes() {
assertTrue(Optional.of("training").isPresent());
}
public void testGet_absent() {
Optional<String> optional = Optional.absent();
assertThrows(IllegalStateException.class, optional::get);
}
public void testGet_present() {
assertEquals("training", Optional.of("training").get());
}
@SuppressWarnings("OptionalOfRedundantMethod") // Unit tests for Optional
public void testOr_t_present() {
assertEquals("a", Optional.of("a").or("default"));
}
public void testOr_t_absent() {
assertEquals("default", Optional.absent().or("default"));
}
@SuppressWarnings("OptionalOfRedundantMethod") // Unit tests for Optional
public void testOr_supplier_present() {
assertEquals("a", Optional.of("a").or(Suppliers.ofInstance("fallback")));
}
public void testOr_supplier_absent() {
assertEquals("fallback", Optional.absent().or(Suppliers.ofInstance("fallback")));
}
public void testOr_nullSupplier_absent() {
Supplier<Object> nullSupplier = (Supplier<Object>) Suppliers.<@Nullable Object>ofInstance(null);
Optional<Object> absentOptional = Optional.absent();
assertThrows(NullPointerException.class, () -> absentOptional.or(nullSupplier));
}
@SuppressWarnings("OptionalOfRedundantMethod") // Unit tests for Optional
public void testOr_nullSupplier_present() {
Supplier<String> nullSupplier = (Supplier<String>) Suppliers.<@Nullable String>ofInstance(null);
assertEquals("a", Optional.of("a").or(nullSupplier));
}
@SuppressWarnings("OptionalOfRedundantMethod") // Unit tests for Optional
public void testOr_optional_present() {
assertEquals(Optional.of("a"), Optional.of("a").or(Optional.of("fallback")));
}
public void testOr_optional_absent() {
assertEquals(Optional.of("fallback"), Optional.absent().or(Optional.of("fallback")));
}
@SuppressWarnings("OptionalOfRedundantMethod") // Unit tests for Optional
public void testOrNull_present() {
assertEquals("a", Optional.of("a").orNull());
}
public void testOrNull_absent() {
assertThat(Optional.absent().orNull()).isNull();
}
public void testAsSet_present() {
Set<String> expected = Collections.singleton("a");
assertEquals(expected, Optional.of("a").asSet());
}
public void testAsSet_absent() {
assertTrue("Returned set should be empty", Optional.absent().asSet().isEmpty());
}
public void testAsSet_presentIsImmutable() {
Set<String> presentAsSet = Optional.of("a").asSet();
assertThrows(UnsupportedOperationException.class, () -> presentAsSet.add("b"));
}
public void testAsSet_absentIsImmutable() {
Set<Object> absentAsSet = Optional.absent().asSet();
assertThrows(UnsupportedOperationException.class, () -> absentAsSet.add("foo"));
}
public void testTransform_absent() {
assertEquals(Optional.absent(), Optional.absent().transform(Functions.identity()));
assertEquals(Optional.absent(), Optional.absent().transform(Functions.toStringFunction()));
}
public void testTransform_presentIdentity() {
assertEquals(Optional.of("a"), Optional.of("a").transform(Functions.identity()));
}
public void testTransform_presentToString() {
assertEquals(Optional.of("42"), Optional.of(42).transform(Functions.toStringFunction()));
}
public void testTransform_present_functionReturnsNull() {
assertThrows(NullPointerException.class, () -> Optional.of("a").transform(input -> null));
}
public void testTransform_absent_functionReturnsNull() {
assertEquals(Optional.absent(), Optional.absent().transform(input -> null));
}
public void testEqualsAndHashCode() {
new EqualsTester()
.addEqualityGroup(Optional.absent(), reserialize(Optional.absent()))
.addEqualityGroup(Optional.of(Long.valueOf(5)), reserialize(Optional.of(Long.valueOf(5))))
.addEqualityGroup(Optional.of(Long.valueOf(42)), reserialize(Optional.of(Long.valueOf(42))))
.testEquals();
}
public void testToString_absent() {
assertEquals("Optional.absent()", Optional.absent().toString());
}
public void testToString_present() {
assertEquals("Optional.of(training)", Optional.of("training").toString());
}
public void testPresentInstances_allPresent() {
List<Optional<String>> optionals =
ImmutableList.of(Optional.of("a"), Optional.of("b"), Optional.of("c"));
assertThat(Optional.presentInstances(optionals)).containsExactly("a", "b", "c").inOrder();
}
public void testPresentInstances_allAbsent() {
List<Optional<Object>> optionals = ImmutableList.of(Optional.absent(), Optional.absent());
assertThat(Optional.presentInstances(optionals)).isEmpty();
}
public void testPresentInstances_somePresent() {
List<Optional<String>> optionals =
ImmutableList.of(Optional.of("a"), Optional.<String>absent(), Optional.of("c"));
assertThat(Optional.presentInstances(optionals)).containsExactly("a", "c").inOrder();
}
public void testPresentInstances_callingIteratorTwice() {
List<Optional<String>> optionals =
ImmutableList.of(Optional.of("a"), Optional.<String>absent(), Optional.of("c"));
Iterable<String> onlyPresent = Optional.presentInstances(optionals);
assertThat(onlyPresent).containsExactly("a", "c").inOrder();
assertThat(onlyPresent).containsExactly("a", "c").inOrder();
}
public void testPresentInstances_wildcards() {
List<Optional<? extends Number>> optionals =
ImmutableList.<Optional<? extends Number>>of(Optional.<Double>absent(), Optional.of(2));
Iterable<Number> onlyPresent = Optional.presentInstances(optionals);
assertThat(onlyPresent).containsExactly(2);
}
private static Optional<Integer> getSomeOptionalInt() {
return Optional.of(1);
}
private static FluentIterable<? extends Number> getSomeNumbers() {
return FluentIterable.from(ImmutableList.<Number>of());
}
/*
* The following tests demonstrate the shortcomings of or() and test that the casting workaround
* mentioned in the method Javadoc does in fact compile.
*/
@SuppressWarnings("unused") // compilation test
public void testSampleCodeError1() {
Optional<Integer> optionalInt = getSomeOptionalInt();
// Number value = optionalInt.or(0.5); // error
}
@SuppressWarnings("unused") // compilation test
public void testSampleCodeError2() {
FluentIterable<? extends Number> numbers = getSomeNumbers();
Optional<? extends Number> first = numbers.first();
// Number value = first.or(0.5); // error
}
@SuppressWarnings("unused") // compilation test
public void testSampleCodeFine1() {
Optional<Number> optionalInt = Optional.of((Number) 1);
Number value = optionalInt.or(0.5); // fine
}
@SuppressWarnings("unused") // compilation test
public void testSampleCodeFine2() {
FluentIterable<? extends Number> numbers = getSomeNumbers();
// Sadly, the following is what users will have to do in some circumstances.
@SuppressWarnings("unchecked") // safe covariant cast
Optional<Number> first = (Optional<Number>) numbers.first();
Number value = first.or(0.5); // fine
}
@J2ktIncompatible
@GwtIncompatible // NullPointerTester
public void testNullPointers() {
NullPointerTester npTester = new NullPointerTester();
npTester.testAllPublicConstructors(Optional.class);
npTester.testAllPublicStaticMethods(Optional.class);
npTester.testAllPublicInstanceMethods(Optional.absent());
npTester.testAllPublicInstanceMethods(Optional.of("training"));
}
}
| OptionalTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/FinallyFunction.java | {
"start": 2299,
"end": 3786
} | interface ____<R> {
/**
* Applies this final action function to the result of an asynchronous operation.
*
* @param r the result of the asynchronous operation, which may be null if the
* operation did not complete successfully
* @return the result after applying the final action, which may be a new result or a
* modified version of the input result
* @throws IOException if an I/O error occurs during the application of the final action
*/
R apply(R r) throws IOException;
/**
* Applies this final action function to a {@code CompletableFuture}, which is expected
* to be the result of an asynchronous operation.
* <p>
* This method is a convenience that simplifies the use of {@code FinallyFunction}
* with asynchronous operations. It handles the completion of the future and applies
* the {@code FinallyFunction} to the result.
*
* @param in the {@code CompletableFuture} representing the asynchronous operation
* @return a new {@code CompletableFuture} that completes with the result of applying
* the final action function
*/
default CompletableFuture<R> apply(CompletableFuture<R> in) {
return in.handle((r, e) -> {
try {
R ret = apply(r);
if (e != null) {
throw warpCompletionException(e);
} else {
return ret;
}
} catch (IOException ioe) {
throw warpCompletionException(ioe);
}
});
}
}
| FinallyFunction |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetoone/OptionalOneToOneMapsIdQueryTest.java | {
"start": 7450,
"end": 7687
} | class ____ {
@Id
private Long id;
@OneToOne
@MapsId
@JoinColumn(name = "id")
@NotFound(action = NotFoundAction.IGNORE)
private BarWithIdNamedId bar;
}
@Entity(name = "BarWithIdNamedId")
public static | FooHasBarWithIdNamedId |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java | {
"start": 2580,
"end": 7333
} | class ____ extends SimpleChannelInboundHandler<ChunkFetchRequest> {
private static final SparkLogger logger =
SparkLoggerFactory.getLogger(ChunkFetchRequestHandler.class);
private final TransportClient client;
private final StreamManager streamManager;
/** The max number of chunks being transferred and not finished yet. */
private final long maxChunksBeingTransferred;
private final boolean syncModeEnabled;
public ChunkFetchRequestHandler(
TransportClient client,
StreamManager streamManager,
Long maxChunksBeingTransferred,
boolean syncModeEnabled) {
this.client = client;
this.streamManager = streamManager;
this.maxChunksBeingTransferred = maxChunksBeingTransferred;
this.syncModeEnabled = syncModeEnabled;
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.warn("Exception in connection from {}", cause,
MDC.of(LogKeys.HOST_PORT, getRemoteAddress(ctx.channel())));
ctx.close();
}
@Override
protected void channelRead0(
ChannelHandlerContext ctx,
final ChunkFetchRequest msg) throws Exception {
Channel channel = ctx.channel();
processFetchRequest(channel, msg);
}
public void processFetchRequest(
final Channel channel, final ChunkFetchRequest msg) throws Exception {
if (logger.isTraceEnabled()) {
logger.trace("Received req from {} to fetch block {}", getRemoteAddress(channel),
msg.streamChunkId);
}
if (maxChunksBeingTransferred < Long.MAX_VALUE) {
long chunksBeingTransferred = streamManager.chunksBeingTransferred();
if (chunksBeingTransferred >= maxChunksBeingTransferred) {
logger.warn("The number of chunks being transferred {} is above {}, close the connection.",
MDC.of(LogKeys.NUM_CHUNKS, chunksBeingTransferred),
MDC.of(LogKeys.MAX_NUM_CHUNKS, maxChunksBeingTransferred));
channel.close();
return;
}
}
ManagedBuffer buf;
try {
streamManager.checkAuthorization(client, msg.streamChunkId.streamId());
buf = streamManager.getChunk(msg.streamChunkId.streamId(), msg.streamChunkId.chunkIndex());
if (buf == null) {
throw new IllegalStateException("Chunk was not found");
}
} catch (Exception e) {
logger.error("Error opening block {} for request from {}", e,
MDC.of(LogKeys.STREAM_CHUNK_ID, msg.streamChunkId),
MDC.of(LogKeys.HOST_PORT, getRemoteAddress(channel)));
respond(channel, new ChunkFetchFailure(msg.streamChunkId,
JavaUtils.stackTraceToString(e)));
return;
}
streamManager.chunkBeingSent(msg.streamChunkId.streamId());
respond(channel, new ChunkFetchSuccess(msg.streamChunkId, buf)).addListener(
(ChannelFutureListener) future -> streamManager.chunkSent(msg.streamChunkId.streamId()));
}
/**
* The invocation to channel.writeAndFlush is async, and the actual I/O on the
* channel will be handled by the EventLoop the channel is registered to. So even
* though we are processing the ChunkFetchRequest in a separate thread pool, the actual I/O,
* which is the potentially blocking call that could deplete server handler threads, is still
* being processed by TransportServer's default EventLoopGroup.
*
* When syncModeEnabled is true, Spark will throttle the max number of threads that channel I/O
* for sending response to ChunkFetchRequest, the thread calling channel.writeAndFlush will wait
* for the completion of sending response back to client by invoking await(). This will throttle
* the rate at which threads from ChunkFetchRequest dedicated EventLoopGroup submit channel I/O
* requests to TransportServer's default EventLoopGroup, thus making sure that we can reserve
* some threads in TransportServer's default EventLoopGroup for handling other RPC messages.
*/
private ChannelFuture respond(
final Channel channel,
final Encodable result) throws InterruptedException {
final SocketAddress remoteAddress = channel.remoteAddress();
ChannelFuture channelFuture;
if (syncModeEnabled) {
channelFuture = channel.writeAndFlush(result).await();
} else {
channelFuture = channel.writeAndFlush(result);
}
return channelFuture.addListener((ChannelFutureListener) future -> {
if (future.isSuccess()) {
logger.trace("Sent result {} to client {}", result, remoteAddress);
} else {
logger.error("Error sending result {} to {}; closing connection",
future.cause(),
MDC.of(LogKeys.RESULT, result),
MDC.of(LogKeys.HOST_PORT, remoteAddress));
channel.close();
}
});
}
}
| ChunkFetchRequestHandler |
java | google__guava | guava-testlib/test/com/google/common/testing/ClassSanityTesterTest.java | {
"start": 22372,
"end": 22816
} | class ____ {
final String name;
public InstantiableConstructorChosen(String name) {
checkNotNull(name);
this.name = "good";
}
public InstantiableConstructorChosen(NotInstantiable x) {
checkNotNull(x);
this.name = "x1";
}
public static InstantiableFactoryMethodChosen create(NotInstantiable x) {
return new InstantiableFactoryMethodChosen(x);
}
}
static | InstantiableConstructorChosen |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/client/RestTemplate.java | {
"start": 41978,
"end": 42165
} | class ____ implements ResponseExtractor<HttpHeaders> {
@Override
public HttpHeaders extractData(ClientHttpResponse response) {
return response.getHeaders();
}
}
}
| HeadersExtractor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MathAbsoluteNegativeTest.java | {
"start": 4197,
"end": 4592
} | class ____ {
void f() {
// BUG: Diagnostic contains: MathAbsoluteNegative
long foo = Math.abs(goodFastHash(64).hashUnencodedChars("").padToLong());
}
}
""")
.doTest();
}
@Test
public void objectHashCode() {
helper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/scalar/RegexpExtractAllFunction.java | {
"start": 1553,
"end": 2625
} | class ____ extends BuiltInScalarFunction {
public RegexpExtractAllFunction(SpecializedFunction.SpecializedContext context) {
super(BuiltInFunctionDefinitions.REGEXP_EXTRACT_ALL, context);
}
public @Nullable ArrayData eval(@Nullable StringData str, @Nullable StringData regex) {
return eval(str, regex, 1);
}
public @Nullable ArrayData eval(
@Nullable StringData str, @Nullable StringData regex, @Nullable Number extractIndex) {
if (extractIndex == null || extractIndex.longValue() < 0) {
return null;
}
Matcher matcher = getRegexpMatcher(str, regex);
if (matcher == null) {
return null;
}
if (matcher.groupCount() < extractIndex.longValue()) {
return null;
}
List<StringData> list = new ArrayList<>();
while (matcher.find()) {
list.add(BinaryStringData.fromString(matcher.group(extractIndex.intValue())));
}
return new GenericArrayData(list.toArray());
}
}
| RegexpExtractAllFunction |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoBuilderTest.java | {
"start": 8383,
"end": 9055
} | interface ____ {
MyAnnotationSimplerBuilder value(String x);
MyAnnotationSimplerBuilder id(int x);
MyAnnotation build();
}
public static MyAnnotationSimplerBuilder myAnnotationSimplerBuilder() {
return new AutoBuilder_AutoBuilderTest_MyAnnotationSimplerBuilder();
}
@Test
public void buildWithoutAutoAnnotation_noSetterForElement() {
MyAnnotation annotation = myAnnotationSimplerBuilder().value("foo").id(23).build();
assertThat(annotation.value()).isEqualTo("foo");
assertThat(annotation.id()).isEqualTo(23);
assertThat(annotation.truthiness()).isEqualTo(MyAnnotation.DEFAULT_TRUTHINESS);
}
static | MyAnnotationSimplerBuilder |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/domain/blog/ComplexImmutableAuthorId.java | {
"start": 697,
"end": 2060
} | class ____ {
protected final int id;
protected final String email;
protected final String username;
protected final String password;
public ComplexImmutableAuthorId(int aId, String aEmail, String aUsername, String aPassword) {
id = aId;
email = aEmail;
username = aUsername;
password = aPassword;
}
public int getId() {
return id;
}
public String getEmail() {
return email;
}
public String getUsername() {
return username;
}
public String getPassword() {
return password;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final ComplexImmutableAuthorId that = (ComplexImmutableAuthorId) o;
return id == that.id && email != null ? email.equals(that.email)
: that.email == null && password != null ? password.equals(that.password)
: that.password == null && username != null ? username.equals(that.username) : that.username == null;
}
@Override
public int hashCode() {
int myresult = id;
myresult = 31 * myresult + (email != null ? email.hashCode() : 0);
myresult = 31 * myresult + (username != null ? username.hashCode() : 0);
return 31 * myresult + (password != null ? password.hashCode() : 0);
}
}
| ComplexImmutableAuthorId |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/annotation/Metric.java | {
"start": 1026,
"end": 1274
} | interface ____ a single metric used to annotate a field or a method
* in the class.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@Documented
@Target({ElementType.FIELD, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @ | for |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/expression/SqmJsonTableFunction.java | {
"start": 12858,
"end": 13218
} | interface ____ extends SqmCacheable {
ColumnDefinition copy(SqmCopyContext context);
JsonTableColumnDefinition convertToSqlAst(SqmToSqlAstConverter walker);
void appendHqlString(StringBuilder sb, SqmRenderContext context);
int populateTupleType(int offset, String[] componentNames, SqmExpressible<?>[] componentTypes);
}
static final | ColumnDefinition |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AfterAssertionErrorCollected.java | {
"start": 1075,
"end": 1752
} | class ____ extend {@link AbstractSoftAssertions} as this is where the actual soft
* assertion errors collection is implemented.
* <p>
* If you just use the standard soft assertions classes provided by AssertJ, you can register your callback with
* {@link AbstractSoftAssertions#setAfterAssertionErrorCollected(AfterAssertionErrorCollected) setAfterAssertionErrorCollected(AfterAssertionErrorCollected)} or
* {@link AbstractSoftAssertions#addAfterAssertionErrorCollected(AfterAssertionErrorCollected) addAfterAssertionErrorCollected(AfterAssertionErrorCollected)} if you have many.
* <p>
* Example with custom soft assertions:
* <pre><code class='java'> | must |
java | elastic__elasticsearch | libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java | {
"start": 1183,
"end": 1475
} | interface ____ stores internally and sorts all samples to calculate quantiles and CDFs.
* It provides perfect quantile and cdf calculations and matches or exceeds the performance of MergingDigest at least for millions of
* samples, at the expense of allocating much more memory.
*/
public | that |
java | elastic__elasticsearch | modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java | {
"start": 19586,
"end": 19692
} | class ____ {
private int docID = -1;
private MemoryIndex memoryIndex;
}
}
| MemoryIndexEntry |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/env/DefaultPropertiesPropertySource.java | {
"start": 1418,
"end": 4636
} | class ____ extends MapPropertySource {
/**
* The name of the 'default properties' property source.
*/
public static final String NAME = "defaultProperties";
/**
* Create a new {@link DefaultPropertiesPropertySource} with the given {@code Map}
* source.
* @param source the source map
*/
public DefaultPropertiesPropertySource(Map<String, Object> source) {
super(NAME, source);
}
/**
* Return {@code true} if the given source is named 'defaultProperties'.
* @param propertySource the property source to check
* @return {@code true} if the name matches
*/
public static boolean hasMatchingName(@Nullable PropertySource<?> propertySource) {
return (propertySource != null) && propertySource.getName().equals(NAME);
}
/**
* Create a new {@link DefaultPropertiesPropertySource} instance if the provided
* source is not empty.
* @param source the {@code Map} source
* @param action the action used to consume the
* {@link DefaultPropertiesPropertySource}
*/
public static void ifNotEmpty(@Nullable Map<String, Object> source,
@Nullable Consumer<DefaultPropertiesPropertySource> action) {
if (!CollectionUtils.isEmpty(source) && action != null) {
action.accept(new DefaultPropertiesPropertySource(source));
}
}
/**
* Add a new {@link DefaultPropertiesPropertySource} or merge with an existing one.
* @param source the {@code Map} source
* @param sources the existing sources
* @since 2.4.4
*/
public static void addOrMerge(Map<String, Object> source, MutablePropertySources sources) {
if (!CollectionUtils.isEmpty(source)) {
Map<String, Object> resultingSource = new HashMap<>();
DefaultPropertiesPropertySource propertySource = new DefaultPropertiesPropertySource(resultingSource);
if (sources.contains(NAME)) {
mergeIfPossible(source, sources, resultingSource);
sources.replace(NAME, propertySource);
}
else {
resultingSource.putAll(source);
sources.addLast(propertySource);
}
}
}
@SuppressWarnings("unchecked")
private static void mergeIfPossible(Map<String, Object> source, MutablePropertySources sources,
Map<String, Object> resultingSource) {
PropertySource<?> existingSource = sources.get(NAME);
if (existingSource != null) {
Object underlyingSource = existingSource.getSource();
if (underlyingSource instanceof Map) {
resultingSource.putAll((Map<String, Object>) underlyingSource);
}
resultingSource.putAll(source);
}
}
/**
* Move the 'defaultProperties' property source so that it's the last source in the
* given {@link ConfigurableEnvironment}.
* @param environment the environment to update
*/
public static void moveToEnd(ConfigurableEnvironment environment) {
moveToEnd(environment.getPropertySources());
}
/**
* Move the 'defaultProperties' property source so that it's the last source in the
* given {@link MutablePropertySources}.
* @param propertySources the property sources to update
*/
public static void moveToEnd(MutablePropertySources propertySources) {
PropertySource<?> propertySource = propertySources.remove(NAME);
if (propertySource != null) {
propertySources.addLast(propertySource);
}
}
}
| DefaultPropertiesPropertySource |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/date/DateParseTest10.java | {
"start": 867,
"end": 1095
} | class ____ {
private long value;
public long getValue() {
return value;
}
public VO setValue(long value) {
this.value = value;
return this;
}
}
}
| VO |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java | {
"start": 10810,
"end": 11167
} | class ____ {
private final String jobId;
private final String forecastId;
private JobForecastId(String jobId, String forecastId) {
this.jobId = jobId;
this.forecastId = forecastId;
}
boolean hasNullValue() {
return jobId == null || forecastId == null;
}
}
}
| JobForecastId |
java | apache__camel | components/camel-micrometer/src/test/java/org/apache/camel/component/micrometer/messagehistory/ExceptionRouteMicrometerMessageHistoryTest.java | {
"start": 1457,
"end": 3906
} | class ____ extends CamelTestSupport {
protected final Logger log = LoggerFactory.getLogger(getClass());
private MeterRegistry registry = new SimpleMeterRegistry();
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
MicrometerMessageHistoryFactory factory = new MicrometerMessageHistoryFactory();
factory.setMeterRegistry(registry);
context.setMessageHistoryFactory(factory);
return context;
}
@Test
public void testMetricsHistory() throws Exception {
getMockEndpoint("mock:foo").expectedMessageCount(5);
getMockEndpoint("mock:bar").expectedMessageCount(5);
getMockEndpoint("mock:baz").expectedMessageCount(0);
getMockEndpoint("mock:exception").expectedMessageCount(5);
for (int i = 0; i < 10; i++) {
if (i % 2 == 0) {
template.sendBody("seda:foo", "Hello " + i);
} else {
template.sendBody("seda:bar", "Hello " + i);
}
}
MockEndpoint.assertIsSatisfied(context);
// there should be 3 names
assertEquals(4, registry.getMeters().size());
// get the message history service
MicrometerMessageHistoryService service = context.hasService(MicrometerMessageHistoryService.class);
assertNotNull(service);
String json = service.dumpStatisticsAsJson();
assertNotNull(json);
log.info(json);
assertTrue(json.contains("\"nodeId\" : \"foo\""));
assertTrue(json.contains("\"nodeId\" : \"bar\""));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
onException(Exception.class)
.routeId("ExceptionRoute")
.log("Exception received.")
.to("mock:exception").id("exception");
from("seda:foo")
.to("mock:foo").id("foo");
from("seda:bar")
.to("mock:bar").id("bar")
.process(exchange -> {
throw new Exception("Metrics Exception");
})
.to("mock:baz").id("baz");
}
};
}
}
| ExceptionRouteMicrometerMessageHistoryTest |
java | qos-ch__slf4j | jcl-over-slf4j/src/main/java/org/apache/commons/logging/impl/SimpleLog.java | {
"start": 21684,
"end": 22168
} | class ____
return classLoader;
}
private static InputStream getResourceAsStream(final String name) {
return AccessController.doPrivileged((PrivilegedAction<InputStream>) () -> {
ClassLoader threadCL = getContextClassLoader();
if (threadCL != null) {
return threadCL.getResourceAsStream(name);
} else {
return ClassLoader.getSystemResourceAsStream(name);
}
});
}
}
| loader |
java | grpc__grpc-java | services/src/main/java/io/grpc/services/AdminInterface.java | {
"start": 1352,
"end": 2689
} | class ____ {
private static final int DEFAULT_CHANNELZ_MAX_PAGE_SIZE = 100;
private static final Logger logger = Logger.getLogger(AdminInterface.class.getName());
// Do not instantiate.
private AdminInterface() {}
/**
* Returns a list of gRPC's built-in admin services.
*
* @return list of standard admin services
*/
public static List<ServerServiceDefinition> getStandardServices() {
List<ServerServiceDefinition> services = new ArrayList<>();
services.add(ChannelzService.newInstance(DEFAULT_CHANNELZ_MAX_PAGE_SIZE).bindService());
BindableService csds = null;
try {
Class<?> clazz = Class.forName("io.grpc.xds.CsdsService");
Method m = clazz.getMethod("newInstance");
csds = (BindableService) m.invoke(null);
} catch (ClassNotFoundException e) {
logger.log(Level.FINE, "Unable to find CSDS service", e);
} catch (NoSuchMethodException e) {
logger.log(Level.FINE, "Unable to load CSDS service", e);
} catch (IllegalAccessException e) {
logger.log(Level.FINE, "Unable to load CSDS service", e);
} catch (InvocationTargetException e) {
logger.log(Level.FINE, "Unable to load CSDS service", e);
}
if (csds != null) {
services.add(csds.bindService());
}
return Collections.unmodifiableList(services);
}
}
| AdminInterface |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestByteBufferPread.java | {
"start": 1806,
"end": 11019
} | class ____ {
private static MiniDFSCluster cluster;
private static FileSystem fs;
private static byte[] fileContents;
private static Path testFile;
private static Random rand;
private static final long SEED = 0xDEADBEEFL;
private static final int BLOCK_SIZE = 4096;
private static final int FILE_SIZE = 12 * BLOCK_SIZE;
@BeforeAll
public static void setup() throws IOException {
// Setup the cluster with a small block size so we can create small files
// that span multiple blocks
Configuration conf = new Configuration();
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
fs = cluster.getFileSystem();
// Create a test file that spans 12 blocks, and contains a bunch of random
// bytes
fileContents = new byte[FILE_SIZE];
rand = new Random(SEED);
rand.nextBytes(fileContents);
testFile = new Path("/byte-buffer-pread-test.dat");
try (FSDataOutputStream out = fs.create(testFile, (short) 3)) {
out.write(fileContents);
}
}
/**
* Test preads with {@link java.nio.HeapByteBuffer}s.
*/
@Test
public void testPreadWithHeapByteBuffer() throws IOException {
testPreadWithByteBuffer(ByteBuffer.allocate(FILE_SIZE));
testPreadWithFullByteBuffer(ByteBuffer.allocate(FILE_SIZE));
testPreadWithPositionedByteBuffer(ByteBuffer.allocate(FILE_SIZE));
testPreadWithLimitedByteBuffer(ByteBuffer.allocate(FILE_SIZE));
testPositionedPreadWithByteBuffer(ByteBuffer.allocate(FILE_SIZE));
testPreadFullyWithByteBuffer(ByteBuffer.allocate(FILE_SIZE));
}
/**
* Test preads with {@link java.nio.DirectByteBuffer}s.
*/
@Test
public void testPreadWithDirectByteBuffer() throws IOException {
testPreadWithByteBuffer(ByteBuffer.allocateDirect(FILE_SIZE));
testPreadWithFullByteBuffer(ByteBuffer.allocateDirect(FILE_SIZE));
testPreadWithPositionedByteBuffer(ByteBuffer.allocateDirect(FILE_SIZE));
testPreadWithLimitedByteBuffer(ByteBuffer.allocateDirect(FILE_SIZE));
testPositionedPreadWithByteBuffer(ByteBuffer.allocateDirect(FILE_SIZE));
testPreadFullyWithByteBuffer(ByteBuffer.allocateDirect(FILE_SIZE));
}
/**
* Reads the entire testFile using the pread API and validates that its
* contents are properly loaded into the supplied {@link ByteBuffer}.
*/
private void testPreadWithByteBuffer(ByteBuffer buffer) throws IOException {
int bytesRead;
int totalBytesRead = 0;
try (FSDataInputStream in = fs.open(testFile)) {
while ((bytesRead = in.read(totalBytesRead, buffer)) > 0) {
totalBytesRead += bytesRead;
// Check that each call to read changes the position of the ByteBuffer
// correctly
assertEquals(totalBytesRead, buffer.position());
}
// Make sure the buffer is full
assertFalse(buffer.hasRemaining());
// Make sure the contents of the read buffer equal the contents of the
// file
buffer.position(0);
byte[] bufferContents = new byte[FILE_SIZE];
buffer.get(bufferContents);
assertArrayEquals(bufferContents, fileContents);
}
}
/**
* Attempts to read the testFile into a {@link ByteBuffer} that is already
* full, and validates that doing so does not change the contents of the
* supplied {@link ByteBuffer}.
*/
private void testPreadWithFullByteBuffer(ByteBuffer buffer)
throws IOException {
// Load some dummy data into the buffer
byte[] existingBufferBytes = new byte[FILE_SIZE];
rand.nextBytes(existingBufferBytes);
buffer.put(existingBufferBytes);
// Make sure the buffer is full
assertFalse(buffer.hasRemaining());
try (FSDataInputStream in = fs.open(testFile)) {
// Attempt to read into the buffer, 0 bytes should be read since the
// buffer is full
assertEquals(0, in.read(buffer));
// Double check the buffer is still full and its contents have not
// changed
assertFalse(buffer.hasRemaining());
buffer.position(0);
byte[] bufferContents = new byte[FILE_SIZE];
buffer.get(bufferContents);
assertArrayEquals(bufferContents, existingBufferBytes);
}
}
/**
* Reads half of the testFile into the {@link ByteBuffer} by setting a
* {@link ByteBuffer#limit()} on the buffer. Validates that only half of the
* testFile is loaded into the buffer.
*/
private void testPreadWithLimitedByteBuffer(
ByteBuffer buffer) throws IOException {
int bytesRead;
int totalBytesRead = 0;
// Set the buffer limit to half the size of the file
buffer.limit(FILE_SIZE / 2);
try (FSDataInputStream in = fs.open(testFile)) {
while ((bytesRead = in.read(totalBytesRead, buffer)) > 0) {
totalBytesRead += bytesRead;
// Check that each call to read changes the position of the ByteBuffer
// correctly
assertEquals(totalBytesRead, buffer.position());
}
// Since we set the buffer limit to half the size of the file, we should
// have only read half of the file into the buffer
assertEquals(totalBytesRead, FILE_SIZE / 2);
// Check that the buffer is full and the contents equal the first half of
// the file
assertFalse(buffer.hasRemaining());
buffer.position(0);
byte[] bufferContents = new byte[FILE_SIZE / 2];
buffer.get(bufferContents);
assertArrayEquals(bufferContents,
Arrays.copyOfRange(fileContents, 0, FILE_SIZE / 2));
}
}
/**
* Reads half of the testFile into the {@link ByteBuffer} by setting the
* {@link ByteBuffer#position()} the half the size of the file. Validates that
* only half of the testFile is loaded into the buffer.
*/
private void testPreadWithPositionedByteBuffer(
ByteBuffer buffer) throws IOException {
int bytesRead;
int totalBytesRead = 0;
// Set the buffer position to half the size of the file
buffer.position(FILE_SIZE / 2);
try (FSDataInputStream in = fs.open(testFile)) {
while ((bytesRead = in.read(totalBytesRead, buffer)) > 0) {
totalBytesRead += bytesRead;
// Check that each call to read changes the position of the ByteBuffer
// correctly
assertEquals(totalBytesRead + FILE_SIZE / 2, buffer.position());
}
// Since we set the buffer position to half the size of the file, we
// should have only read half of the file into the buffer
assertEquals(totalBytesRead, FILE_SIZE / 2);
// Check that the buffer is full and the contents equal the first half of
// the file
assertFalse(buffer.hasRemaining());
buffer.position(FILE_SIZE / 2);
byte[] bufferContents = new byte[FILE_SIZE / 2];
buffer.get(bufferContents);
assertArrayEquals(bufferContents,
Arrays.copyOfRange(fileContents, 0, FILE_SIZE / 2));
}
}
/**
* Reads half of the testFile into the {@link ByteBuffer} by specifying a
* position for the pread API that is half of the file size. Validates that
* only half of the testFile is loaded into the buffer.
*/
private void testPositionedPreadWithByteBuffer(
ByteBuffer buffer) throws IOException {
int bytesRead;
int totalBytesRead = 0;
try (FSDataInputStream in = fs.open(testFile)) {
// Start reading from halfway through the file
while ((bytesRead = in.read(totalBytesRead + FILE_SIZE / 2,
buffer)) > 0) {
totalBytesRead += bytesRead;
// Check that each call to read changes the position of the ByteBuffer
// correctly
assertEquals(totalBytesRead, buffer.position());
}
// Since we starting reading halfway through the file, the buffer should
// only be half full
assertEquals(totalBytesRead, FILE_SIZE / 2);
assertEquals(buffer.position(), FILE_SIZE / 2);
assertTrue(buffer.hasRemaining());
// Check that the buffer contents equal the second half of the file
buffer.position(0);
byte[] bufferContents = new byte[FILE_SIZE / 2];
buffer.get(bufferContents);
assertArrayEquals(bufferContents,
Arrays.copyOfRange(fileContents, FILE_SIZE / 2, FILE_SIZE));
}
}
/**
* Reads the entire testFile using the preadFully API and validates that its
* contents are properly loaded into the supplied {@link ByteBuffer}.
*/
private void testPreadFullyWithByteBuffer(ByteBuffer buffer)
throws IOException {
int totalBytesRead = 0;
try (FSDataInputStream in = fs.open(testFile)) {
in.readFully(totalBytesRead, buffer);
// Make sure the buffer is full
assertFalse(buffer.hasRemaining());
// Make sure the contents of the read buffer equal the contents of the
// file
buffer.position(0);
byte[] bufferContents = new byte[FILE_SIZE];
buffer.get(bufferContents);
assertArrayEquals(bufferContents, fileContents);
}
}
@AfterAll
public static void shutdown() throws IOException {
try {
fs.delete(testFile, false);
fs.close();
} finally {
cluster.shutdown(true);
}
}
}
| TestByteBufferPread |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/provider/PersistenceProviderUnitTests.java | {
"start": 7586,
"end": 8281
} | class ____ extends AbstractEntityManagerFactoryBean {
public MyEntityManagerFactoryBean(Class<? extends EntityManagerFactory> entityManagerFactoryInterface,
EntityManagerFactory entityManagerFactory) {
setEntityManagerFactoryInterface(entityManagerFactoryInterface);
ReflectionTestUtils.setField(this, "nativeEntityManagerFactory", entityManagerFactory);
}
@Override
protected EntityManagerFactory createNativeEntityManagerFactory() throws PersistenceException {
return null;
}
@Override
protected EntityManagerFactory createEntityManagerFactoryProxy(EntityManagerFactory emf) {
return super.createEntityManagerFactoryProxy(emf);
}
}
}
| MyEntityManagerFactoryBean |
java | quarkusio__quarkus | extensions/elytron-security-jdbc/deployment/src/test/java/io/quarkus/elytron/security/jdbc/CustomRoleDecoderTest.java | {
"start": 208,
"end": 850
} | class ____ extends JdbcSecurityRealmTest {
static Class[] testClassesWithCustomRoleDecoder = Stream.concat(
Arrays.stream(testClasses),
Arrays.stream(new Class[] { CustomRoleDecoder.class })).toArray(Class[]::new);
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(testClassesWithCustomRoleDecoder)
.addAsResource("custom-role-decoder/import.sql")
.addAsResource("custom-role-decoder/application.properties", "application.properties"));
}
| CustomRoleDecoderTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/table/UnnestRowsFunction.java | {
"start": 2281,
"end": 3152
} | class ____ extends UnnestTableFunctionBase {
private static final long serialVersionUID = 1L;
private final ArrayData.ElementGetter elementGetter;
public CollectionUnnestFunction(
SpecializedContext context,
LogicalType elementType,
ArrayData.ElementGetter elementGetter) {
super(context, elementType, false);
this.elementGetter = elementGetter;
}
public void eval(ArrayData arrayData) {
evalArrayData(arrayData, elementGetter, (element, position) -> collect(element));
}
public void eval(MapData mapData) {
evalMultisetData(mapData, elementGetter, (element, position) -> collect(element));
}
}
/** Table function that unwraps the elements of a map. */
public static final | CollectionUnnestFunction |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/filewatch/FileChangeListener.java | {
"start": 860,
"end": 1046
} | interface ____ {
/**
* Called when files have been changed.
* @param changeSet a set of the {@link ChangedFiles}
*/
void onChange(Set<ChangedFiles> changeSet);
}
| FileChangeListener |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MarkableIteratorInterface.java | {
"start": 1362,
"end": 1874
} | interface ____<VALUE> extends Iterator<VALUE> {
/**
* Mark the current record. A subsequent call to reset will rewind
* the iterator to this record.
* @throws IOException
*/
void mark() throws IOException;
/**
* Reset the iterator to the last record before a call to the previous mark
* @throws IOException
*/
void reset() throws IOException;
/**
* Clear any previously set mark
* @throws IOException
*/
void clearMark() throws IOException;
}
| MarkableIteratorInterface |
java | apache__dubbo | dubbo-plugin/dubbo-security/src/main/java/org/apache/dubbo/security/cert/DubboCertManager.java | {
"start": 15781,
"end": 16405
} | class ____ {
private final PublicKey publicKey;
private final PrivateKey privateKey;
private final ContentSigner signer;
public KeyPair(PublicKey publicKey, PrivateKey privateKey, ContentSigner signer) {
this.publicKey = publicKey;
this.privateKey = privateKey;
this.signer = signer;
}
public PublicKey getPublicKey() {
return publicKey;
}
public PrivateKey getPrivateKey() {
return privateKey;
}
public ContentSigner getSigner() {
return signer;
}
}
}
| KeyPair |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestJarFinder.java | {
"start": 2525,
"end": 4354
} | class ____ is for sure in a directory in the classpath
//in this case the JAR is created on the fly
String jar = JarFinder.getJar(TestJarFinder.class);
assertTrue(new File(jar).exists());
}
@Test
public void testExistingManifest() throws Exception {
File dir = GenericTestUtils
.getTestDir(TestJarFinder.class.getName() + "-testExistingManifest");
delete(dir);
dir.mkdirs();
File metaInfDir = new File(dir, "META-INF");
metaInfDir.mkdirs();
File manifestFile = new File(metaInfDir, "MANIFEST.MF");
Manifest manifest = new Manifest();
OutputStream os = new FileOutputStream(manifestFile);
manifest.write(os);
os.close();
File propsFile = new File(dir, "props.properties");
Writer writer = new FileWriter(propsFile);
new Properties().store(writer, "");
writer.close();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
JarOutputStream zos = new JarOutputStream(baos);
JarFinder.jarDir(dir, "", zos);
JarInputStream jis =
new JarInputStream(new ByteArrayInputStream(baos.toByteArray()));
assertNotNull(jis.getManifest());
jis.close();
}
@Test
public void testNoManifest() throws Exception {
File dir = GenericTestUtils
.getTestDir(TestJarFinder.class.getName() + "-testNoManifest");
delete(dir);
dir.mkdirs();
File propsFile = new File(dir, "props.properties");
Writer writer = new FileWriter(propsFile);
new Properties().store(writer, "");
writer.close();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
JarOutputStream zos = new JarOutputStream(baos);
JarFinder.jarDir(dir, "", zos);
JarInputStream jis =
new JarInputStream(new ByteArrayInputStream(baos.toByteArray()));
assertNotNull(jis.getManifest());
jis.close();
}
}
| that |
java | alibaba__nacos | lock/src/main/java/com/alibaba/nacos/lock/exception/NacosLockException.java | {
"start": 755,
"end": 1327
} | class ____ extends RuntimeException {
public NacosLockException() {
}
public NacosLockException(String message) {
super(message);
}
public NacosLockException(String message, Throwable cause) {
super(message, cause);
}
public NacosLockException(Throwable cause) {
super(cause);
}
public NacosLockException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
| NacosLockException |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/JSR166TestCase.java | {
"start": 31032,
"end": 31161
} | class ____ implements Callable<Integer> {
@Override
public Integer call() {
return one;
}
}
public | CallableOne |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxIterable.java | {
"start": 5928,
"end": 12586
} | class ____<T>
implements InnerProducer<T>, SynchronousSubscription<T>, Consumer<T> {
final CoreSubscriber<? super T> actual;
final Spliterator<? extends T> spliterator;
final boolean knownToBeFinite;
final @Nullable Runnable onClose;
volatile boolean cancelled;
volatile long requested;
@SuppressWarnings("rawtypes")
static final AtomicLongFieldUpdater<IterableSubscription> REQUESTED =
AtomicLongFieldUpdater.newUpdater(IterableSubscription.class,
"requested");
int state;
/**
* Indicates that the iterator's hasNext returned true before but the value is not
* yet retrieved.
*/
static final int STATE_HAS_NEXT_NO_VALUE = 0;
/**
* Indicates that there is a value available in current.
*/
static final int STATE_HAS_NEXT_HAS_VALUE = 1;
/**
* Indicates that there are no more values available.
*/
static final int STATE_NO_NEXT = 2;
/**
* Indicates that the value has been consumed and a new value should be retrieved.
*/
static final int STATE_CALL_HAS_NEXT = 3;
@Nullable T current;
boolean valueReady = false;
@Nullable T nextElement;
@Nullable Throwable hasNextFailure;
IterableSubscription(CoreSubscriber<? super T> actual,
Spliterator<? extends T> spliterator, boolean knownToBeFinite, @Nullable Runnable onClose) {
this.actual = actual;
this.spliterator = spliterator;
this.knownToBeFinite = knownToBeFinite;
this.onClose = onClose;
}
IterableSubscription(CoreSubscriber<? super T> actual,
Spliterator<? extends T> spliterator, boolean knownToBeFinite) {
this(actual, spliterator, knownToBeFinite, null);
}
@Override
public void accept(T t) {
valueReady = true;
nextElement = t;
}
boolean hasNext() {
if (!valueReady)
spliterator.tryAdvance(this);
return valueReady;
}
@Nullable T next() {
if (!valueReady && !hasNext())
throw new NoSuchElementException();
else {
valueReady = false;
T t = nextElement;
nextElement = null;
return t;
}
}
@Override
public void request(long n) {
if (Operators.validate(n)) {
if (Operators.addCap(REQUESTED, this, n) == 0) {
if (n == Long.MAX_VALUE) {
fastPath();
}
else {
slowPath(n);
}
}
}
}
private void onCloseWithDropError() {
if (onClose != null) {
try {
onClose.run();
}
catch (Throwable t) {
Operators.onErrorDropped(t, actual.currentContext());
}
}
}
void slowPath(long n) {
final Subscriber<? super T> s = actual;
long e = 0L;
for (; ; ) {
while (e != n) {
T t;
try {
t = Objects.requireNonNull(next(),
"The iterator returned a null value");
}
catch (Throwable ex) {
s.onError(ex);
onCloseWithDropError();
return;
}
if (cancelled) {
return;
}
s.onNext(t);
if (cancelled) {
return;
}
boolean b;
try {
b = hasNext();
}
catch (Throwable ex) {
s.onError(ex);
onCloseWithDropError();
return;
}
if (cancelled) {
return;
}
if (!b) {
s.onComplete();
onCloseWithDropError();
return;
}
e++;
}
n = requested;
if (n == e) {
n = REQUESTED.addAndGet(this, -e);
if (n == 0L) {
return;
}
e = 0L;
}
}
}
void fastPath() {
final Subscriber<? super T> s = actual;
for (; ; ) {
if (cancelled) {
return;
}
T t;
try {
t = Objects.requireNonNull(next(),
"The iterator returned a null value");
}
catch (Exception ex) {
s.onError(ex);
onCloseWithDropError();
return;
}
if (cancelled) {
return;
}
s.onNext(t);
if (cancelled) {
return;
}
boolean b;
try {
b = hasNext();
}
catch (Exception ex) {
s.onError(ex);
onCloseWithDropError();
return;
}
if (cancelled) {
return;
}
if (!b) {
s.onComplete();
onCloseWithDropError();
return;
}
}
}
@Override
public void cancel() {
onCloseWithDropError();
cancelled = true;
Operators.onDiscard(nextElement, actual.currentContext());
Operators.onDiscardMultiple(this.spliterator, this.knownToBeFinite, actual.currentContext());
}
@Override
public CoreSubscriber<? super T> actual() {
return actual;
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.CANCELLED) return cancelled;
if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return requested;
if (key == Attr.TERMINATED) return state == STATE_NO_NEXT;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return InnerProducer.super.scanUnsafe(key);
}
@Override
public void clear() {
Operators.onDiscard(nextElement, actual.currentContext());
Operators.onDiscardMultiple(this.spliterator, this.knownToBeFinite, actual.currentContext());
state = STATE_NO_NEXT;
}
@Override
public boolean isEmpty() {
int s = state;
if (s == STATE_NO_NEXT) {
return true;
}
else if (cancelled && !knownToBeFinite) {
return true; //interrupts poll in discard loops due to cancellation
}
else if (s == STATE_HAS_NEXT_HAS_VALUE || s == STATE_HAS_NEXT_NO_VALUE) {
return false;
}
else {
boolean hasNext;
try {
hasNext = hasNext();
}
catch (Throwable t) {
//this is a corner case, most Iterators are not expected to throw in hasNext.
//since most calls to isEmpty are in preparation for poll() in fusion, we "defer"
//the exception by pretending queueSub isn't empty, but keeping track of exception
//to be re-thrown by a subsequent call to poll()
state = STATE_HAS_NEXT_NO_VALUE;
hasNextFailure = t;
return false;
}
if (hasNext) {
state = STATE_HAS_NEXT_NO_VALUE;
return false;
}
state = STATE_NO_NEXT;
return true;
}
}
@Override
public @Nullable T poll() {
if (hasNextFailure != null) {
state = STATE_NO_NEXT;
throw Exceptions.propagate(hasNextFailure);
}
if (!isEmpty()) {
T c;
if (state == STATE_HAS_NEXT_NO_VALUE) {
c = next();
}
else {
c = current;
current = null;
}
state = STATE_CALL_HAS_NEXT;
if (c == null) {
onCloseWithDropError();
throw new NullPointerException("iterator returned a null value");
}
return c;
}
onCloseWithDropError();
return null;
}
@Override
public int size() {
if (state == STATE_NO_NEXT) {
return 0;
}
return 1;
}
}
static final | IterableSubscription |
java | apache__flink | flink-core/src/test/java/org/apache/flink/core/memory/DataInputOutputSerializerTest.java | {
"start": 1449,
"end": 5220
} | class ____ {
@Test
void testWrapAsByteBuffer() throws IOException {
SerializationTestType randomInt = Util.randomRecord(SerializationTestTypeFactory.INT);
DataOutputSerializer serializer = new DataOutputSerializer(randomInt.length());
MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(randomInt.length());
// empty buffer, read buffer should be empty
ByteBuffer wrapper = serializer.wrapAsByteBuffer();
assertThat(wrapper.position()).isZero();
assertThat(wrapper.limit()).isZero();
// write to data output, read buffer should still be empty
randomInt.write(serializer);
assertThat(wrapper.position()).isZero();
assertThat(wrapper.limit()).isZero();
// get updated read buffer, read buffer should contain written data
wrapper = serializer.wrapAsByteBuffer();
assertThat(wrapper.position()).isZero();
assertThat(wrapper.limit()).isEqualTo(randomInt.length());
// clear data output, read buffer should still contain written data
serializer.clear();
assertThat(wrapper.position()).isZero();
assertThat(wrapper.limit()).isEqualTo(randomInt.length());
// get updated read buffer, should be empty
wrapper = serializer.wrapAsByteBuffer();
assertThat(wrapper.position()).isZero();
assertThat(wrapper.limit()).isZero();
// write to data output and read back to memory
randomInt.write(serializer);
wrapper = serializer.wrapAsByteBuffer();
segment.put(0, wrapper, randomInt.length());
assertThat(wrapper.position()).isEqualTo(randomInt.length());
assertThat(wrapper.limit()).isEqualTo(randomInt.length());
}
@Test
void testRandomValuesWriteRead()
throws IOException, InstantiationException, IllegalAccessException {
final int numElements = 100000;
final ArrayDeque<SerializationTestType> reference = new ArrayDeque<>();
DataOutputSerializer serializer = new DataOutputSerializer(1);
for (SerializationTestType value : Util.randomRecords(numElements)) {
reference.add(value);
value.write(serializer);
}
DataInputDeserializer deserializer =
new DataInputDeserializer(serializer.wrapAsByteBuffer());
for (SerializationTestType expected : reference) {
SerializationTestType actual = expected.getClass().newInstance();
actual.read(deserializer);
assertThat(actual).isEqualTo(expected);
}
reference.clear();
}
@Test
void testLongUTFWriteRead() throws IOException {
byte[] array = new byte[1000];
new Random(1).nextBytes(array);
String expected = new String(array, Charset.forName("UTF-8"));
DataOutputSerializer serializer = new DataOutputSerializer(1);
serializer.writeLongUTF(expected);
DataInputDeserializer deserializer =
new DataInputDeserializer(serializer.getSharedBuffer());
String actual = deserializer.readLongUTF();
assertThat(actual).isEqualTo(expected);
}
@Test
void testUTFWriteRead() throws IOException {
byte[] array = new byte[1000];
new Random(1).nextBytes(array);
String expected = new String(array, StandardCharsets.UTF_8);
DataOutputSerializer serializer = new DataOutputSerializer(1);
serializer.writeUTF(expected);
DataInputDeserializer deserializer =
new DataInputDeserializer(serializer.getSharedBuffer());
String actual = deserializer.readUTF();
assertThat(actual).isEqualTo(expected);
}
}
| DataInputOutputSerializerTest |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java | {
"start": 2309,
"end": 16699
} | class ____ extends AbstractSnapshotIntegTestCase {
public void testGetShardSnapshotFromUnknownRepoReturnsAnError() throws Exception {
boolean useMultipleUnknownRepositories = randomBoolean();
List<String> repositories = useMultipleUnknownRepositories ? List.of("unknown", "unknown-2") : List.of("unknown");
final ActionFuture<GetShardSnapshotResponse> responseFuture = getLatestSnapshotForShardFuture(repositories, "idx", 0, false);
if (useMultipleUnknownRepositories) {
GetShardSnapshotResponse response = responseFuture.get();
assertThat(response.getLatestShardSnapshot().isPresent(), is(equalTo(false)));
final Map<String, RepositoryException> failures = response.getRepositoryFailures();
for (String repository : repositories) {
RepositoryException repositoryException = failures.get(repository);
assertThat(repositoryException, is(notNullValue()));
assertThat(
repositoryException.getMessage(),
equalTo(Strings.format("[%s] Unable to find the latest snapshot for shard [[idx][0]]", repository))
);
}
} else {
expectThrows(RepositoryException.class, responseFuture);
}
disableRepoConsistencyCheck("This test checks an empty repository");
}
public void testGetShardSnapshotFromEmptyRepositoryReturnsEmptyResult() {
final String fsRepoName = randomAlphaOfLength(10);
createRepository(fsRepoName, FsRepository.TYPE);
final Optional<ShardSnapshotInfo> indexShardSnapshotInfo = getLatestSnapshotForShard(fsRepoName, "test", 0);
assertThat(indexShardSnapshotInfo.isEmpty(), equalTo(true));
disableRepoConsistencyCheck("This test checks an empty repository");
}
public void testGetShardSnapshotFromUnknownIndexReturnsEmptyResult() {
final String fsRepoName = randomAlphaOfLength(10);
createRepository(fsRepoName, FsRepository.TYPE);
createSnapshot(fsRepoName, "snap-1", Collections.emptyList());
final Optional<ShardSnapshotInfo> indexShardSnapshotInfo = getLatestSnapshotForShard(fsRepoName, "test", 0);
assertThat(indexShardSnapshotInfo.isEmpty(), equalTo(true));
}
public void testGetShardSnapshotFromUnknownShardReturnsEmptyResult() {
final String fsRepoName = randomAlphaOfLength(10);
final String indexName = "test-idx";
createIndexWithContent(indexName);
createRepository(fsRepoName, FsRepository.TYPE);
createSnapshot(fsRepoName, "snap-1", Collections.singletonList(indexName));
final Optional<ShardSnapshotInfo> indexShardSnapshotInfo = getLatestSnapshotForShard(fsRepoName, indexName, 100);
assertThat(indexShardSnapshotInfo.isEmpty(), equalTo(true));
}
public void testGetShardSnapshotOnEmptyRepositoriesListThrowsAnError() {
expectThrows(IllegalArgumentException.class, () -> getLatestSnapshotForShardFuture(Collections.emptyList(), "idx", 0, false));
}
public void testGetShardSnapshotReturnsTheLatestSuccessfulSnapshot() throws Exception {
final String repoName = "repo-name";
final Path repoPath = randomRepoPath();
createRepository(repoName, FsRepository.TYPE, repoPath);
final boolean useBwCFormat = randomBoolean();
if (useBwCFormat) {
final IndexVersion version = randomVersionBetween(random(), IndexVersions.V_7_5_0, IndexVersion.current());
initWithSnapshotVersion(repoName, repoPath, version);
}
createSnapshot(repoName, "empty-snap", Collections.emptyList());
final String indexName = "test";
final String indexName2 = "test-2";
List<String> indices = List.of(indexName, indexName2);
createIndex(indexName, indexName2);
SnapshotInfo lastSnapshot = null;
String expectedIndexMetadataId = null;
int numSnapshots = randomIntBetween(5, 25);
for (int i = 0; i < numSnapshots; i++) {
if (randomBoolean()) {
indexRandomDocs(indexName, 5);
indexRandomDocs(indexName2, 10);
}
final List<String> snapshotIndices = randomSubsetOf(indices);
final SnapshotInfo snapshotInfo = createSnapshot(repoName, Strings.format("snap-%03d", i), snapshotIndices);
if (snapshotInfo.indices().contains(indexName)) {
lastSnapshot = snapshotInfo;
ClusterStateResponse clusterStateResponse = admin().cluster().prepareState(TEST_REQUEST_TIMEOUT).get();
IndexMetadata indexMetadata = clusterStateResponse.getState().metadata().getProject().index(indexName);
expectedIndexMetadataId = IndexMetaDataGenerations.buildUniqueIdentifier(indexMetadata);
}
}
if (useBwCFormat) {
// Reload the RepositoryData so we don't use cached data that wasn't serialized
assertAcked(clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName).get());
createRepository(repoName, "fs", repoPath);
}
final Optional<ShardSnapshotInfo> indexShardSnapshotInfoOpt = getLatestSnapshotForShard(repoName, indexName, 0);
if (lastSnapshot == null) {
assertThat(indexShardSnapshotInfoOpt.isPresent(), equalTo(false));
} else {
assertThat(indexShardSnapshotInfoOpt.isPresent(), equalTo(true));
final ShardSnapshotInfo shardSnapshotInfo = indexShardSnapshotInfoOpt.get();
assertThat(shardSnapshotInfo.getIndexMetadataIdentifier(), equalTo(expectedIndexMetadataId));
final Snapshot snapshot = shardSnapshotInfo.getSnapshot();
assertThat(snapshot, equalTo(lastSnapshot.snapshot()));
}
}
public void testGetShardSnapshotWhileThereIsARunningSnapshot() throws Exception {
final String fsRepoName = randomAlphaOfLength(10);
createRepository(fsRepoName, "mock");
createSnapshot(fsRepoName, "empty-snap", Collections.emptyList());
final String indexName = "test-idx";
createIndexWithContent(indexName);
blockAllDataNodes(fsRepoName);
final String snapshotName = "snap-1";
final ActionFuture<CreateSnapshotResponse> snapshotFuture = clusterAdmin().prepareCreateSnapshot(
TEST_REQUEST_TIMEOUT,
fsRepoName,
snapshotName
).setIndices(indexName).setWaitForCompletion(true).execute();
waitForBlockOnAnyDataNode(fsRepoName);
assertThat(getLatestSnapshotForShard(fsRepoName, indexName, 0).isEmpty(), equalTo(true));
unblockAllDataNodes(fsRepoName);
assertSuccessful(snapshotFuture);
}
public void testGetShardSnapshotFailureHandlingLetOtherRepositoriesRequestsMakeProgress() throws Exception {
final String failingRepoName = randomAlphaOfLength(10);
createRepository(failingRepoName, "mock");
int repoCount = randomIntBetween(1, 10);
List<String> workingRepoNames = new ArrayList<>();
for (int i = 0; i < repoCount; i++) {
final String repoName = randomAlphaOfLength(10);
createRepository(repoName, "fs");
workingRepoNames.add(repoName);
}
final String indexName = "test-idx";
createIndexWithContent(indexName);
int snapshotIdx = 0;
Object[] args1 = new Object[] { snapshotIdx++ };
createSnapshot(failingRepoName, Strings.format("snap-%03d", args1), Collections.singletonList(indexName));
SnapshotInfo latestSnapshot = null;
for (String workingRepoName : workingRepoNames) {
Object[] args = new Object[] { snapshotIdx++ };
String snapshot = Strings.format("snap-%03d", args);
latestSnapshot = createSnapshot(workingRepoName, snapshot, Collections.singletonList(indexName));
}
final MockRepository repository = getRepositoryOnMaster(failingRepoName);
if (randomBoolean()) {
repository.setBlockAndFailOnReadIndexFiles();
} else {
repository.setBlockAndFailOnReadSnapFiles();
}
PlainActionFuture<GetShardSnapshotResponse> future = getLatestSnapshotForShardFuture(
CollectionUtils.appendToCopy(workingRepoNames, failingRepoName),
indexName,
0
);
waitForBlock(internalCluster().getMasterName(), failingRepoName);
repository.unblock();
final GetShardSnapshotResponse response = future.actionGet();
final Optional<RepositoryException> error = response.getFailureForRepository(failingRepoName);
assertThat(error.isPresent(), is(equalTo(true)));
assertThat(
error.get().getMessage(),
equalTo(Strings.format("[%s] Unable to find the latest snapshot for shard [[%s][0]]", failingRepoName, indexName))
);
for (String workingRepoName : workingRepoNames) {
assertThat(response.getFailureForRepository(workingRepoName).isEmpty(), is(equalTo(true)));
}
Optional<ShardSnapshotInfo> shardSnapshotInfoOpt = response.getLatestShardSnapshot();
assertThat(shardSnapshotInfoOpt.isPresent(), equalTo(true));
ShardSnapshotInfo shardSnapshotInfo = shardSnapshotInfoOpt.get();
assertThat(shardSnapshotInfo.getSnapshot(), equalTo(latestSnapshot.snapshot()));
assertThat(shardSnapshotInfo.getRepository(), equalTo(latestSnapshot.repository()));
}
public void testGetShardSnapshotInMultipleRepositoriesReturnsTheLatestSnapshot() {
int repoCount = randomIntBetween(2, 10);
List<String> repositories = new ArrayList<>();
for (int i = 0; i < repoCount; i++) {
final String repoName = randomAlphaOfLength(10);
createRepository(repoName, "fs");
repositories.add(repoName);
}
final String indexName = "test-idx";
createIndexWithContent(indexName);
int snapshotIdx = 0;
SnapshotInfo expectedLatestSnapshot = null;
for (String repository : repositories) {
Object[] args = new Object[] { snapshotIdx++ };
String snapshot = Strings.format("snap-%03d", args);
expectedLatestSnapshot = createSnapshot(repository, snapshot, Collections.singletonList(indexName));
}
GetShardSnapshotResponse response = getLatestSnapshotForShardFuture(repositories, indexName, 0).actionGet();
assertThat(response.getRepositoryFailures(), is(anEmptyMap()));
Optional<ShardSnapshotInfo> shardSnapshotInfoOpt = response.getLatestShardSnapshot();
assertThat(shardSnapshotInfoOpt.isPresent(), equalTo(true));
ShardSnapshotInfo shardSnapshotInfo = shardSnapshotInfoOpt.get();
assertThat(shardSnapshotInfo.getSnapshot(), equalTo(expectedLatestSnapshot.snapshot()));
assertThat(shardSnapshotInfo.getRepository(), equalTo(expectedLatestSnapshot.repository()));
}
public void testFailedSnapshotsAreNotReturned() throws Exception {
final String indexName = "test";
createIndexWithContent(indexName);
final String repoName = "test-repo";
createRepository(repoName, "mock");
for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) {
((MockRepository) repositoriesService.repository(repoName)).setBlockAndFailOnWriteSnapFiles();
}
clusterAdmin().prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, "snap")
.setIndices(indexName)
.setWaitForCompletion(false)
.setFeatureStates(NO_FEATURE_STATES_VALUE)
.get();
waitForBlockOnAnyDataNode(repoName);
for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) {
((MockRepository) repositoriesService.repository(repoName)).unblock();
}
assertBusy(() -> assertThat(getSnapshot(repoName, "snap").state(), equalTo(SnapshotState.PARTIAL)));
Optional<ShardSnapshotInfo> shardSnapshotInfo = getLatestSnapshotForShard(repoName, indexName, 0);
assertThat(shardSnapshotInfo.isEmpty(), equalTo(true));
final SnapshotInfo snapshotInfo = createSnapshot(repoName, "snap-1", Collections.singletonList(indexName));
Optional<ShardSnapshotInfo> latestSnapshotForShard = getLatestSnapshotForShard(repoName, indexName, 0);
assertThat(latestSnapshotForShard.isPresent(), equalTo(true));
assertThat(latestSnapshotForShard.get().getSnapshot(), equalTo(snapshotInfo.snapshot()));
assertThat(latestSnapshotForShard.get().getRepository(), equalTo(snapshotInfo.repository()));
}
private Optional<ShardSnapshotInfo> getLatestSnapshotForShard(String repository, String indexName, int shard) {
final GetShardSnapshotResponse response = getLatestSnapshotForShardFuture(Collections.singletonList(repository), indexName, shard)
.actionGet();
return response.getLatestShardSnapshot();
}
private PlainActionFuture<GetShardSnapshotResponse> getLatestSnapshotForShardFuture(
List<String> repositories,
String indexName,
int shard
) {
return getLatestSnapshotForShardFuture(repositories, indexName, shard, true);
}
private PlainActionFuture<GetShardSnapshotResponse> getLatestSnapshotForShardFuture(
List<String> repositories,
String indexName,
int shard,
boolean useAllRepositoriesRequest
) {
ShardId shardId = new ShardId(new Index(indexName, "__na__"), shard);
PlainActionFuture<GetShardSnapshotResponse> future = new PlainActionFuture<>();
final GetShardSnapshotRequest request;
if (useAllRepositoriesRequest && randomBoolean()) {
request = GetShardSnapshotRequest.latestSnapshotInAllRepositories(TEST_REQUEST_TIMEOUT, shardId);
} else {
request = GetShardSnapshotRequest.latestSnapshotInRepositories(TEST_REQUEST_TIMEOUT, shardId, repositories);
}
client().execute(TransportGetShardSnapshotAction.TYPE, request, future);
return future;
}
}
| IndexSnapshotsServiceIT |
java | google__error-prone | check_api/src/main/java/com/google/errorprone/dataflow/ConstantPropagationAnalysis.java | {
"start": 1017,
"end": 1756
} | class ____ {
private static final ConstantPropagationTransfer CONSTANT_PROPAGATION =
new ConstantPropagationTransfer();
/**
* Returns the value of the leaf of {@code exprPath}, if it is determined to be a constant (always
* evaluates to the same numeric value), and null otherwise. Note that returning null does not
* necessarily mean the expression is *not* a constant.
*/
public static @Nullable Number numberValue(TreePath exprPath, Context context) {
Constant val = DataFlow.expressionDataflow(exprPath, context, CONSTANT_PROPAGATION);
if (val == null || !val.isConstant()) {
return null;
}
return val.getValue();
}
private ConstantPropagationAnalysis() {}
}
| ConstantPropagationAnalysis |
java | apache__rocketmq | openmessaging/src/main/java/io/openmessaging/rocketmq/utils/BeanUtils.java | {
"start": 3669,
"end": 8054
} | class ____ properties are being populated
* @param properties Map keyed by property name, with the corresponding (String or String[]) value(s) to be set
* @param <T> Class type
* @return Class instance
*/
public static <T> T populate(final Properties properties, final Class<T> clazz) {
T obj = null;
try {
obj = clazz.getDeclaredConstructor().newInstance();
return populate(properties, obj);
} catch (Throwable e) {
log.warn("Error occurs !", e);
}
return obj;
}
public static <T> T populate(final KeyValue properties, final Class<T> clazz) {
T obj = null;
try {
obj = clazz.getDeclaredConstructor().newInstance();
return populate(properties, obj);
} catch (Throwable e) {
log.warn("Error occurs !", e);
}
return obj;
}
public static Class<?> getMethodClass(Class<?> clazz, String methodName) {
Method[] methods = clazz.getMethods();
for (Method method : methods) {
if (method.getName().equalsIgnoreCase(methodName)) {
return method.getParameterTypes()[0];
}
}
return null;
}
public static void setProperties(Class<?> clazz, Object obj, String methodName,
Object value) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Class<?> parameterClass = getMethodClass(clazz, methodName);
Method setterMethod = clazz.getMethod(methodName, parameterClass);
if (parameterClass == Boolean.TYPE) {
setterMethod.invoke(obj, Boolean.valueOf(value.toString()));
} else if (parameterClass == Integer.TYPE) {
setterMethod.invoke(obj, Integer.valueOf(value.toString()));
} else if (parameterClass == Double.TYPE) {
setterMethod.invoke(obj, Double.valueOf(value.toString()));
} else if (parameterClass == Float.TYPE) {
setterMethod.invoke(obj, Float.valueOf(value.toString()));
} else if (parameterClass == Long.TYPE) {
setterMethod.invoke(obj, Long.valueOf(value.toString()));
} else
setterMethod.invoke(obj, value);
}
public static <T> T populate(final Properties properties, final T obj) {
Class<?> clazz = obj.getClass();
try {
Set<Map.Entry<Object, Object>> entries = properties.entrySet();
for (Map.Entry<Object, Object> entry : entries) {
String entryKey = entry.getKey().toString();
String[] keyGroup = entryKey.split("\\.");
for (int i = 0; i < keyGroup.length; i++) {
keyGroup[i] = keyGroup[i].toLowerCase();
keyGroup[i] = StringUtils.capitalize(keyGroup[i]);
}
String beanFieldNameWithCapitalization = StringUtils.join(keyGroup);
try {
setProperties(clazz, obj, "set" + beanFieldNameWithCapitalization, entry.getValue());
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException ignored) {
//ignored...
}
}
} catch (RuntimeException e) {
log.warn("Error occurs !", e);
}
return obj;
}
public static <T> T populate(final KeyValue properties, final T obj) {
Class<?> clazz = obj.getClass();
try {
final Set<String> keySet = properties.keySet();
for (String key : keySet) {
String[] keyGroup = key.split("[\\._]");
for (int i = 0; i < keyGroup.length; i++) {
keyGroup[i] = keyGroup[i].toLowerCase();
keyGroup[i] = StringUtils.capitalize(keyGroup[i]);
}
String beanFieldNameWithCapitalization = StringUtils.join(keyGroup);
try {
setProperties(clazz, obj, "set" + beanFieldNameWithCapitalization, properties.getString(key));
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException ignored) {
//ignored...
}
}
} catch (RuntimeException e) {
log.warn("Error occurs !", e);
}
return obj;
}
}
| whose |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/ansi/AnsiPropertySource.java | {
"start": 1789,
"end": 3437
} | class ____ extends PropertySource<AnsiElement> {
private static final Iterable<Mapping> MAPPINGS;
static {
List<Mapping> mappings = new ArrayList<>();
mappings.add(new EnumMapping<>("AnsiStyle.", AnsiStyle.class));
mappings.add(new EnumMapping<>("AnsiColor.", AnsiColor.class));
mappings.add(new Ansi8BitColorMapping("AnsiColor.", Ansi8BitColor::foreground));
mappings.add(new EnumMapping<>("AnsiBackground.", AnsiBackground.class));
mappings.add(new Ansi8BitColorMapping("AnsiBackground.", Ansi8BitColor::background));
mappings.add(new EnumMapping<>("Ansi.", AnsiStyle.class));
mappings.add(new EnumMapping<>("Ansi.", AnsiColor.class));
mappings.add(new EnumMapping<>("Ansi.BG_", AnsiBackground.class));
MAPPINGS = Collections.unmodifiableList(mappings);
}
private final boolean encode;
/**
* Create a new {@link AnsiPropertySource} instance.
* @param name the name of the property source
* @param encode if the output should be encoded
*/
public AnsiPropertySource(String name, boolean encode) {
super(name);
this.encode = encode;
}
@Override
public @Nullable Object getProperty(String name) {
if (StringUtils.hasLength(name)) {
for (Mapping mapping : MAPPINGS) {
String prefix = mapping.getPrefix();
if (name.startsWith(prefix)) {
String postfix = name.substring(prefix.length());
AnsiElement element = mapping.getElement(postfix);
if (element != null) {
return (this.encode) ? AnsiOutput.encode(element) : element;
}
}
}
}
return null;
}
/**
* Mapping between a name and the pseudo property source.
*/
private abstract static | AnsiPropertySource |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-links/deployment/src/test/java/io/quarkus/resteasy/reactive/links/deployment/TestResourceNoId.java | {
"start": 592,
"end": 1424
} | class ____ {
private static final List<TestRecordNoId> RECORDS = new LinkedList<>(Arrays.asList(
new TestRecordNoId("first_value"),
new TestRecordNoId("second_value")));
@GET
@RestLink(entityType = TestRecordNoId.class)
@InjectRestLinks
public Uni<List<TestRecordNoId>> getAll() {
return Uni.createFrom().item(RECORDS).onItem().delayIt().by(Duration.ofMillis(100));
}
@GET
@Path("/by-name/{name}")
@RestLink(entityType = TestRecordNoId.class)
@InjectRestLinks(RestLinkType.INSTANCE)
public TestRecordNoId getByNothing(@PathParam("name") String name) {
return RECORDS.stream()
.filter(record -> record.getName().equals(name))
.findFirst()
.orElseThrow(NotFoundException::new);
}
}
| TestResourceNoId |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigDescriptionBuildStep.java | {
"start": 1145,
"end": 4926
} | class ____ {
@BuildStep
List<ConfigDescriptionBuildItem> createConfigDescriptions(
ConfigurationBuildItem config) throws Exception {
Properties javadoc = new Properties();
ClassPathUtils.consumeAsStreams("META-INF/quarkus-javadoc.properties", in -> {
try {
javadoc.load(in);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
List<ConfigDescriptionBuildItem> ret = new ArrayList<>();
processMappings(config.getReadResult().getBuildTimeMappings(), ret, javadoc, BUILD_TIME);
processMappings(config.getReadResult().getBuildTimeRunTimeMappings(), ret, javadoc, BUILD_AND_RUN_TIME_FIXED);
processMappings(config.getReadResult().getRunTimeMappings(), ret, javadoc, RUN_TIME);
return ret;
}
private void processMappings(List<ConfigClass> mappings, List<ConfigDescriptionBuildItem> descriptionBuildItems,
Properties javaDocProperties, ConfigPhase configPhase) {
for (ConfigClass mapping : mappings) {
Map<String, Property> properties = ConfigMappings.getProperties(mapping);
for (Map.Entry<String, Property> entry : properties.entrySet()) {
String propertyName = entry.getKey();
Property property = entry.getValue();
Method method = property.getMethod();
String defaultValue = null;
if (property instanceof PrimitiveProperty primitiveProperty) {
if (primitiveProperty.hasDefaultValue()) {
defaultValue = primitiveProperty.getDefaultValue();
} else if (primitiveProperty.getPrimitiveType() == boolean.class) {
defaultValue = "false";
} else if (primitiveProperty.getPrimitiveType() != char.class) {
defaultValue = "0";
}
} else if (property instanceof LeafProperty leafProperty) {
if (leafProperty.hasDefaultValue()) {
defaultValue = leafProperty.getDefaultValue();
}
}
String javadocKey = method.getDeclaringClass().getName().replace('$', '.') + '.' + method.getName();
EffectiveConfigTypeAndValues typeName = getTypeName(method.getReturnType(), method.getGenericReturnType());
descriptionBuildItems.add(new ConfigDescriptionBuildItem(propertyName, defaultValue,
javaDocProperties.getProperty(javadocKey), typeName.typeName(), typeName.allowedValues(), configPhase));
}
}
}
private EffectiveConfigTypeAndValues getTypeName(Class<?> valueClass, Type genericType) {
final String name;
final List<String> allowedValues = new ArrayList<>();
// Extract Optionals, Lists and Sets
if ((valueClass.equals(Optional.class) || valueClass.equals(List.class) || valueClass.equals(Set.class))) {
String thisName = valueClass.getName();
if (genericType != null) {
thisName = genericType.getTypeName();
}
if (thisName.contains("<") && thisName.contains(">")) {
thisName = thisName.substring(thisName.lastIndexOf("<") + 1, thisName.indexOf(">"));
}
try {
Class<?> c = Class.forName(thisName);
return getTypeName(c, null);
} catch (ClassNotFoundException ex) {
// Then we use the name as is.
}
name = thisName;
} else if (Enum.class.isAssignableFrom(valueClass)) {
// Check if this is an | ConfigDescriptionBuildStep |
java | apache__camel | components/camel-wasm/src/generated/java/org/apache/camel/component/wasm/WasmConfigurationConfigurer.java | {
"start": 715,
"end": 1973
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.wasm.WasmConfiguration target = (org.apache.camel.component.wasm.WasmConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "module": target.setModule(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "module": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.wasm.WasmConfiguration target = (org.apache.camel.component.wasm.WasmConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "module": return target.getModule();
default: return null;
}
}
}
| WasmConfigurationConfigurer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/CheckedIntFunction.java | {
"start": 532,
"end": 622
} | interface ____<T, E extends Exception> {
T apply(int input) throws E;
}
| CheckedIntFunction |
java | spring-projects__spring-framework | integration-tests/src/test/java/org/springframework/aop/framework/autoproxy/AdvisorAutoProxyCreatorIntegrationTests.java | {
"start": 7375,
"end": 8210
} | class ____ extends StaticMethodMatcherPointcutAdvisor implements InitializingBean {
/**
* Should we insist on the presence of a transaction attribute or refuse to accept one?
*/
private boolean requireTransactionContext = false;
public void setRequireTransactionContext(boolean requireTransactionContext) {
this.requireTransactionContext = requireTransactionContext;
}
public boolean isRequireTransactionContext() {
return requireTransactionContext;
}
public CountingBeforeAdvice getCountingBeforeAdvice() {
return (CountingBeforeAdvice) getAdvice();
}
@Override
public void afterPropertiesSet() {
setAdvice(new TxCountingBeforeAdvice());
}
@Override
public boolean matches(Method method, @Nullable Class<?> targetClass) {
return method.getName().startsWith("setAge");
}
private | OrderedTxCheckAdvisor |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/support/SimpleTransactionScope.java | {
"start": 1779,
"end": 3487
} | class ____ implements Scope {
@Override
public Object get(String name, ObjectFactory<?> objectFactory) {
ScopedObjectsHolder scopedObjects = (ScopedObjectsHolder) TransactionSynchronizationManager.getResource(this);
if (scopedObjects == null) {
scopedObjects = new ScopedObjectsHolder();
TransactionSynchronizationManager.registerSynchronization(new CleanupSynchronization(scopedObjects));
TransactionSynchronizationManager.bindResource(this, scopedObjects);
}
// NOTE: Do NOT modify the following to use Map::computeIfAbsent. For details,
// see https://github.com/spring-projects/spring-framework/issues/25801.
Object scopedObject = scopedObjects.scopedInstances.get(name);
if (scopedObject == null) {
scopedObject = objectFactory.getObject();
scopedObjects.scopedInstances.put(name, scopedObject);
}
return scopedObject;
}
@Override
public @Nullable Object remove(String name) {
ScopedObjectsHolder scopedObjects = (ScopedObjectsHolder) TransactionSynchronizationManager.getResource(this);
if (scopedObjects != null) {
scopedObjects.destructionCallbacks.remove(name);
return scopedObjects.scopedInstances.remove(name);
}
else {
return null;
}
}
@Override
public void registerDestructionCallback(String name, Runnable callback) {
ScopedObjectsHolder scopedObjects = (ScopedObjectsHolder) TransactionSynchronizationManager.getResource(this);
if (scopedObjects != null) {
scopedObjects.destructionCallbacks.put(name, callback);
}
}
@Override
public @Nullable String getConversationId() {
return TransactionSynchronizationManager.getCurrentTransactionName();
}
/**
* Holder for scoped objects.
*/
static | SimpleTransactionScope |
java | dropwizard__dropwizard | dropwizard-metrics/src/test/java/io/dropwizard/metrics/common/CsvReporterFactoryTest.java | {
"start": 628,
"end": 1951
} | class ____ {
private final ObjectMapper objectMapper = Jackson.newObjectMapper();
private final YamlConfigurationFactory<MetricsFactory> factory =
new YamlConfigurationFactory<>(MetricsFactory.class,
BaseValidator.newValidator(),
objectMapper, "dw");
@BeforeEach
void setUp() {
objectMapper.getSubtypeResolver().registerSubtypes(ConsoleReporterFactory.class,
CsvReporterFactory.class,
Slf4jReporterFactory.class);
}
@Test
void isDiscoverable() {
assertThat(new DiscoverableSubtypeResolver().getDiscoveredSubtypes())
.contains(CsvReporterFactory.class);
}
@Test
void directoryCreatedOnStartup() throws Exception {
File dir = new File("metrics");
dir.delete();
assertThat(dir).doesNotExist();
MetricsFactory config = factory.build(new ResourceConfigurationSourceProvider(), "yaml/metrics.yml");
MetricRegistry metricRegistry = new MetricRegistry();
config.configure(new LifecycleEnvironment(metricRegistry), metricRegistry);
assertThat(dir).exists();
}
}
| CsvReporterFactoryTest |
java | spring-projects__spring-framework | spring-expression/src/test/java/org/springframework/expression/spel/SpelReproTests.java | {
"start": 73747,
"end": 73778
} | enum ____ { A, B, C }
private | ABC |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/tvf/slicing/SliceAssigners.java | {
"start": 19116,
"end": 21333
} | class ____ implements SliceUnsharedAssigner {
private static final long serialVersionUID = 1L;
private final int windowEndIndex;
private final SliceAssigner innerAssigner;
private final ReusableListIterable reuseExpiredList = new ReusableListIterable();
public WindowedSliceAssigner(int windowEndIndex, SliceAssigner innerAssigner) {
checkArgument(
windowEndIndex >= 0,
"Windowed slice assigner must have a positive window end index.");
this.windowEndIndex = windowEndIndex;
this.innerAssigner = innerAssigner;
}
@Override
public long assignSliceEnd(RowData element, ClockService clock) {
return element.getTimestamp(windowEndIndex, 3).getMillisecond();
}
@Override
public long getLastWindowEnd(long sliceEnd) {
// we shouldn't use innerAssigner.getLastWindowEnd here,
// because WindowedSliceAssigner is slice unshared, an attached window can't be
// shared with other windows and the last window should be itself.
return sliceEnd;
}
@Override
public long getWindowStart(long windowEnd) {
return innerAssigner.getWindowStart(windowEnd);
}
@Override
public Iterable<Long> expiredSlices(long windowEnd) {
reuseExpiredList.reset(windowEnd);
return reuseExpiredList;
}
@Override
public long getSliceEndInterval() {
return innerAssigner.getSliceEndInterval();
}
@Override
public boolean isEventTime() {
// it always works in event-time mode if input row has been attached windows
return true;
}
@Override
public String getDescription() {
return String.format(
"WindowedSliceWindow(innerWindow=%s, windowEndIndex=%d)",
innerAssigner, windowEndIndex);
}
}
/**
* The {@link SliceAssigner} for elements have been attached slice end timestamp, and the slices
* are shared.
*/
public static final | WindowedSliceAssigner |
java | elastic__elasticsearch | libs/core/src/main/java/org/elasticsearch/core/ESSloppyMath.java | {
"start": 587,
"end": 909
} | class ____ {
private ESSloppyMath() {}
public static double sinh(double value) {
return FastMath.sinh(value);
}
public static double atan(double value) {
return FastMath.atan(value);
}
public static double log(double value) {
return FastMath.log(value);
}
}
| ESSloppyMath |
java | netty__netty | testsuite/src/main/java/io/netty/testsuite/transport/socket/ServerSocketSuspendTest.java | {
"start": 3386,
"end": 3749
} | class ____ extends ChannelInboundHandlerAdapter {
final CountDownLatch latch;
AcceptedChannelCounter(int nChannels) {
latch = new CountDownLatch(nChannels);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
latch.countDown();
}
}
}
| AcceptedChannelCounter |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/timeline/BaseHashTable.java | {
"start": 1434,
"end": 9159
} | class ____<T> {
/**
* The maximum load factor we will allow the hash table to climb to before expanding.
*/
private static final double MAX_LOAD_FACTOR = 0.75f;
/**
* The minimum number of slots we can have in the hash table.
*/
static final int MIN_CAPACITY = 2;
/**
* The maximum number of slots we can have in the hash table.
*/
static final int MAX_CAPACITY = 1 << 30;
private Object[] elements;
private int size = 0;
BaseHashTable(int expectedSize) {
this.elements = new Object[expectedSizeToCapacity(expectedSize)];
}
/**
* Calculate the capacity we should provision, given the expected size.
* <br>
* Our capacity must always be a power of 2, and never less than 2 or more
* than MAX_CAPACITY. We use 64-bit numbers here to avoid overflow
* concerns.
*/
static int expectedSizeToCapacity(int expectedSize) {
long minCapacity = (long) Math.ceil((float) expectedSize / MAX_LOAD_FACTOR);
return Math.max(MIN_CAPACITY,
(int) Math.min(MAX_CAPACITY, roundUpToPowerOfTwo(minCapacity)));
}
private static long roundUpToPowerOfTwo(long i) {
if (i <= 0) {
return 0;
} else if (i > (1L << 62)) {
throw new ArithmeticException("There are no 63-bit powers of 2 higher than " +
"or equal to " + i);
} else {
return 1L << -Long.numberOfLeadingZeros(i - 1);
}
}
final int baseSize() {
return size;
}
final Object[] baseElements() {
return elements;
}
final T baseGet(Object key) {
int slot = findSlot(key, elements.length);
Object value = elements[slot];
if (value == null) {
return null;
} else if (value instanceof Object[]) {
T[] array = (T[]) value;
for (T object : array) {
if (object.equals(key)) {
return object;
}
}
return null;
} else if (value.equals(key)) {
return (T) value;
} else {
return null;
}
}
final T baseAddOrReplace(T newObject) {
if (((size + 1) * MAX_LOAD_FACTOR > elements.length) &&
(elements.length < MAX_CAPACITY)) {
int newSize = elements.length * 2;
rehash(newSize);
}
int slot = findSlot(newObject, elements.length);
Object cur = elements[slot];
if (cur == null) {
size++;
elements[slot] = newObject;
return null;
} else if (cur instanceof Object[]) {
T[] curArray = (T[]) cur;
for (int i = 0; i < curArray.length; i++) {
T value = curArray[i];
if (value.equals(newObject)) {
curArray[i] = newObject;
return value;
}
}
size++;
T[] newArray = (T[]) new Object[curArray.length + 1];
System.arraycopy(curArray, 0, newArray, 0, curArray.length);
newArray[curArray.length] = newObject;
elements[slot] = newArray;
return null;
} else if (cur.equals(newObject)) {
elements[slot] = newObject;
return (T) cur;
} else {
size++;
elements[slot] = new Object[] {cur, newObject};
return null;
}
}
final T baseRemove(Object key) {
int slot = findSlot(key, elements.length);
Object object = elements[slot];
if (object == null) {
return null;
} else if (object instanceof Object[] curArray) {
for (int i = 0; i < curArray.length; i++) {
if (curArray[i].equals(key)) {
size--;
if (curArray.length <= 2) {
int j = i == 0 ? 1 : 0;
elements[slot] = curArray[j];
} else {
Object[] newArray = new Object[curArray.length - 1];
System.arraycopy(curArray, 0, newArray, 0, i);
System.arraycopy(curArray, i + 1, newArray, i, curArray.length - 1 - i);
elements[slot] = newArray;
}
return (T) curArray[i];
}
}
return null;
} else if (object.equals(key)) {
size--;
elements[slot] = null;
return (T) object;
} else {
return null;
}
}
/**
* Expand the hash table to a new size. Existing elements will be copied to new slots.
*/
private void rehash(int newSize) {
Object[] prevElements = elements;
elements = new Object[newSize];
List<Object> ready = new ArrayList<>();
for (int slot = 0; slot < prevElements.length; slot++) {
unpackSlot(ready, prevElements, slot);
for (Object object : ready) {
int newSlot = findSlot(object, elements.length);
Object cur = elements[newSlot];
if (cur == null) {
elements[newSlot] = object;
} else if (cur instanceof Object[] curArray) {
Object[] newArray = new Object[curArray.length + 1];
System.arraycopy(curArray, 0, newArray, 0, curArray.length);
newArray[curArray.length] = object;
elements[newSlot] = newArray;
} else {
elements[newSlot] = new Object[]{cur, object};
}
}
ready.clear();
}
}
/**
* Find the slot in the array that an element should go into.
*/
static int findSlot(Object object, int numElements) {
// This performs a secondary hash using Knuth's multiplicative Fibonacci
// hashing. Then, we choose some of the highest bits. The number of bits
// we choose is based on the table size. If the size is 2, we need 1 bit;
// if the size is 4, we need 2 bits, etc.
int objectHashCode = object.hashCode();
int log2size = 32 - Integer.numberOfLeadingZeros(numElements);
int shift = 65 - log2size;
return (int) ((objectHashCode * -7046029254386353131L) >>> shift);
}
/**
* Copy any elements in the given slot into the output list.
*/
static <T> void unpackSlot(List<T> out, Object[] elements, int slot) {
Object value = elements[slot];
if (value != null) {
if (value instanceof Object[] array) {
for (Object object : array) {
out.add((T) object);
}
} else {
out.add((T) value);
}
}
}
String baseToDebugString() {
StringBuilder bld = new StringBuilder();
bld.append("BaseHashTable{");
for (int i = 0; i < elements.length; i++) {
Object slotObject = elements[i];
bld.append(String.format("%n%d: ", i));
if (slotObject == null) {
bld.append("null");
} else if (slotObject instanceof Object[] array) {
String prefix = "";
for (Object object : array) {
bld.append(prefix);
prefix = ", ";
bld.append(object);
}
} else {
bld.append(slotObject);
}
}
bld.append(String.format("%n}"));
return bld.toString();
}
}
| BaseHashTable |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java | {
"start": 4610,
"end": 43165
} | class ____ {
@Test
@Timeout(value = 10)
@SuppressWarnings("unchecked")
public void test() {
String user = "testuser";
DrainDispatcher dispatcher = null;
try {
Configuration conf = new Configuration();
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
DeletionService mockDelService = mock(DeletionService.class);
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
ContainerId cId2 = BuilderUtils.newContainerId(1, 1, 1, 2);
LocalizerContext lc2 = new LocalizerContext(user, cId2, null);
LocalResourceRequest req1 =
createLocalResourceRequest(user, 1, 1, LocalResourceVisibility.PUBLIC);
LocalResourceRequest req2 =
createLocalResourceRequest(user, 2, 1, LocalResourceVisibility.PUBLIC);
LocalizedResource lr1 = createLocalizedResource(req1, dispatcher);
LocalizedResource lr2 = createLocalizedResource(req2, dispatcher);
ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
localrsrc.put(req1, lr1);
localrsrc.put(req2, lr2);
LocalResourcesTracker tracker =
new LocalResourcesTrackerImpl(user, null, dispatcher, localrsrc,
false, conf, new NMNullStateStoreService(),null);
ResourceEvent req11Event =
new ResourceRequestEvent(req1, LocalResourceVisibility.PUBLIC, lc1);
ResourceEvent req12Event =
new ResourceRequestEvent(req1, LocalResourceVisibility.PUBLIC, lc2);
ResourceEvent req21Event =
new ResourceRequestEvent(req2, LocalResourceVisibility.PUBLIC, lc1);
ResourceEvent rel11Event = new ResourceReleaseEvent(req1, cId1);
ResourceEvent rel12Event = new ResourceReleaseEvent(req1, cId2);
ResourceEvent rel21Event = new ResourceReleaseEvent(req2, cId1);
// Localize R1 for C1
tracker.handle(req11Event);
// Localize R1 for C2
tracker.handle(req12Event);
// Localize R2 for C1
tracker.handle(req21Event);
dispatcher.await();
verify(localizerEventHandler, times(3)).handle(
any(LocalizerResourceRequestEvent.class));
// Verify refCount for R1 is 2
assertEquals(2, lr1.getRefCount());
// Verify refCount for R2 is 1
assertEquals(1, lr2.getRefCount());
// Release R2 for C1
tracker.handle(rel21Event);
dispatcher.await();
verifyTrackedResourceCount(tracker, 2);
// Verify resource with non zero ref count is not removed.
assertEquals(2, lr1.getRefCount());
assertFalse(tracker.remove(lr1, mockDelService));
verifyTrackedResourceCount(tracker, 2);
// Localize resource1
ResourceLocalizedEvent rle =
new ResourceLocalizedEvent(req1, new Path("file:///tmp/r1"), 1);
lr1.handle(rle);
assertTrue(lr1.getState().equals(ResourceState.LOCALIZED));
// Release resource1
tracker.handle(rel11Event);
tracker.handle(rel12Event);
assertEquals(0, lr1.getRefCount());
// Verify resources in state LOCALIZED with ref-count=0 is removed.
assertTrue(tracker.remove(lr1, mockDelService));
verifyTrackedResourceCount(tracker, 1);
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@Timeout(value = 10)
@SuppressWarnings("unchecked")
public void testConsistency() {
String user = "testuser";
DrainDispatcher dispatcher = null;
try {
Configuration conf = new Configuration();
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler = mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler = mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
LocalResourceRequest req1 = createLocalResourceRequest(user, 1, 1,
LocalResourceVisibility.PUBLIC);
LocalizedResource lr1 = createLocalizedResource(req1, dispatcher);
ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
localrsrc.put(req1, lr1);
LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
null, dispatcher, localrsrc, false, conf,
new NMNullStateStoreService(), null);
ResourceEvent req11Event = new ResourceRequestEvent(req1,
LocalResourceVisibility.PUBLIC, lc1);
ResourceEvent rel11Event = new ResourceReleaseEvent(req1, cId1);
// Localize R1 for C1
tracker.handle(req11Event);
dispatcher.await();
// Verify refCount for R1 is 1
assertEquals(1, lr1.getRefCount());
dispatcher.await();
verifyTrackedResourceCount(tracker, 1);
// Localize resource1
ResourceLocalizedEvent rle = new ResourceLocalizedEvent(req1, new Path(
"file:///tmp/r1"), 1);
lr1.handle(rle);
assertTrue(lr1.getState().equals(ResourceState.LOCALIZED));
assertTrue(createdummylocalizefile(new Path("file:///tmp/r1")));
LocalizedResource rsrcbefore = tracker.iterator().next();
File resFile = new File(lr1.getLocalPath().toUri().getRawPath()
.toString());
assertTrue(resFile.exists());
assertTrue(resFile.delete());
// Localize R1 for C1
tracker.handle(req11Event);
dispatcher.await();
lr1.handle(rle);
assertTrue(lr1.getState().equals(ResourceState.LOCALIZED));
LocalizedResource rsrcafter = tracker.iterator().next();
if (rsrcbefore == rsrcafter) {
fail("Localized resource should not be equal");
}
// Release resource1
tracker.handle(rel11Event);
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@Timeout(value = 10)
@SuppressWarnings("unchecked")
public void testLocalResourceCache() {
String user = "testuser";
DrainDispatcher dispatcher = null;
try {
Configuration conf = new Configuration();
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<ContainerEvent> containerEventHandler =
mock(EventHandler.class);
// Registering event handlers.
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
LocalResourcesTracker tracker =
new LocalResourcesTrackerImpl(user, null, dispatcher, localrsrc,
true, conf, new NMNullStateStoreService(), null);
LocalResourceRequest lr =
createLocalResourceRequest(user, 1, 1, LocalResourceVisibility.PUBLIC);
// Creating 2 containers for same application which will be requesting
// same local resource.
// Container 1 requesting local resource.
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
ResourceEvent reqEvent1 =
new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc1);
// No resource request is initially present in local cache
assertEquals(0, localrsrc.size());
// Container-1 requesting local resource.
tracker.handle(reqEvent1);
dispatcher.await();
// New localized Resource should have been added to local resource map
// and the requesting container will be added to its waiting queue.
assertEquals(1, localrsrc.size());
assertTrue(localrsrc.containsKey(lr));
assertEquals(1, localrsrc.get(lr).getRefCount());
assertTrue(localrsrc.get(lr).ref.contains(cId1));
assertEquals(ResourceState.DOWNLOADING, localrsrc.get(lr)
.getState());
// Container 2 requesting the resource
ContainerId cId2 = BuilderUtils.newContainerId(1, 1, 1, 2);
LocalizerContext lc2 = new LocalizerContext(user, cId2, null);
ResourceEvent reqEvent2 =
new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc2);
tracker.handle(reqEvent2);
dispatcher.await();
// Container 2 should have been added to the waiting queue of the local
// resource
assertEquals(2, localrsrc.get(lr).getRefCount());
assertTrue(localrsrc.get(lr).ref.contains(cId2));
// Failing resource localization
ResourceEvent resourceFailedEvent = new ResourceFailedLocalizationEvent(
lr,(new Exception("test").getMessage()));
// Backing up the resource to track its state change as it will be
// removed after the failed event.
LocalizedResource localizedResource = localrsrc.get(lr);
tracker.handle(resourceFailedEvent);
dispatcher.await();
// After receiving failed resource event; all waiting containers will be
// notified with Container Resource Failed Event.
assertEquals(0, localrsrc.size());
verify(containerEventHandler, timeout(1000).times(2)).handle(
isA(ContainerResourceFailedEvent.class));
assertEquals(ResourceState.FAILED, localizedResource.getState());
// Container 1 trying to release the resource (This resource is already
// deleted from the cache. This call should return silently without
// exception.
ResourceReleaseEvent relEvent1 = new ResourceReleaseEvent(lr, cId1);
tracker.handle(relEvent1);
dispatcher.await();
// Container-3 now requests for the same resource. This request call
// is coming prior to Container-2's release call.
ContainerId cId3 = BuilderUtils.newContainerId(1, 1, 1, 3);
LocalizerContext lc3 = new LocalizerContext(user, cId3, null);
ResourceEvent reqEvent3 =
new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc3);
tracker.handle(reqEvent3);
dispatcher.await();
// Local resource cache now should have the requested resource and the
// number of waiting containers should be 1.
assertEquals(1, localrsrc.size());
assertTrue(localrsrc.containsKey(lr));
assertEquals(1, localrsrc.get(lr).getRefCount());
assertTrue(localrsrc.get(lr).ref.contains(cId3));
// Container-2 Releases the resource
ResourceReleaseEvent relEvent2 = new ResourceReleaseEvent(lr, cId2);
tracker.handle(relEvent2);
dispatcher.await();
// Making sure that there is no change in the cache after the release.
assertEquals(1, localrsrc.size());
assertTrue(localrsrc.containsKey(lr));
assertEquals(1, localrsrc.get(lr).getRefCount());
assertTrue(localrsrc.get(lr).ref.contains(cId3));
// Sending ResourceLocalizedEvent to tracker. In turn resource should
// send Container Resource Localized Event to waiting containers.
Path localizedPath = new Path("/tmp/file1");
ResourceLocalizedEvent localizedEvent =
new ResourceLocalizedEvent(lr, localizedPath, 123L);
tracker.handle(localizedEvent);
dispatcher.await();
// Verifying ContainerResourceLocalizedEvent .
verify(containerEventHandler, timeout(1000).times(1)).handle(
isA(ContainerResourceLocalizedEvent.class));
assertEquals(ResourceState.LOCALIZED, localrsrc.get(lr)
.getState());
assertEquals(1, localrsrc.get(lr).getRefCount());
// Container-3 releasing the resource.
ResourceReleaseEvent relEvent3 = new ResourceReleaseEvent(lr, cId3);
tracker.handle(relEvent3);
dispatcher.await();
assertEquals(0, localrsrc.get(lr).getRefCount());
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@Timeout(value = 10)
@SuppressWarnings("unchecked")
public void testHierarchicalLocalCacheDirectories() {
String user = "testuser";
DrainDispatcher dispatcher = null;
try {
Configuration conf = new Configuration();
// setting per directory file limit to 1.
conf.set(YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY, "37");
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
DeletionService mockDelService = mock(DeletionService.class);
ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
null, dispatcher, localrsrc, true, conf,
new NMNullStateStoreService(), null);
// This is a random path. NO File creation will take place at this place.
Path localDir = new Path("/tmp");
// Container 1 needs lr1 resource
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
LocalResourceVisibility.PUBLIC);
LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
// Container 1 requests lr1 to be localized
ResourceEvent reqEvent1 = new ResourceRequestEvent(lr1,
LocalResourceVisibility.PUBLIC, lc1);
tracker.handle(reqEvent1);
// Simulate the process of localization of lr1
// NOTE: Localization path from tracker has resource ID at end
Path hierarchicalPath1 =
tracker.getPathForLocalization(lr1, localDir, null).getParent();
// Simulate lr1 getting localized
ResourceLocalizedEvent rle1 =
new ResourceLocalizedEvent(lr1,
new Path(hierarchicalPath1.toUri().toString() +
Path.SEPARATOR + "file1"), 120);
tracker.handle(rle1);
// Localization successful.
LocalResourceRequest lr2 = createLocalResourceRequest(user, 3, 3,
LocalResourceVisibility.PUBLIC);
// Container 1 requests lr2 to be localized.
ResourceEvent reqEvent2 =
new ResourceRequestEvent(lr2, LocalResourceVisibility.PUBLIC, lc1);
tracker.handle(reqEvent2);
Path hierarchicalPath2 =
tracker.getPathForLocalization(lr2, localDir, null).getParent();
// localization failed.
ResourceFailedLocalizationEvent rfe2 =
new ResourceFailedLocalizationEvent(
lr2, new Exception("Test").toString());
tracker.handle(rfe2);
/*
* The path returned for two localization should be different because we
* are limiting one file per sub-directory.
*/
assertNotSame(hierarchicalPath1, hierarchicalPath2);
LocalResourceRequest lr3 = createLocalResourceRequest(user, 2, 2,
LocalResourceVisibility.PUBLIC);
ResourceEvent reqEvent3 = new ResourceRequestEvent(lr3,
LocalResourceVisibility.PUBLIC, lc1);
tracker.handle(reqEvent3);
Path hierarchicalPath3 =
tracker.getPathForLocalization(lr3, localDir, null).getParent();
// localization successful
ResourceLocalizedEvent rle3 =
new ResourceLocalizedEvent(lr3, new Path(hierarchicalPath3.toUri()
.toString() + Path.SEPARATOR + "file3"), 120);
tracker.handle(rle3);
// Verifying that path created is inside the subdirectory
assertEquals(hierarchicalPath3.toUri().toString(),
hierarchicalPath1.toUri().toString() + Path.SEPARATOR + "0");
// Container 1 releases resource lr1
ResourceEvent relEvent1 = new ResourceReleaseEvent(lr1, cId1);
tracker.handle(relEvent1);
// Validate the file counts now
int resources = 0;
Iterator<LocalizedResource> iter = tracker.iterator();
while (iter.hasNext()) {
iter.next();
resources++;
}
// There should be only two resources lr1 and lr3 now.
assertEquals(2, resources);
// Now simulate cache cleanup - removes unused resources.
iter = tracker.iterator();
while (iter.hasNext()) {
LocalizedResource rsrc = iter.next();
if (rsrc.getRefCount() == 0) {
assertTrue(tracker.remove(rsrc, mockDelService));
resources--;
}
}
// lr1 is not used by anyone and will be removed, only lr3 will hang
// around
assertEquals(1, resources);
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@SuppressWarnings("unchecked")
public void testStateStoreSuccessfulLocalization() throws Exception {
final String user = "someuser";
final ApplicationId appId = ApplicationId.newInstance(1, 1);
// This is a random path. NO File creation will take place at this place.
final Path localDir = new Path("/tmp");
Configuration conf = new YarnConfiguration();
DrainDispatcher dispatcher = null;
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
DeletionService mockDelService = mock(DeletionService.class);
NMStateStoreService stateStore = mock(NMStateStoreService.class);
try {
LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
appId, dispatcher, false, conf, stateStore, null);
// Container 1 needs lr1 resource
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
LocalResourceVisibility.APPLICATION);
LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
// Container 1 requests lr1 to be localized
ResourceEvent reqEvent1 = new ResourceRequestEvent(lr1,
LocalResourceVisibility.APPLICATION, lc1);
tracker.handle(reqEvent1);
dispatcher.await();
// Simulate the process of localization of lr1
Path hierarchicalPath1 = tracker.getPathForLocalization(lr1, localDir,
null);
ArgumentCaptor<LocalResourceProto> localResourceCaptor =
ArgumentCaptor.forClass(LocalResourceProto.class);
ArgumentCaptor<Path> pathCaptor = ArgumentCaptor.forClass(Path.class);
verify(stateStore).startResourceLocalization(eq(user), eq(appId),
localResourceCaptor.capture(), pathCaptor.capture());
LocalResourceProto lrProto = localResourceCaptor.getValue();
Path localizedPath1 = pathCaptor.getValue();
assertEquals(lr1,
new LocalResourceRequest(new LocalResourcePBImpl(lrProto)));
assertEquals(hierarchicalPath1, localizedPath1.getParent());
// Simulate lr1 getting localized
ResourceLocalizedEvent rle1 =
new ResourceLocalizedEvent(lr1, pathCaptor.getValue(), 120);
tracker.handle(rle1);
dispatcher.await();
ArgumentCaptor<LocalizedResourceProto> localizedProtoCaptor =
ArgumentCaptor.forClass(LocalizedResourceProto.class);
verify(stateStore).finishResourceLocalization(eq(user), eq(appId),
localizedProtoCaptor.capture());
LocalizedResourceProto localizedProto = localizedProtoCaptor.getValue();
assertEquals(lr1, new LocalResourceRequest(
new LocalResourcePBImpl(localizedProto.getResource())));
assertEquals(localizedPath1.toString(),
localizedProto.getLocalPath());
LocalizedResource localizedRsrc1 = tracker.getLocalizedResource(lr1);
assertNotNull(localizedRsrc1);
// simulate release and retention processing
tracker.handle(new ResourceReleaseEvent(lr1, cId1));
dispatcher.await();
boolean removeResult = tracker.remove(localizedRsrc1, mockDelService);
assertTrue(removeResult);
verify(stateStore).removeLocalizedResource(eq(user), eq(appId),
eq(localizedPath1));
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@SuppressWarnings("unchecked")
public void testStateStoreFailedLocalization() throws Exception {
final String user = "someuser";
final ApplicationId appId = ApplicationId.newInstance(1, 1);
// This is a random path. NO File creation will take place at this place.
final Path localDir = new Path("/tmp");
Configuration conf = new YarnConfiguration();
DrainDispatcher dispatcher = null;
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
NMStateStoreService stateStore = mock(NMStateStoreService.class);
try {
LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
appId, dispatcher, false, conf, stateStore, null);
// Container 1 needs lr1 resource
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
LocalResourceVisibility.APPLICATION);
LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
// Container 1 requests lr1 to be localized
ResourceEvent reqEvent1 = new ResourceRequestEvent(lr1,
LocalResourceVisibility.APPLICATION, lc1);
tracker.handle(reqEvent1);
dispatcher.await();
// Simulate the process of localization of lr1
Path hierarchicalPath1 = tracker.getPathForLocalization(lr1, localDir,
null);
ArgumentCaptor<LocalResourceProto> localResourceCaptor =
ArgumentCaptor.forClass(LocalResourceProto.class);
ArgumentCaptor<Path> pathCaptor = ArgumentCaptor.forClass(Path.class);
verify(stateStore).startResourceLocalization(eq(user), eq(appId),
localResourceCaptor.capture(), pathCaptor.capture());
LocalResourceProto lrProto = localResourceCaptor.getValue();
Path localizedPath1 = pathCaptor.getValue();
assertEquals(lr1,
new LocalResourceRequest(new LocalResourcePBImpl(lrProto)));
assertEquals(hierarchicalPath1, localizedPath1.getParent());
ResourceFailedLocalizationEvent rfe1 =
new ResourceFailedLocalizationEvent(
lr1, new Exception("Test").toString());
tracker.handle(rfe1);
dispatcher.await();
verify(stateStore).removeLocalizedResource(eq(user), eq(appId),
eq(localizedPath1));
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@SuppressWarnings("unchecked")
public void testRecoveredResource() throws Exception {
final String user = "someuser";
final ApplicationId appId = ApplicationId.newInstance(1, 1);
// This is a random path. NO File creation will take place at this place.
final Path localDir = new Path("/tmp/localdir");
Configuration conf = new YarnConfiguration();
DrainDispatcher dispatcher = null;
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
NMStateStoreService stateStore = mock(NMStateStoreService.class);
try {
LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
appId, dispatcher, false, conf, stateStore, null);
// Container 1 needs lr1 resource
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
LocalResourceVisibility.APPLICATION);
assertNull(tracker.getLocalizedResource(lr1));
final long localizedId1 = 52;
Path hierarchicalPath1 = new Path(localDir,
Long.toString(localizedId1));
Path localizedPath1 = new Path(hierarchicalPath1, "resource.jar");
tracker.handle(new ResourceRecoveredEvent(lr1, localizedPath1, 120));
dispatcher.await();
assertNotNull(tracker.getLocalizedResource(lr1));
// verify new paths reflect recovery of previous resources
LocalResourceRequest lr2 = createLocalResourceRequest(user, 2, 2,
LocalResourceVisibility.APPLICATION);
LocalizerContext lc2 = new LocalizerContext(user, cId1, null);
ResourceEvent reqEvent2 = new ResourceRequestEvent(lr2,
LocalResourceVisibility.APPLICATION, lc2);
tracker.handle(reqEvent2);
dispatcher.await();
Path hierarchicalPath2 = tracker.getPathForLocalization(lr2, localDir,
null);
long localizedId2 = Long.parseLong(hierarchicalPath2.getName());
assertEquals(localizedId1 + 1, localizedId2);
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@SuppressWarnings("unchecked")
public void testRecoveredResourceWithDirCacheMgr() throws Exception {
final String user = "someuser";
final ApplicationId appId = ApplicationId.newInstance(1, 1);
// This is a random path. NO File creation will take place at this place.
final Path localDirRoot = new Path("/tmp/localdir");
Configuration conf = new YarnConfiguration();
DrainDispatcher dispatcher = null;
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
NMStateStoreService stateStore = mock(NMStateStoreService.class);
try {
LocalResourcesTrackerImpl tracker = new LocalResourcesTrackerImpl(user,
appId, dispatcher, true, conf, stateStore, null);
LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
LocalResourceVisibility.PUBLIC);
assertNull(tracker.getLocalizedResource(lr1));
final long localizedId1 = 52;
Path hierarchicalPath1 = new Path(localDirRoot + "/4/2",
Long.toString(localizedId1));
Path localizedPath1 = new Path(hierarchicalPath1, "resource.jar");
tracker.handle(new ResourceRecoveredEvent(lr1, localizedPath1, 120));
dispatcher.await();
assertNotNull(tracker.getLocalizedResource(lr1));
LocalCacheDirectoryManager dirMgrRoot =
tracker.getDirectoryManager(localDirRoot);
assertEquals(0, dirMgrRoot.getDirectory("").getCount());
assertEquals(1, dirMgrRoot.getDirectory("4/2").getCount());
LocalResourceRequest lr2 = createLocalResourceRequest(user, 2, 2,
LocalResourceVisibility.PUBLIC);
assertNull(tracker.getLocalizedResource(lr2));
final long localizedId2 = localizedId1 + 1;
Path hierarchicalPath2 = new Path(localDirRoot + "/4/2",
Long.toString(localizedId2));
Path localizedPath2 = new Path(hierarchicalPath2, "resource.jar");
tracker.handle(new ResourceRecoveredEvent(lr2, localizedPath2, 120));
dispatcher.await();
assertNotNull(tracker.getLocalizedResource(lr2));
assertEquals(0, dirMgrRoot.getDirectory("").getCount());
assertEquals(2, dirMgrRoot.getDirectory("4/2").getCount());
LocalResourceRequest lr3 = createLocalResourceRequest(user, 3, 3,
LocalResourceVisibility.PUBLIC);
assertNull(tracker.getLocalizedResource(lr3));
final long localizedId3 = 128;
Path hierarchicalPath3 = new Path(localDirRoot + "/4/3",
Long.toString(localizedId3));
Path localizedPath3 = new Path(hierarchicalPath3, "resource.jar");
tracker.handle(new ResourceRecoveredEvent(lr3, localizedPath3, 120));
dispatcher.await();
assertNotNull(tracker.getLocalizedResource(lr3));
assertEquals(0, dirMgrRoot.getDirectory("").getCount());
assertEquals(2, dirMgrRoot.getDirectory("4/2").getCount());
assertEquals(1, dirMgrRoot.getDirectory("4/3").getCount());
LocalResourceRequest lr4 = createLocalResourceRequest(user, 4, 4,
LocalResourceVisibility.PUBLIC);
assertNull(tracker.getLocalizedResource(lr4));
final long localizedId4 = 256;
Path hierarchicalPath4 = new Path(localDirRoot + "/4",
Long.toString(localizedId4));
Path localizedPath4 = new Path(hierarchicalPath4, "resource.jar");
tracker.handle(new ResourceRecoveredEvent(lr4, localizedPath4, 120));
dispatcher.await();
assertNotNull(tracker.getLocalizedResource(lr4));
assertEquals(0, dirMgrRoot.getDirectory("").getCount());
assertEquals(1, dirMgrRoot.getDirectory("4").getCount());
assertEquals(2, dirMgrRoot.getDirectory("4/2").getCount());
assertEquals(1, dirMgrRoot.getDirectory("4/3").getCount());
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@SuppressWarnings("unchecked")
public void testGetPathForLocalization() throws Exception {
FileContext lfs = FileContext.getLocalFSFileContext();
Path base_path = new Path("target",
TestLocalResourcesTrackerImpl.class.getSimpleName());
final String user = "someuser";
final ApplicationId appId = ApplicationId.newInstance(1, 1);
Configuration conf = new YarnConfiguration();
DrainDispatcher dispatcher = null;
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
NMStateStoreService stateStore = mock(NMStateStoreService.class);
DeletionService delService = mock(DeletionService.class);
try {
LocalResourceRequest req1 = createLocalResourceRequest(user, 1, 1,
LocalResourceVisibility.PUBLIC);
LocalizedResource lr1 = createLocalizedResource(req1, dispatcher);
ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
localrsrc.put(req1, lr1);
LocalResourcesTrackerImpl tracker = new LocalResourcesTrackerImpl(user,
appId, dispatcher, localrsrc, true, conf, stateStore, null);
Path conflictPath = new Path(base_path, "10");
Path qualifiedConflictPath = lfs.makeQualified(conflictPath);
lfs.mkdir(qualifiedConflictPath, null, true);
Path rPath = tracker.getPathForLocalization(req1, base_path,
delService);
assertFalse(lfs.util().exists(rPath));
verify(delService, times(1)).delete(argThat(new FileDeletionMatcher(
delService, user, conflictPath, null)));
} finally {
lfs.delete(base_path, true);
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@SuppressWarnings("unchecked")
@Test
public void testResourcePresentInGoodDir() throws IOException {
String user = "testuser";
DrainDispatcher dispatcher = null;
try {
Configuration conf = new Configuration();
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
LocalResourceRequest req1 =
createLocalResourceRequest(user, 1, 1, LocalResourceVisibility.PUBLIC);
LocalResourceRequest req2 =
createLocalResourceRequest(user, 2, 1, LocalResourceVisibility.PUBLIC);
LocalizedResource lr1 = createLocalizedResource(req1, dispatcher);
LocalizedResource lr2 = createLocalizedResource(req2, dispatcher);
ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
localrsrc.put(req1, lr1);
localrsrc.put(req2, lr2);
LocalDirsHandlerService dirsHandler = mock(LocalDirsHandlerService.class);
List<String> goodDirs = new ArrayList<String>();
// /tmp/somedir2 is bad
goodDirs.add("/tmp/somedir1/");
goodDirs.add("/tmp/somedir2");
Mockito.when(dirsHandler.getLocalDirs()).thenReturn(goodDirs);
Mockito.when(dirsHandler.getLocalDirsForRead()).thenReturn(goodDirs);
LocalResourcesTrackerImpl tracker =
new LocalResourcesTrackerImpl(user, null, dispatcher, localrsrc,
true , conf, new NMNullStateStoreService(), dirsHandler);
ResourceEvent req11Event =
new ResourceRequestEvent(req1, LocalResourceVisibility.PUBLIC, lc1);
ResourceEvent req21Event =
new ResourceRequestEvent(req2, LocalResourceVisibility.PUBLIC, lc1);
// Localize R1 for C1
tracker.handle(req11Event);
// Localize R2 for C1
tracker.handle(req21Event);
dispatcher.await();
// Localize resource1
Path p1 = tracker.getPathForLocalization(req1,
new Path("/tmp/somedir1"), null);
Path p2 = tracker.getPathForLocalization(req2,
new Path("/tmp/somedir2"), null);
ResourceLocalizedEvent rle1 = new ResourceLocalizedEvent(req1, p1, 1);
tracker.handle(rle1);
ResourceLocalizedEvent rle2 = new ResourceLocalizedEvent(req2, p2, 1);
tracker.handle(rle2);
dispatcher.await();
// Remove somedir2 from gooddirs
assertTrue(tracker.checkLocalResource(lr2));
goodDirs.remove(1);
assertFalse(tracker.checkLocalResource(lr2));
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
@Test
@SuppressWarnings("unchecked")
public void testReleaseWhileDownloading() throws Exception {
String user = "testuser";
DrainDispatcher dispatcher = null;
try {
Configuration conf = new Configuration();
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<LocalizerEvent> containerEventHandler =
mock(EventHandler.class);
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
ContainerId cId = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalizerContext lc = new LocalizerContext(user, cId, null);
LocalResourceRequest req =
createLocalResourceRequest(user, 1, 1, LocalResourceVisibility.PUBLIC);
LocalizedResource lr = createLocalizedResource(req, dispatcher);
ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
localrsrc.put(req, lr);
LocalResourcesTracker tracker =
new LocalResourcesTrackerImpl(user, null, dispatcher, localrsrc,
false, conf, new NMNullStateStoreService(), null);
// request the resource
ResourceEvent reqEvent =
new ResourceRequestEvent(req, LocalResourceVisibility.PUBLIC, lc);
tracker.handle(reqEvent);
// release the resource
ResourceEvent relEvent = new ResourceReleaseEvent(req, cId);
tracker.handle(relEvent);
// download completing after release
ResourceLocalizedEvent rle =
new ResourceLocalizedEvent(req, new Path("file:///tmp/r1"), 1);
tracker.handle(rle);
dispatcher.await();
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
private boolean createdummylocalizefile(Path path) {
boolean ret = false;
File file = new File(path.toUri().getRawPath().toString());
try {
ret = file.createNewFile();
} catch (IOException e) {
e.printStackTrace();
}
return ret;
}
private void verifyTrackedResourceCount(LocalResourcesTracker tracker,
int expected) {
int count = 0;
Iterator<LocalizedResource> iter = tracker.iterator();
while (iter.hasNext()) {
iter.next();
count++;
}
assertEquals(expected,
count, "Tracker resource count does not match");
}
private LocalResourceRequest createLocalResourceRequest(String user, int i,
long ts, LocalResourceVisibility vis) {
final LocalResourceRequest req =
new LocalResourceRequest(new Path("file:///tmp/" + user + "/rsrc" + i),
ts + i * 2000, LocalResourceType.FILE, vis, null);
return req;
}
private LocalizedResource createLocalizedResource(LocalResourceRequest req,
Dispatcher dispatcher) {
LocalizedResource lr = new LocalizedResource(req, dispatcher);
return lr;
}
private DrainDispatcher createDispatcher(Configuration conf) {
DrainDispatcher dispatcher = new DrainDispatcher();
dispatcher.init(conf);
dispatcher.start();
return dispatcher;
}
}
| TestLocalResourcesTrackerImpl |
java | apache__camel | components/camel-file/src/main/java/org/apache/camel/component/file/cluster/FileLockClusterService.java | {
"start": 1144,
"end": 5452
} | class ____ extends AbstractCamelClusterService<FileLockClusterView> {
private String root;
private long acquireLockDelay;
private TimeUnit acquireLockDelayUnit;
private long acquireLockInterval;
private TimeUnit acquireLockIntervalUnit;
private ScheduledExecutorService executor;
private int heartbeatTimeoutMultiplier;
public FileLockClusterService() {
this.acquireLockDelay = 1;
this.acquireLockDelayUnit = TimeUnit.SECONDS;
this.acquireLockInterval = 10;
this.acquireLockIntervalUnit = TimeUnit.SECONDS;
this.heartbeatTimeoutMultiplier = 5;
}
@Override
protected FileLockClusterView createView(String namespace) throws Exception {
return new FileLockClusterView(this, namespace);
}
public String getRoot() {
return root;
}
/**
* Sets the root path.
*/
public void setRoot(String root) {
this.root = root;
}
public long getAcquireLockDelay() {
return acquireLockDelay;
}
/**
* The time to wait before starting to try to acquire lock, default 1.
*/
public void setAcquireLockDelay(long acquireLockDelay) {
this.acquireLockDelay = acquireLockDelay;
}
public void setAcquireLockDelay(long pollDelay, TimeUnit pollDelayUnit) {
setAcquireLockDelay(pollDelay);
setAcquireLockDelayUnit(pollDelayUnit);
}
public TimeUnit getAcquireLockDelayUnit() {
return acquireLockDelayUnit;
}
/**
* The time unit for the acquireLockDelay, default to TimeUnit.SECONDS.
*/
public void setAcquireLockDelayUnit(TimeUnit acquireLockDelayUnit) {
this.acquireLockDelayUnit = acquireLockDelayUnit;
}
public long getAcquireLockInterval() {
return acquireLockInterval;
}
/**
* The time to wait between attempts to try to acquire lock, default 10.
*/
public void setAcquireLockInterval(long acquireLockInterval) {
this.acquireLockInterval = acquireLockInterval;
}
public void setAcquireLockInterval(long pollInterval, TimeUnit pollIntervalUnit) {
setAcquireLockInterval(pollInterval);
setAcquireLockIntervalUnit(pollIntervalUnit);
}
public TimeUnit getAcquireLockIntervalUnit() {
return acquireLockIntervalUnit;
}
/**
* The time unit for the acquireLockInterval, default to TimeUnit.SECONDS.
*/
public void setAcquireLockIntervalUnit(TimeUnit acquireLockIntervalUnit) {
this.acquireLockIntervalUnit = acquireLockIntervalUnit;
}
/**
* Multiplier applied to the cluster leader {@code acquireLockInterval} to determine how long followers should wait
* before considering the leader "stale".
* <p>
* For example, if the leader updates its heartbeat every 2 seconds and the {@code heartbeatTimeoutMultiplier} is 3,
* followers will tolerate up to {@code 2s * 3 = 6s} of silence before declaring the leader unavailable.
* <p>
*/
public void setHeartbeatTimeoutMultiplier(int heartbeatTimeoutMultiplier) {
this.heartbeatTimeoutMultiplier = heartbeatTimeoutMultiplier;
}
public int getHeartbeatTimeoutMultiplier() {
return heartbeatTimeoutMultiplier;
}
@Override
protected void doStop() throws Exception {
super.doStop();
CamelContext context = getCamelContext();
if (executor != null) {
if (context != null) {
context.getExecutorServiceManager().shutdown(executor);
} else {
executor.shutdown();
}
executor = null;
}
}
ScheduledExecutorService getExecutor() {
Lock internalLock = getInternalLock();
internalLock.lock();
try {
if (executor == null) {
// Camel context should be set at this stage.
final CamelContext context = ObjectHelper.notNull(getCamelContext(), "CamelContext");
executor = context.getExecutorServiceManager()
.newSingleThreadScheduledExecutor(this, "FileLockClusterService-" + getId());
}
return executor;
} finally {
internalLock.unlock();
}
}
}
| FileLockClusterService |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/aggregate/MinWithRetractAggFunction.java | {
"start": 1379,
"end": 2978
} | class ____<T extends Comparable<T>>
extends BuiltInAggregateFunction<
T, MinWithRetractAggFunction.MinWithRetractAccumulator<T>> {
private static final long serialVersionUID = 4253774292802374843L;
private final transient DataType valueDataType;
public MinWithRetractAggFunction(LogicalType valueType) {
this.valueDataType = toInternalDataType(valueType);
}
// --------------------------------------------------------------------------------------------
// Planning
// --------------------------------------------------------------------------------------------
@Override
public List<DataType> getArgumentDataTypes() {
return Collections.singletonList(valueDataType);
}
@Override
public DataType getAccumulatorDataType() {
return DataTypes.STRUCTURED(
MinWithRetractAccumulator.class,
DataTypes.FIELD("min", valueDataType.nullable()),
DataTypes.FIELD("mapSize", DataTypes.BIGINT()),
DataTypes.FIELD(
"map",
MapView.newMapViewDataType(valueDataType.notNull(), DataTypes.BIGINT())));
}
@Override
public DataType getOutputDataType() {
return valueDataType;
}
// --------------------------------------------------------------------------------------------
// Runtime
// --------------------------------------------------------------------------------------------
/** Accumulator for MIN with retraction. */
public static | MinWithRetractAggFunction |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/UseNamedDatasourceTest.java | {
"start": 354,
"end": 1033
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClass(MyEntity.class)
.addAsResource("application-named-datasource.properties", "application.properties"));
@Inject
EntityManager entityManager;
@Test
@Transactional
public void testPersistenceAndConfigTest() {
MyEntity entity = new MyEntity("name");
entityManager.persist(entity);
MyEntity savedEntity = entityManager.find(MyEntity.class, entity.getId());
assertEquals(entity.getName(), savedEntity.getName());
}
}
| UseNamedDatasourceTest |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/multipart/OtherMultipartResource.java | {
"start": 303,
"end": 678
} | class ____ {
@Path("simple")
@Produces(MediaType.TEXT_PLAIN)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@POST
@SuppressWarnings({ "removal" })
public String simple(@MultipartForm OtherFormData formData) {
return formData.first + " - " + formData.last + " - " + formData.finalField + " - " + OtherFormData.staticField;
}
}
| OtherMultipartResource |
java | apache__camel | core/camel-xml-jaxp/src/main/java/org/apache/camel/util/xml/StreamSourceCache.java | {
"start": 1605,
"end": 4636
} | class ____ extends StreamSource implements StreamCache {
private final StreamCache streamCache;
private final ReaderCache readCache;
public StreamSourceCache(StreamSource source, Exchange exchange) throws IOException {
if (source.getInputStream() != null) {
// set up CachedOutputStream with the properties
CachedOutputStream cos = new CachedOutputStream(exchange);
IOHelper.copyAndCloseInput(source.getInputStream(), cos);
streamCache = cos.newStreamCache();
readCache = null;
setSystemId(source.getSystemId());
setInputStream((InputStream) streamCache);
} else if (source.getReader() != null) {
String data = exchange.getContext().getTypeConverter().convertTo(String.class, exchange, source.getReader());
readCache = new ReaderCache(data);
streamCache = null;
setReader(readCache);
} else {
streamCache = null;
readCache = null;
}
}
public StreamSourceCache(StreamCache streamCache) {
this.streamCache = streamCache;
if (streamCache instanceof InputStream inputStream) {
setInputStream(inputStream);
this.readCache = null;
} else if (streamCache instanceof ReaderCache readerCache) {
this.readCache = readerCache;
setReader((java.io.Reader) streamCache);
} else {
this.readCache = null;
}
}
@Override
public void reset() {
if (streamCache != null) {
streamCache.reset();
}
if (readCache != null) {
readCache.reset();
}
}
@Override
public void writeTo(OutputStream os) throws IOException {
if (streamCache != null) {
streamCache.writeTo(os);
} else if (readCache != null) {
readCache.writeTo(os);
}
}
@Override
public StreamCache copy(Exchange exchange) throws IOException {
if (streamCache != null) {
return new StreamSourceCache(streamCache.copy(exchange));
}
if (readCache != null) {
return new StreamSourceCache(readCache.copy(exchange));
}
return null;
}
@Override
public boolean inMemory() {
if (streamCache != null) {
return streamCache.inMemory();
} else if (readCache != null) {
return readCache.inMemory();
} else {
// should not happen
return true;
}
}
@Override
public long length() {
if (streamCache != null) {
return streamCache.length();
} else if (readCache != null) {
return readCache.length();
} else {
// should not happen
return 0;
}
}
@Override
public long position() {
return -1;
}
@Override
public boolean isEmpty() {
return length() == 0;
}
}
| StreamSourceCache |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/date/DateParseTest7.java | {
"start": 289,
"end": 1650
} | class ____ extends TestCase {
protected void setUp() throws Exception {
JSON.defaultTimeZone = TimeZone.getTimeZone("Asia/Shanghai");
JSON.defaultLocale = Locale.CHINA;
}
public void test_date() throws Exception {
System.out.println(System.currentTimeMillis());
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", JSON.defaultLocale);
dateFormat.setTimeZone(JSON.defaultTimeZone);
System.out.println(dateFormat.parse("1970-01-01 20:00:01").getTime());
System.out.println(new Date().toString());
//1369273142603
String text = "\"19790714130723\"";
Date date = JSON.parseObject(text, Date.class);
Calendar calendar = Calendar.getInstance(JSON.defaultTimeZone, JSON.defaultLocale);
calendar.setTime(date);
Assert.assertEquals(1979, calendar.get(Calendar.YEAR));
Assert.assertEquals(6, calendar.get(Calendar.MONTH));
Assert.assertEquals(14, calendar.get(Calendar.DAY_OF_MONTH));
Assert.assertEquals(13, calendar.get(Calendar.HOUR_OF_DAY));
Assert.assertEquals(7, calendar.get(Calendar.MINUTE));
Assert.assertEquals(23, calendar.get(Calendar.SECOND));
Assert.assertEquals(0, calendar.get(Calendar.MILLISECOND));
}
}
| DateParseTest7 |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicResponse.java | {
"start": 1357,
"end": 5331
} | enum ____ {
GENERATED_BASIC(true, null, RestStatus.OK),
ALREADY_USING_BASIC(false, "Operation failed: Current license is basic.", RestStatus.FORBIDDEN),
NEED_ACKNOWLEDGEMENT(false, "Operation failed: Needs acknowledgement.", RestStatus.OK);
private final boolean isBasicStarted;
private final String errorMessage;
private final RestStatus restStatus;
Status(boolean isBasicStarted, String errorMessage, RestStatus restStatus) {
this.isBasicStarted = isBasicStarted;
this.errorMessage = errorMessage;
this.restStatus = restStatus;
}
public boolean isBasicStarted() {
return isBasicStarted;
}
public String getErrorMessage() {
return errorMessage;
}
}
private final Status status;
public PostStartBasicResponse(StreamInput in) throws IOException {
super(in);
status = in.readEnum(Status.class);
acknowledgeMessage = in.readOptionalString();
int size = in.readVInt();
Map<String, String[]> acknowledgeMessages = Maps.newMapWithExpectedSize(size);
for (int i = 0; i < size; i++) {
String feature = in.readString();
int nMessages = in.readVInt();
String[] messages = new String[nMessages];
for (int j = 0; j < nMessages; j++) {
messages[j] = in.readString();
}
acknowledgeMessages.put(feature, messages);
}
this.acknowledgeMessages = acknowledgeMessages;
}
PostStartBasicResponse(Status status) {
this(status, Collections.emptyMap(), null);
}
public PostStartBasicResponse(Status status, Map<String, String[]> acknowledgeMessages, String acknowledgeMessage) {
super(status != Status.NEED_ACKNOWLEDGEMENT);
this.status = status;
this.acknowledgeMessages = acknowledgeMessages;
this.acknowledgeMessage = acknowledgeMessage;
}
public Status getStatus() {
return status;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeEnum(status);
out.writeOptionalString(acknowledgeMessage);
out.writeMap(acknowledgeMessages, StreamOutput::writeStringArray);
}
@Override
protected void addCustomFields(XContentBuilder builder, Params params) throws IOException {
if (status.isBasicStarted()) {
builder.field(BASIC_WAS_STARTED_FIELD.getPreferredName(), true);
} else {
builder.field(BASIC_WAS_STARTED_FIELD.getPreferredName(), false);
builder.field(ERROR_MESSAGE_FIELD.getPreferredName(), status.getErrorMessage());
}
if (acknowledgeMessages.isEmpty() == false) {
builder.startObject("acknowledge");
builder.field(MESSAGE_FIELD.getPreferredName(), acknowledgeMessage);
for (Map.Entry<String, String[]> entry : acknowledgeMessages.entrySet()) {
builder.array(entry.getKey(), entry.getValue());
}
builder.endObject();
}
}
public RestStatus status() {
return status.restStatus;
}
public String getAcknowledgeMessage() {
return acknowledgeMessage;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (super.equals(o) == false) return false;
PostStartBasicResponse that = (PostStartBasicResponse) o;
return status == that.status
&& ProtocolUtils.equals(acknowledgeMessages, that.acknowledgeMessages)
&& Objects.equals(acknowledgeMessage, that.acknowledgeMessage);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), status, ProtocolUtils.hashCode(acknowledgeMessages), acknowledgeMessage);
}
}
| Status |
java | elastic__elasticsearch | modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java | {
"start": 1841,
"end": 3395
} | class ____ extends AbstractTokenFilterFactory {
private final CharArraySet keepWords;
private static final String KEEP_WORDS_KEY = "keep_words";
private static final String KEEP_WORDS_PATH_KEY = KEEP_WORDS_KEY + "_path";
@SuppressWarnings("unused")
private static final String KEEP_WORDS_CASE_KEY = KEEP_WORDS_KEY + "_case"; // for javadoc
// unsupported ancient option
private static final String ENABLE_POS_INC_KEY = "enable_position_increments";
KeepWordFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name);
final List<String> arrayKeepWords = settings.getAsList(KEEP_WORDS_KEY, null);
final String keepWordsPath = settings.get(KEEP_WORDS_PATH_KEY, null);
if ((arrayKeepWords == null && keepWordsPath == null) || (arrayKeepWords != null && keepWordsPath != null)) {
// we don't allow both or none
throw new IllegalArgumentException(
"keep requires either `" + KEEP_WORDS_KEY + "` or `" + KEEP_WORDS_PATH_KEY + "` to be configured"
);
}
if (settings.get(ENABLE_POS_INC_KEY) != null) {
throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain");
}
this.keepWords = Analysis.getWordSet(env, settings, KEEP_WORDS_KEY);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new KeepWordFilter(tokenStream, keepWords);
}
}
| KeepWordFilterFactory |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/runners/DefaultInternalRunnerTest.java | {
"start": 3322,
"end": 3486
} | class ____ {
@Test
public void this_test_is_NOT_supposed_to_fail() {
assertTrue(true);
}
}
public static final | SuccessTest |
java | quarkusio__quarkus | extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfigBuilder.java | {
"start": 29002,
"end": 32922
} | class ____ {
private record ResourceMetadataImpl(boolean enabled, Optional<String> resource, Optional<Set<String>> scopes,
Optional<String> authorizationServer, boolean forceHttpsScheme) implements ResourceMetadata {
}
private final OidcTenantConfigBuilder builder;
private boolean enabled;
private Optional<String> resource;
private Optional<Set<String>> scopes;
private Optional<String> authorizationServer;
private boolean forceHttpsScheme;
public ResourceMetadataBuilder() {
this(new OidcTenantConfigBuilder());
}
public ResourceMetadataBuilder(OidcTenantConfigBuilder builder) {
this.builder = Objects.requireNonNull(builder);
this.enabled = builder.resourceMetadata.enabled();
this.resource = builder.resourceMetadata.resource();
this.scopes = builder.resourceMetadata.scopes().isEmpty() ? Optional.empty()
: Optional.of(Set.copyOf(builder.resourceMetadata.scopes().get()));
this.authorizationServer = builder.resourceMetadata.authorizationServer();
this.forceHttpsScheme = builder.resourceMetadata.forceHttpsScheme();
}
/**
* {@link ResourceMetadata#enabled()}
*
* @return this builder
*/
public ResourceMetadataBuilder enabled() {
return enabled(true);
}
/**
* @param enabled {@link ResourceMetadata#enabled()}
* @return this builder
*/
public ResourceMetadataBuilder enabled(boolean enabled) {
this.enabled = enabled;
return this;
}
/**
* @param resource {@link ResourceMetadata#resource()}
* @return this builder
*/
public ResourceMetadataBuilder resource(String resource) {
this.resource = Optional.ofNullable(resource);
return this;
}
/**
* @param scopes {@link ResourceMetadata#scopes()}
* @return this builder
*/
public ResourceMetadataBuilder scopes(String scope) {
return this.scopes(Set.of(scope));
}
/**
* @param scopes {@link ResourceMetadata#scopes()}
* @return this builder
*/
public ResourceMetadataBuilder scopes(Set<String> scopes) {
this.scopes = Optional.ofNullable(scopes);
return this;
}
/**
* @param resource {@link ResourceMetadata#authorizationServer()}
* @return this builder
*/
public ResourceMetadataBuilder authorizationServer(String authorizationServer) {
this.authorizationServer = Optional.ofNullable(authorizationServer);
return this;
}
/**
* forceHttpsScheme {@link ResourceMetadata#forceHttpsScheme()}
*
* @return this builder
*/
public ResourceMetadataBuilder forceHttpsScheme() {
return forceHttpsScheme(true);
}
/**
* @param forceHttpsScheme {@link ResourceMetadata#forceHttpsScheme()}
* @return this builder
*/
public ResourceMetadataBuilder forceHttpsScheme(boolean forceHttpsScheme) {
this.forceHttpsScheme = forceHttpsScheme;
return this;
}
/**
* @return OidcTenantConfigBuilder builder
*/
public OidcTenantConfigBuilder end() {
return builder.resourceMetadata(build());
}
/**
* @return built ResourceMetadata
*/
public ResourceMetadata build() {
return new ResourceMetadataImpl(enabled, resource, scopes, authorizationServer, forceHttpsScheme);
}
}
/**
* Builder for the {@link Roles}.
*/
public static final | ResourceMetadataBuilder |
java | google__guava | android/guava-tests/test/com/google/common/collect/ForwardingListIteratorTest.java | {
"start": 953,
"end": 1630
} | class ____ extends TestCase {
@SuppressWarnings("rawtypes")
public void testForwarding() {
new ForwardingWrapperTester()
.testForwarding(
ListIterator.class,
new Function<ListIterator, ListIterator<?>>() {
@Override
public ListIterator<?> apply(ListIterator delegate) {
return wrap((ListIterator<?>) delegate);
}
});
}
private static <T> ListIterator<T> wrap(ListIterator<T> delegate) {
return new ForwardingListIterator<T>() {
@Override
protected ListIterator<T> delegate() {
return delegate;
}
};
}
}
| ForwardingListIteratorTest |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/util/pool/FactoryPools.java | {
"start": 4852,
"end": 5052
} | interface ____<T> {
void reset(@NonNull T object);
}
/**
* Allows additional verification to catch errors caused by using objects while they are in an
* object pool.
*/
public | Resetter |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/ChangeContainerNameDecorator.java | {
"start": 1392,
"end": 2527
} | class ____ extends ApplicationContainerDecorator<ContainerFluent<?>> {
private final String name;
public ChangeContainerNameDecorator(String name) {
this.name = name;
}
@Override
public void andThenVisit(ContainerFluent<?> containerFluent) {
containerFluent.withName(name);
}
@Override
public Class<? extends Decorator>[] after() {
return new Class[] {
ApplyRequestsMemoryDecorator.class, AddEnvVarDecorator.class, AddMountDecorator.class,
AddPortDecorator.class, ApplyArgsDecorator.class, ApplyCommandDecorator.class,
ApplyImagePullPolicyDecorator.class,
ApplyLimitsCpuDecorator.class, ApplyLimitsMemoryDecorator.class, ApplyPortNameDecorator.class,
ApplyRequestsCpuDecorator.class,
ApplyWorkingDirDecorator.class, ResourceProvidingDecorator.class, AddSidecarDecorator.class,
AddInitContainerDecorator.class,
AddLivenessProbeDecorator.class, AddReadinessProbeDecorator.class, ApplyImageDecorator.class
};
}
}
| ChangeContainerNameDecorator |
java | elastic__elasticsearch | libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentGenerator.java | {
"start": 1866,
"end": 20879
} | class ____ implements XContentGenerator {
/** Generator used to write content **/
protected final JsonGenerator generator;
/**
* Reference to base generator because
* writing raw values needs a specific method call.
*/
private final GeneratorBase base;
/**
* Reference to filtering generator because
* writing an empty object '{}' when everything is filtered
* out needs a specific treatment
*/
private final FilteringGeneratorDelegate filter;
private final OutputStream os;
private boolean writeLineFeedAtEnd;
private static final SerializedString LF = new SerializedString("\n");
private static final DefaultPrettyPrinter.Indenter INDENTER = new DefaultIndenter(" ", LF.getValue());
private boolean prettyPrint = false;
public JsonXContentGenerator(JsonGenerator baseJsonGenerator, OutputStream os, Set<String> includes, Set<String> excludes) {
Objects.requireNonNull(includes, "Including filters must not be null");
Objects.requireNonNull(excludes, "Excluding filters must not be null");
this.os = os;
if (baseJsonGenerator instanceof GeneratorBase) {
this.base = (GeneratorBase) baseJsonGenerator;
} else {
this.base = null;
}
JsonGenerator jsonGenerator = baseJsonGenerator;
boolean hasExcludes = excludes.isEmpty() == false;
if (hasExcludes) {
jsonGenerator = new FilteringGeneratorDelegate(jsonGenerator, new FilterPathBasedFilter(excludes, false), true, true);
}
boolean hasIncludes = includes.isEmpty() == false;
if (hasIncludes) {
jsonGenerator = new FilteringGeneratorDelegate(jsonGenerator, new FilterPathBasedFilter(includes, true), true, true);
}
if (hasExcludes || hasIncludes) {
this.filter = (FilteringGeneratorDelegate) jsonGenerator;
} else {
this.filter = null;
}
this.generator = jsonGenerator;
}
@Override
public XContentType contentType() {
return XContentType.JSON;
}
@Override
public final void usePrettyPrint() {
generator.setPrettyPrinter(new DefaultPrettyPrinter().withObjectIndenter(INDENTER).withArrayIndenter(INDENTER));
prettyPrint = true;
}
@Override
public boolean isPrettyPrint() {
return this.prettyPrint;
}
@Override
public void usePrintLineFeedAtEnd() {
writeLineFeedAtEnd = true;
}
private boolean isFiltered() {
return filter != null;
}
private JsonGenerator getLowLevelGenerator() {
if (isFiltered()) {
JsonGenerator delegate = filter.getDelegate();
if (delegate instanceof JsonGeneratorDelegate) {
// In case of combined inclusion and exclusion filters, we have one and only one another delegating level
delegate = ((JsonGeneratorDelegate) delegate).getDelegate();
assert delegate instanceof JsonGeneratorDelegate == false;
}
return delegate;
}
return generator;
}
private boolean inRoot() {
JsonStreamContext context = generator.getOutputContext();
return ((context != null) && (context.inRoot() && context.getCurrentName() == null));
}
@Override
public void writeStartObject() throws IOException {
if (inRoot()) {
// Use the low level generator to write the startObject so that the root
// start object is always written even if a filtered generator is used
getLowLevelGenerator().writeStartObject();
return;
}
generator.writeStartObject();
}
@Override
public void writeEndObject() throws IOException {
try {
if (inRoot()) {
// Use the low level generator to write the startObject so that the root
// start object is always written even if a filtered generator is used
getLowLevelGenerator().writeEndObject();
return;
}
generator.writeEndObject();
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeStartArray() throws IOException {
try {
generator.writeStartArray();
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeEndArray() throws IOException {
try {
generator.writeEndArray();
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeFieldName(String name) throws IOException {
try {
generator.writeFieldName(name);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNull() throws IOException {
try {
generator.writeNull();
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNullField(String name) throws IOException {
try {
generator.writeNullField(name);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeBooleanField(String name, boolean value) throws IOException {
try {
generator.writeBooleanField(name, value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeBoolean(boolean value) throws IOException {
try {
generator.writeBoolean(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumberField(String name, double value) throws IOException {
try {
generator.writeNumberField(name, value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumber(double value) throws IOException {
try {
generator.writeNumber(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumberField(String name, float value) throws IOException {
try {
generator.writeNumberField(name, value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumber(float value) throws IOException {
try {
generator.writeNumber(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumberField(String name, int value) throws IOException {
try {
generator.writeNumberField(name, value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumberField(String name, BigInteger value) throws IOException {
// as jackson's JsonGenerator doesn't have this method for BigInteger
// we have to implement it ourselves
try {
generator.writeFieldName(name);
generator.writeNumber(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumberField(String name, BigDecimal value) throws IOException {
try {
generator.writeNumberField(name, value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumber(int value) throws IOException {
try {
generator.writeNumber(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumberField(String name, long value) throws IOException {
try {
generator.writeNumberField(name, value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumber(long value) throws IOException {
try {
generator.writeNumber(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumber(short value) throws IOException {
try {
generator.writeNumber(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumber(BigInteger value) throws IOException {
try {
generator.writeNumber(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeNumber(BigDecimal value) throws IOException {
try {
generator.writeNumber(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeStringField(String name, String value) throws IOException {
try {
generator.writeStringField(name, value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeString(String value) throws IOException {
try {
generator.writeString(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeStringArray(String[] array) throws IOException {
try {
if (isFiltered()) {
// filtered serialization does not work correctly with the bulk array serializer, so we need to fall back to serializing
// the array one-by-one
// TODO: this can probably be removed after upgrading Jackson to 2.15.1 or later, see
// https://github.com/FasterXML/jackson-core/issues/1023
writeStringArrayFiltered(array);
} else {
generator.writeArray(array, 0, array.length);
}
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
private void writeStringArrayFiltered(String[] array) throws IOException {
writeStartArray();
for (String s : array) {
writeString(s);
}
writeEndArray();
}
@Override
public void writeString(char[] value, int offset, int len) throws IOException {
try {
generator.writeString(value, offset, len);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeUTF8String(byte[] value, int offset, int length) throws IOException {
try {
generator.writeUTF8String(value, offset, length);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeBinaryField(String name, byte[] value) throws IOException {
try {
generator.writeBinaryField(name, value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeBinary(byte[] value) throws IOException {
try {
generator.writeBinary(value);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
@Override
public void writeBinary(byte[] value, int offset, int len) throws IOException {
try {
generator.writeBinary(value, offset, len);
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
private void writeStartRaw(String name) throws IOException {
try {
writeFieldName(name);
generator.writeRaw(':');
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
public void writeEndRaw() {
assert base != null : "JsonGenerator should be of instance GeneratorBase but was: " + generator.getClass();
if (base != null) {
JsonStreamContext context = base.getOutputContext();
assert (context instanceof JsonWriteContext) : "Expected an instance of JsonWriteContext but was: " + context.getClass();
((JsonWriteContext) context).writeValue();
}
}
@Override
public void writeRawField(String name, InputStream content) throws IOException {
if (content.markSupported() == false) {
// needed for the XContentFactory.xContentType call
content = new BufferedInputStream(content);
}
XContentType contentType = XContentFactory.xContentType(content);
if (contentType == null) {
throw new IllegalArgumentException("Can't write raw bytes whose xcontent-type can't be guessed");
}
writeRawField(name, content, contentType);
}
@Override
public void writeRawField(String name, InputStream content, XContentType contentType) throws IOException {
if (mayWriteRawData(contentType) == false) {
try (XContentParser parser = XContentFactory.xContent(contentType).createParser(XContentParserConfiguration.EMPTY, content)) {
parser.nextToken();
writeFieldName(name);
copyCurrentStructure(parser);
}
} else {
writeStartRaw(name);
flush();
Streams.copy(content, os);
writeEndRaw();
}
}
@Override
public void writeRawValue(InputStream stream, XContentType xContentType) throws IOException {
if (mayWriteRawData(xContentType) == false) {
copyRawValue(stream, xContentType.xContent());
} else {
if (generator.getOutputContext().getCurrentName() != null) {
// If we've just started a field we'll need to add the separator
generator.writeRaw(':');
}
flush();
Streams.copy(stream, os, false);
writeEndRaw();
}
}
@Override
public void writeRawValue(String value) throws IOException {
try {
if (supportsRawWrites()) {
generator.writeRawValue(value);
} else {
// fallback to a regular string for formats that don't allow writing the value as is
generator.writeString(value);
}
} catch (JsonGenerationException e) {
throw new XContentGenerationException(e);
}
}
private boolean mayWriteRawData(XContentType contentType) {
// When the current generator is filtered (ie filter != null)
// or the content is in a different format than the current generator,
// we need to copy the whole structure so that it will be correctly
// filtered or converted
return supportsRawWrites() && isFiltered() == false && contentType == contentType() && prettyPrint == false;
}
/** Whether this generator supports writing raw data directly */
protected boolean supportsRawWrites() {
return true;
}
protected void copyRawValue(InputStream stream, XContent xContent) throws IOException {
try (XContentParser parser = xContent.createParser(XContentParserConfiguration.EMPTY, stream)) {
copyCurrentStructure(parser);
}
}
@Override
public void copyCurrentStructure(XContentParser parser) throws IOException {
// the start of the parser
if (parser.currentToken() == null) {
parser.nextToken();
}
if (parser instanceof JsonXContentParser) {
generator.copyCurrentStructure(((JsonXContentParser) parser).parser);
} else {
copyCurrentStructure(this, parser);
}
}
/**
* Low level implementation detail of {@link XContentGenerator#copyCurrentStructure(XContentParser)}.
*/
private static void copyCurrentStructure(XContentGenerator destination, XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
// Let's handle field-name separately first
if (token == XContentParser.Token.FIELD_NAME) {
destination.writeFieldName(parser.currentName());
token = parser.nextToken();
// fall-through to copy the associated value
}
switch (token) {
case START_ARRAY -> {
destination.writeStartArray();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
copyCurrentStructure(destination, parser);
}
destination.writeEndArray();
}
case START_OBJECT -> {
destination.writeStartObject();
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
copyCurrentStructure(destination, parser);
}
destination.writeEndObject();
}
default -> // others are simple:
destination.copyCurrentEvent(parser);
}
}
@Override
public void writeDirectField(String name, CheckedConsumer<OutputStream, IOException> writer) throws IOException {
writeStartRaw(name);
flush();
writer.accept(os);
flush();
writeEndRaw();
}
@Override
public void flush() throws IOException {
generator.flush();
}
@Override
public void close() throws IOException {
if (generator.isClosed()) {
return;
}
JsonStreamContext context = generator.getOutputContext();
if ((context != null) && (context.inRoot() == false)) {
throw new IOException("Unclosed object or array found");
}
if (writeLineFeedAtEnd) {
flush();
// Bypass generator to always write the line feed
getLowLevelGenerator().writeRaw(LF);
}
generator.close();
}
@Override
public boolean isClosed() {
return generator.isClosed();
}
}
| JsonXContentGenerator |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2Connection.java | {
"start": 22946,
"end": 23015
} | class ____ the connection, itself.
*/
private final | representing |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java | {
"start": 145169,
"end": 148178
} | class ____ extends MetadataFieldMapper {
private static final String CONTENT_TYPE = "_mock_metadata";
private static final String FIELD_NAME = "_mock_metadata";
protected MockMetadataMapper() {
super(new KeywordFieldMapper.KeywordFieldType(FIELD_NAME));
}
@Override
protected void parseCreateField(DocumentParserContext context) throws IOException {
if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) {
context.doc().add(new StringField(FIELD_NAME, context.parser().text(), Field.Store.YES));
} else {
throw new IllegalArgumentException("Field [" + fieldType().name() + "] must be a string.");
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
protected SyntheticSourceSupport syntheticSourceSupport() {
return new SyntheticSourceSupport.Native(() -> new StringStoredFieldFieldLoader(fullPath(), leafName()) {
@Override
protected void write(XContentBuilder b, Object value) throws IOException {
BytesRef ref = (BytesRef) value;
b.utf8Value(ref.bytes, ref.offset, ref.length);
}
});
}
private static final TypeParser PARSER = new FixedTypeParser(c -> new MockMetadataMapper());
}
@Override
public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
return Collections.singletonMap(MockMetadataMapper.CONTENT_TYPE, MockMetadataMapper.PARSER);
}
@Override
public Map<String, RuntimeField.Parser> getRuntimeFields() {
return Collections.singletonMap("test-composite", new RuntimeField.Parser(n -> new RuntimeField.Builder(n) {
@Override
protected RuntimeField createRuntimeField(MappingParserContext parserContext) {
return new TestRuntimeField(
n,
List.of(
new TestRuntimeField.TestRuntimeFieldType(n + ".foo", KeywordFieldMapper.CONTENT_TYPE),
new TestRuntimeField.TestRuntimeFieldType(n + ".bar", KeywordFieldMapper.CONTENT_TYPE)
)
);
}
@Override
protected RuntimeField createChildRuntimeField(
MappingParserContext parserContext,
String parentName,
Function<SearchLookup, CompositeFieldScript.LeafFactory> parentScriptFactory,
OnScriptError onScriptError
) {
throw new UnsupportedOperationException();
}
}));
}
}
}
| MockMetadataMapper |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/transaction/TransactionalTestExecutionListener.java | {
"start": 4691,
"end": 5610
} | class ____ and at the
* method level.
*
* <h3>Programmatic Transaction Management</h3>
* <p>As of Spring Framework 4.1, it is possible to interact with test-managed
* transactions programmatically via the static methods in {@link TestTransaction}.
* {@code TestTransaction} may be used within <em>test</em> methods,
* <em>before</em> methods, and <em>after</em> methods.
*
* <h3>Executing Code outside of a Transaction</h3>
* <p>When executing transactional tests, it is sometimes useful to be able to
* execute certain <em>set up</em> or <em>tear down</em> code outside a
* transaction. {@code TransactionalTestExecutionListener} provides such
* support for methods annotated with {@link BeforeTransaction @BeforeTransaction}
* or {@link AfterTransaction @AfterTransaction}. As of Spring Framework 4.3,
* {@code @BeforeTransaction} and {@code @AfterTransaction} may also be declared
* on Java 8 based | level |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/codec/xml/XmlEventDecoderTests.java | {
"start": 1224,
"end": 5691
} | class ____ extends AbstractLeakCheckingTests {
private static final String XML = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
"<pojo>" +
"<foo>foofoo</foo>" +
"<bar>barbar</bar>" +
"</pojo>";
private final XmlEventDecoder decoder = new XmlEventDecoder();
@Test
void toXMLEventsAalto() {
Flux<XMLEvent> events =
this.decoder.decode(stringBufferMono(XML), null, null, Collections.emptyMap());
StepVerifier.create(events)
.consumeNextWith(e -> assertThat(e.isStartDocument()).isTrue())
.consumeNextWith(e -> assertStartElement(e, "pojo"))
.consumeNextWith(e -> assertStartElement(e, "foo"))
.consumeNextWith(e -> assertCharacters(e, "foofoo"))
.consumeNextWith(e -> assertEndElement(e, "foo"))
.consumeNextWith(e -> assertStartElement(e, "bar"))
.consumeNextWith(e -> assertCharacters(e, "barbar"))
.consumeNextWith(e -> assertEndElement(e, "bar"))
.consumeNextWith(e -> assertEndElement(e, "pojo"))
.expectComplete()
.verify();
}
@Test
void toXMLEventsNonAalto() {
decoder.useAalto = false;
Flux<XMLEvent> events =
this.decoder.decode(stringBufferMono(XML), null, null, Collections.emptyMap());
StepVerifier.create(events)
.consumeNextWith(e -> assertThat(e.isStartDocument()).isTrue())
.consumeNextWith(e -> assertStartElement(e, "pojo"))
.consumeNextWith(e -> assertStartElement(e, "foo"))
.consumeNextWith(e -> assertCharacters(e, "foofoo"))
.consumeNextWith(e -> assertEndElement(e, "foo"))
.consumeNextWith(e -> assertStartElement(e, "bar"))
.consumeNextWith(e -> assertCharacters(e, "barbar"))
.consumeNextWith(e -> assertEndElement(e, "bar"))
.consumeNextWith(e -> assertEndElement(e, "pojo"))
.consumeNextWith(e -> assertThat(e.isEndDocument()).isTrue())
.expectComplete()
.verify();
}
@Test
void toXMLEventsWithLimit() {
this.decoder.setMaxInMemorySize(6);
Flux<String> source = Flux.just(
"<pojo>", "<foo>", "foofoo", "</foo>", "<bar>", "barbarbar", "</bar>", "</pojo>");
Flux<XMLEvent> events = this.decoder.decode(
source.map(this::stringBuffer), null, null, Collections.emptyMap());
StepVerifier.create(events)
.consumeNextWith(e -> assertThat(e.isStartDocument()).isTrue())
.consumeNextWith(e -> assertStartElement(e, "pojo"))
.consumeNextWith(e -> assertStartElement(e, "foo"))
.consumeNextWith(e -> assertCharacters(e, "foofoo"))
.consumeNextWith(e -> assertEndElement(e, "foo"))
.consumeNextWith(e -> assertStartElement(e, "bar"))
.expectError(DataBufferLimitException.class)
.verify();
}
@Test
void decodeErrorAalto() {
Flux<DataBuffer> source = Flux.concat(
stringBufferMono("<pojo>"),
Flux.error(new RuntimeException()));
Flux<XMLEvent> events =
this.decoder.decode(source, null, null, Collections.emptyMap());
StepVerifier.create(events)
.consumeNextWith(e -> assertThat(e.isStartDocument()).isTrue())
.consumeNextWith(e -> assertStartElement(e, "pojo"))
.expectError(RuntimeException.class)
.verify();
}
@Test
void decodeErrorNonAalto() {
decoder.useAalto = false;
Flux<DataBuffer> source = Flux.concat(
stringBufferMono("<pojo>"),
Flux.error(new RuntimeException()));
Flux<XMLEvent> events =
this.decoder.decode(source, null, null, Collections.emptyMap());
StepVerifier.create(events)
.expectError(RuntimeException.class)
.verify();
}
private static void assertStartElement(XMLEvent event, String expectedLocalName) {
assertThat(event.isStartElement()).isTrue();
assertThat(event.asStartElement().getName().getLocalPart()).isEqualTo(expectedLocalName);
}
private static void assertEndElement(XMLEvent event, String expectedLocalName) {
assertThat(event.isEndElement()).as(event + " is no end element").isTrue();
assertThat(event.asEndElement().getName().getLocalPart()).isEqualTo(expectedLocalName);
}
private static void assertCharacters(XMLEvent event, String expectedData) {
assertThat(event.isCharacters()).isTrue();
assertThat(event.asCharacters().getData()).isEqualTo(expectedData);
}
private DataBuffer stringBuffer(String value) {
byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
DataBuffer buffer = this.bufferFactory.allocateBuffer(bytes.length);
buffer.write(bytes);
return buffer;
}
private Mono<DataBuffer> stringBufferMono(String value) {
return Mono.defer(() -> Mono.just(stringBuffer(value)));
}
}
| XmlEventDecoderTests |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java | {
"start": 1099,
"end": 6349
} | class ____ extends ESTestCase {
private BertTokenizer tokenizer;
@After
public void closeIt() {
if (tokenizer != null) {
tokenizer.close();
}
}
public void testBuildRequest() throws IOException {
tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null, null)).build();
var requestBuilder = tokenizer.requestBuilder();
NlpTask.Request request = requestBuilder.buildRequest(
List.of("Elasticsearch fun"),
"request1",
Tokenization.Truncate.NONE,
-1,
null
);
Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();
assertThat(jsonDocAsMap.keySet(), hasSize(5));
assertEquals("request1", jsonDocAsMap.get("request_id"));
assertEquals(Arrays.asList(12, 0, 1, 3, 13), firstListItemFromMap("tokens", jsonDocAsMap));
assertEquals(Arrays.asList(1, 1, 1, 1, 1), firstListItemFromMap("arg_1", jsonDocAsMap));
assertEquals(Arrays.asList(0, 0, 0, 0, 0), firstListItemFromMap("arg_2", jsonDocAsMap));
assertEquals(Arrays.asList(0, 1, 2, 3, 4), firstListItemFromMap("arg_3", jsonDocAsMap));
}
private List<Integer> firstListItemFromMap(String name, Map<String, Object> jsonDocAsMap) {
return nthListItemFromMap(name, 0, jsonDocAsMap);
}
@SuppressWarnings("unchecked")
public static List<Integer> nthListItemFromMap(String name, int n, Map<String, Object> jsonDocAsMap) {
return ((List<List<Integer>>) jsonDocAsMap.get(name)).get(n);
}
public void testInputTooLarge() throws IOException {
tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 5, null, null)).build();
{
var requestBuilder = tokenizer.requestBuilder();
ElasticsearchStatusException e = expectThrows(
ElasticsearchStatusException.class,
() -> requestBuilder.buildRequest(
Collections.singletonList("Elasticsearch fun Elasticsearch fun Elasticsearch fun"),
"request1",
Tokenization.Truncate.NONE,
-1,
null
)
);
assertThat(
e.getMessage(),
containsString("Input too large. The tokenized input length [11] exceeds the maximum sequence length [5]")
);
}
{
var requestBuilder = tokenizer.requestBuilder();
// input will become 3 tokens + the Class and Separator token = 5 which is
// our max sequence length
requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE, -1, null);
}
}
@SuppressWarnings("unchecked")
public void testBatchWithPadding() throws IOException {
tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null, null)).build();
var requestBuilder = tokenizer.requestBuilder();
NlpTask.Request request = requestBuilder.buildRequest(
List.of("Elasticsearch", "my little red car", "Godzilla day"),
"request1",
Tokenization.Truncate.NONE,
-1,
null
);
Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();
assertThat(jsonDocAsMap.keySet(), hasSize(5));
assertThat((List<List<Integer>>) jsonDocAsMap.get("tokens"), hasSize(3));
assertThat((List<List<Integer>>) jsonDocAsMap.get("arg_1"), hasSize(3));
assertThat((List<List<Integer>>) jsonDocAsMap.get("arg_2"), hasSize(3));
assertThat((List<List<Integer>>) jsonDocAsMap.get("arg_3"), hasSize(3));
assertEquals("request1", jsonDocAsMap.get("request_id"));
assertEquals(Arrays.asList(12, 0, 1, 13, 19, 19), nthListItemFromMap("tokens", 0, jsonDocAsMap));
assertEquals(Arrays.asList(1, 1, 1, 1, 19, 19), nthListItemFromMap("arg_1", 0, jsonDocAsMap));
assertEquals(Arrays.asList(0, 0, 0, 0, 0, 0), nthListItemFromMap("arg_2", 0, jsonDocAsMap));
assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), nthListItemFromMap("arg_3", 0, jsonDocAsMap));
assertEquals(Arrays.asList(12, 4, 5, 6, 7, 13), nthListItemFromMap("tokens", 1, jsonDocAsMap));
assertEquals(Arrays.asList(1, 1, 1, 1, 1, 1), nthListItemFromMap("arg_1", 1, jsonDocAsMap));
assertEquals(Arrays.asList(0, 0, 0, 0, 0, 0), nthListItemFromMap("arg_2", 1, jsonDocAsMap));
assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), nthListItemFromMap("arg_3", 1, jsonDocAsMap));
assertEquals(Arrays.asList(12, 8, 9, 16, 13, 19), nthListItemFromMap("tokens", 2, jsonDocAsMap));
assertEquals(Arrays.asList(1, 1, 1, 1, 1, 19), nthListItemFromMap("arg_1", 2, jsonDocAsMap));
assertEquals(Arrays.asList(0, 0, 0, 0, 0, 0), nthListItemFromMap("arg_2", 2, jsonDocAsMap));
assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), nthListItemFromMap("arg_3", 2, jsonDocAsMap));
}
}
| BertTokenizationResultTests |
java | greenrobot__greendao | tests/DaoTestPerformance/src/androidTest/java/org/greenrobot/greendao/performance/target/LongSparseArray.java | {
"start": 1101,
"end": 9859
} | class ____<E> {
private static final Object DELETED = new Object();
private boolean mGarbage = false;
/**
* Creates a new SparseArray containing no mappings.
*/
public LongSparseArray() {
this(10);
}
/**
* Creates a new SparseArray containing no mappings that will not
* require any additional memory allocation to store the specified
* number of mappings.
*/
public LongSparseArray(int initialCapacity) {
initialCapacity = ArrayUtils.idealIntArraySize(initialCapacity);
mKeys = new long[initialCapacity];
mValues = new Object[initialCapacity];
mSize = 0;
}
/**
* @return A copy of all keys contained in the sparse array.
*/
public long[] getKeys() {
int length = mKeys.length;
long[] result = new long[length];
System.arraycopy(mKeys, 0, result, 0, length);
return result;
}
/**
* Sets all supplied keys to the given unique value.
* @param keys Keys to set
* @param uniqueValue Value to set all supplied keys to
*/
public void setValues(long[] keys, E uniqueValue) {
int length = keys.length;
for (int i = 0; i < length; i++) {
put(keys[i], uniqueValue);
}
}
/**
* Gets the Object mapped from the specified key, or <code>null</code>
* if no such mapping has been made.
*/
public E get(long key) {
return get(key, null);
}
/**
* Gets the Object mapped from the specified key, or the specified Object
* if no such mapping has been made.
*/
public E get(long key, E valueIfKeyNotFound) {
int i = binarySearch(mKeys, 0, mSize, key);
if (i < 0 || mValues[i] == DELETED) {
return valueIfKeyNotFound;
} else {
return (E) mValues[i];
}
}
/**
* Removes the mapping from the specified key, if there was any.
*/
public void delete(long key) {
int i = binarySearch(mKeys, 0, mSize, key);
if (i >= 0) {
if (mValues[i] != DELETED) {
mValues[i] = DELETED;
mGarbage = true;
}
}
}
/**
* Alias for {@link #delete(long)}.
*/
public void remove(long key) {
delete(key);
}
private void gc() {
// Log.e("SparseArray", "gc start with " + mSize);
int n = mSize;
int o = 0;
long[] keys = mKeys;
Object[] values = mValues;
for (int i = 0; i < n; i++) {
Object val = values[i];
if (val != DELETED) {
if (i != o) {
keys[o] = keys[i];
values[o] = val;
}
o++;
}
}
mGarbage = false;
mSize = o;
// Log.e("SparseArray", "gc end with " + mSize);
}
/**
* Adds a mapping from the specified key to the specified value,
* replacing the previous mapping from the specified key if there
* was one.
*/
public void put(long key, E value) {
int i = binarySearch(mKeys, 0, mSize, key);
if (i >= 0) {
mValues[i] = value;
} else {
i = ~i;
if (i < mSize && mValues[i] == DELETED) {
mKeys[i] = key;
mValues[i] = value;
return;
}
if (mGarbage && mSize >= mKeys.length) {
gc();
// Search again because indices may have changed.
i = ~binarySearch(mKeys, 0, mSize, key);
}
if (mSize >= mKeys.length) {
int n = ArrayUtils.idealIntArraySize(mSize + 1);
long[] nkeys = new long[n];
Object[] nvalues = new Object[n];
// Log.e("SparseArray", "grow " + mKeys.length + " to " + n);
System.arraycopy(mKeys, 0, nkeys, 0, mKeys.length);
System.arraycopy(mValues, 0, nvalues, 0, mValues.length);
mKeys = nkeys;
mValues = nvalues;
}
if (mSize - i != 0) {
// Log.e("SparseArray", "move " + (mSize - i));
System.arraycopy(mKeys, i, mKeys, i + 1, mSize - i);
System.arraycopy(mValues, i, mValues, i + 1, mSize - i);
}
mKeys[i] = key;
mValues[i] = value;
mSize++;
}
}
/**
* Returns the number of key-value mappings that this SparseArray
* currently stores.
*/
public int size() {
if (mGarbage) {
gc();
}
return mSize;
}
/**
* Given an index in the range <code>0...size()-1</code>, returns
* the key from the <code>index</code>th key-value mapping that this
* SparseArray stores.
*/
public long keyAt(int index) {
if (mGarbage) {
gc();
}
return mKeys[index];
}
/**
* Given an index in the range <code>0...size()-1</code>, returns
* the value from the <code>index</code>th key-value mapping that this
* SparseArray stores.
*/
public E valueAt(int index) {
if (mGarbage) {
gc();
}
return (E) mValues[index];
}
/**
* Given an index in the range <code>0...size()-1</code>, sets a new
* value for the <code>index</code>th key-value mapping that this
* SparseArray stores.
*/
public void setValueAt(int index, E value) {
if (mGarbage) {
gc();
}
mValues[index] = value;
}
/**
* Returns the index for which {@link #keyAt} would return the
* specified key, or a negative number if the specified
* key is not mapped.
*/
public int indexOfKey(long key) {
if (mGarbage) {
gc();
}
return binarySearch(mKeys, 0, mSize, key);
}
/**
* Returns an index for which {@link #valueAt} would return the
* specified key, or a negative number if no keys map to the
* specified value.
* Beware that this is a linear search, unlike lookups by key,
* and that multiple keys can map to the same value and this will
* find only one of them.
*/
public int indexOfValue(E value) {
if (mGarbage) {
gc();
}
for (int i = 0; i < mSize; i++)
if (mValues[i] == value)
return i;
return -1;
}
/**
* Removes all key-value mappings from this SparseArray.
*/
public void clear() {
int n = mSize;
Object[] values = mValues;
for (int i = 0; i < n; i++) {
values[i] = null;
}
mSize = 0;
mGarbage = false;
}
/**
* Puts a key/value pair into the array, optimizing for the case where
* the key is greater than all existing keys in the array.
*/
public void append(long key, E value) {
if (mSize != 0 && key <= mKeys[mSize - 1]) {
put(key, value);
return;
}
if (mGarbage && mSize >= mKeys.length) {
gc();
}
int pos = mSize;
if (pos >= mKeys.length) {
int n = ArrayUtils.idealIntArraySize(pos + 1);
long[] nkeys = new long[n];
Object[] nvalues = new Object[n];
// Log.e("SparseArray", "grow " + mKeys.length + " to " + n);
System.arraycopy(mKeys, 0, nkeys, 0, mKeys.length);
System.arraycopy(mValues, 0, nvalues, 0, mValues.length);
mKeys = nkeys;
mValues = nvalues;
}
mKeys[pos] = key;
mValues[pos] = value;
mSize = pos + 1;
}
private static int binarySearch(long[] a, int start, int len, long key) {
int high = start + len, low = start - 1, guess;
while (high - low > 1) {
guess = (high + low) / 2;
if (a[guess] < key)
low = guess;
else
high = guess;
}
if (high == start + len)
return ~(start + len);
else if (a[high] == key)
return high;
else
return ~high;
}
private void checkIntegrity() {
for (int i = 1; i < mSize; i++) {
if (mKeys[i] <= mKeys[i - 1]) {
for (int j = 0; j < mSize; j++) {
Log.e("FAIL", j + ": " + mKeys[j] + " -> " + mValues[j]);
}
throw new RuntimeException();
}
}
}
private long[] mKeys;
private Object[] mValues;
private int mSize;
} | LongSparseArray |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/catalog/IntervalFreshness.java | {
"start": 1399,
"end": 10708
} | class ____ {
private static final String SECOND_CRON_EXPRESSION_TEMPLATE = "0/%s * * * * ? *";
private static final String MINUTE_CRON_EXPRESSION_TEMPLATE = "0 0/%s * * * ? *";
private static final String HOUR_CRON_EXPRESSION_TEMPLATE = "0 0 0/%s * * ? *";
private static final String ONE_DAY_CRON_EXPRESSION_TEMPLATE = "0 0 0 * * ? *";
private static final long SECOND_CRON_UPPER_BOUND = 60;
private static final long MINUTE_CRON_UPPER_BOUND = 60;
private static final long HOUR_CRON_UPPER_BOUND = 24;
private final int interval;
private final TimeUnit timeUnit;
private IntervalFreshness(int interval, TimeUnit timeUnit) {
this.interval = interval;
this.timeUnit = timeUnit;
}
public static IntervalFreshness of(String interval, TimeUnit timeUnit) {
final int validateIntervalInput = validateIntervalInput(interval);
return new IntervalFreshness(validateIntervalInput, timeUnit);
}
private static int validateIntervalInput(final String interval) {
final int parsedInt;
try {
parsedInt = Integer.parseInt(interval);
} catch (Exception e) {
final String errorMessage =
String.format(
"The freshness interval currently only supports positive integer type values. But was: %s",
interval);
throw new ValidationException(errorMessage, e);
}
if (parsedInt <= 0) {
final String errorMessage =
String.format(
"The freshness interval currently only supports positive integer type values. But was: %s",
interval);
throw new ValidationException(errorMessage);
}
return parsedInt;
}
public static IntervalFreshness ofSecond(String interval) {
return IntervalFreshness.of(interval, TimeUnit.SECOND);
}
public static IntervalFreshness ofMinute(String interval) {
return IntervalFreshness.of(interval, TimeUnit.MINUTE);
}
public static IntervalFreshness ofHour(String interval) {
return IntervalFreshness.of(interval, TimeUnit.HOUR);
}
public static IntervalFreshness ofDay(String interval) {
return IntervalFreshness.of(interval, TimeUnit.DAY);
}
/**
* Validates that the given freshness can be converted to a cron expression in full refresh
* mode. Since freshness and cron expression cannot be converted equivalently, there are
* currently only a limited patterns of freshness that are supported.
*
* @param intervalFreshness the freshness to validate
* @throws ValidationException if the freshness cannot be converted to a valid cron expression
*/
public static void validateFreshnessForCron(IntervalFreshness intervalFreshness) {
switch (intervalFreshness.getTimeUnit()) {
case SECOND:
validateCronConstraints(intervalFreshness, SECOND_CRON_UPPER_BOUND);
break;
case MINUTE:
validateCronConstraints(intervalFreshness, MINUTE_CRON_UPPER_BOUND);
break;
case HOUR:
validateCronConstraints(intervalFreshness, HOUR_CRON_UPPER_BOUND);
break;
case DAY:
validateDayConstraints(intervalFreshness);
break;
default:
throw new ValidationException(
String.format(
"Unknown freshness time unit: %s.",
intervalFreshness.getTimeUnit()));
}
}
/**
* Converts the freshness of materialized table to cron expression in full refresh mode. The
* freshness must first pass validation via {@link #validateFreshnessForCron}.
*
* @param intervalFreshness the freshness to convert
* @return the corresponding cron expression
* @throws ValidationException if the freshness cannot be converted to a valid cron expression
*/
public static String convertFreshnessToCron(IntervalFreshness intervalFreshness) {
// First validate that conversion is possible
validateFreshnessForCron(intervalFreshness);
// Then perform the conversion
switch (intervalFreshness.getTimeUnit()) {
case SECOND:
return String.format(
SECOND_CRON_EXPRESSION_TEMPLATE, intervalFreshness.getIntervalInt());
case MINUTE:
return String.format(
MINUTE_CRON_EXPRESSION_TEMPLATE, intervalFreshness.getIntervalInt());
case HOUR:
return String.format(
HOUR_CRON_EXPRESSION_TEMPLATE, intervalFreshness.getIntervalInt());
case DAY:
return ONE_DAY_CRON_EXPRESSION_TEMPLATE;
default:
throw new ValidationException(
String.format(
"Unknown freshness time unit: %s.",
intervalFreshness.getTimeUnit()));
}
}
private static void validateCronConstraints(
IntervalFreshness intervalFreshness, long cronUpperBound) {
int interval = intervalFreshness.getIntervalInt();
TimeUnit timeUnit = intervalFreshness.getTimeUnit();
// Freshness must be less than cronUpperBound for corresponding time unit when convert it
// to cron expression
if (interval >= cronUpperBound) {
throw new ValidationException(
String.format(
"In full refresh mode, freshness must be less than %s when the time unit is %s.",
cronUpperBound, timeUnit));
}
// Freshness must be factors of cronUpperBound for corresponding time unit
if (cronUpperBound % interval != 0) {
throw new ValidationException(
String.format(
"In full refresh mode, only freshness that are factors of %s are currently supported when the time unit is %s.",
cronUpperBound, timeUnit));
}
}
private static void validateDayConstraints(IntervalFreshness intervalFreshness) {
// Since the number of days in each month is different, only one day of freshness is
// currently supported when the time unit is DAY
int interval = intervalFreshness.getIntervalInt();
if (interval > 1) {
throw new ValidationException(
"In full refresh mode, freshness must be 1 when the time unit is DAY.");
}
}
/**
* Creates an IntervalFreshness from a Duration, choosing the most appropriate time unit.
* Prefers larger units when possible (e.g., 60 seconds → 1 minute).
*/
public static IntervalFreshness fromDuration(Duration duration) {
if (duration.equals(duration.truncatedTo(ChronoUnit.DAYS))) {
return new IntervalFreshness((int) duration.toDays(), TimeUnit.DAY);
}
if (duration.equals(duration.truncatedTo(ChronoUnit.HOURS))) {
return new IntervalFreshness((int) duration.toHours(), TimeUnit.HOUR);
}
if (duration.equals(duration.truncatedTo(ChronoUnit.MINUTES))) {
return new IntervalFreshness((int) duration.toMinutes(), TimeUnit.MINUTE);
}
return new IntervalFreshness((int) duration.getSeconds(), TimeUnit.SECOND);
}
/**
* @deprecated Use {@link #getIntervalInt()} instead.
*/
@Deprecated
public String getInterval() {
return String.valueOf(interval);
}
public int getIntervalInt() {
return interval;
}
public TimeUnit getTimeUnit() {
return timeUnit;
}
public Duration toDuration() {
switch (timeUnit) {
case SECOND:
return Duration.ofSeconds(interval);
case MINUTE:
return Duration.ofMinutes(interval);
case HOUR:
return Duration.ofHours(interval);
case DAY:
return Duration.ofDays(interval);
default:
throw new IllegalStateException("Unexpected value: " + timeUnit);
}
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
IntervalFreshness that = (IntervalFreshness) o;
return Objects.equals(interval, that.interval) && timeUnit == that.timeUnit;
}
@Override
public String toString() {
return "INTERVAL '" + interval + "' " + timeUnit;
}
@Override
public int hashCode() {
return Objects.hash(interval, timeUnit);
}
// --------------------------------------------------------------------------------------------
// TimeUnit enums
// --------------------------------------------------------------------------------------------
/** An enumeration of time unit representing the unit of interval freshness. */
@PublicEvolving
public | IntervalFreshness |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedConsumerCacheTest.java | {
"start": 1548,
"end": 3857
} | class ____ extends ManagementTestSupport {
@Test
public void testManageConsumerCache() throws Exception {
// always register services in JMX so we can enlist our consumer template/cache
context.getManagementStrategy().getManagementAgent().setRegisterAlways(true);
DefaultConsumerCache cache = new DefaultConsumerCache(this, context, 0);
context.addService(cache);
template.sendBody("direct:start", "Hello World");
Endpoint endpoint = context.getEndpoint("seda:queue");
PollingConsumer consumer = cache.acquirePollingConsumer(endpoint);
Exchange out = consumer.receive(3000);
assertNotNull(out, "Should got an exchange");
assertEquals("Hello World", out.getIn().getBody());
// get the stats for the route
MBeanServer mbeanServer = getMBeanServer();
Set<ObjectName> set = mbeanServer.queryNames(new ObjectName("*:type=services,*"), null);
List<ObjectName> list = new ArrayList<>(set);
ObjectName on = null;
for (ObjectName name : list) {
if (name.getCanonicalName().contains("ConsumerCache")) {
on = name;
break;
}
}
assertNotNull(on, "Should have found ConsumerCache");
Integer max = (Integer) mbeanServer.getAttribute(on, "MaximumCacheSize");
assertEquals(1000, max.intValue());
Integer current = (Integer) mbeanServer.getAttribute(on, "Size");
assertEquals(1, current.intValue());
String source = (String) mbeanServer.getAttribute(on, "Source");
assertNotNull(source);
assertTrue(source.contains("testManageConsumerCache"));
// purge
mbeanServer.invoke(on, "purge", null, null);
current = (Integer) mbeanServer.getAttribute(on, "Size");
assertEquals(0, current.intValue());
// stop the consumer as it was purged from the cache
// so we need to manually stop it
consumer.stop();
cache.stop();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("seda:queue");
}
};
}
}
| ManagedConsumerCacheTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxErrorTests.java | {
"start": 795,
"end": 1599
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(MaxTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new Max(source, args.get(0));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(
typeErrorMessage(
false,
validPerPosition,
signature,
(v, p) -> "boolean, date, ip, string, version, aggregate_metric_double, "
+ "exponential_histogram or numeric except counter types"
)
);
}
}
| MaxErrorTests |
java | apache__camel | components/camel-quartz/src/test/java/org/apache/camel/routepolicy/quartz/CronScheduledRoutePolicyTest.java | {
"start": 1637,
"end": 3846
} | class ____ extends NoBuilderTest {
@Test
public void testScheduledStartRoutePolicyWithTwoRoutes() throws Exception {
MockEndpoint success1 = context.getEndpoint("mock:success1", MockEndpoint.class);
MockEndpoint success2 = context.getEndpoint("mock:success2", MockEndpoint.class);
success1.expectedMessageCount(1);
success2.expectedMessageCount(1);
context.getComponent("direct", DirectComponent.class).setBlock(false);
context.getComponent("quartz", QuartzComponent.class)
.setPropertiesFile("org/apache/camel/routepolicy/quartz/myquartz.properties");
context.addRoutes(new RouteBuilder() {
public void configure() {
CronScheduledRoutePolicy policy = new CronScheduledRoutePolicy();
policy.setRouteStartTime("*/3 * * * * ?");
from("direct:start1")
.routeId("test1")
.routePolicy(policy)
.to("mock:success1");
from("direct:start2")
.routeId("test2")
.routePolicy(policy)
.to("mock:success2");
}
});
context.start();
context.getRouteController().stopRoute("test1", 1000, TimeUnit.MILLISECONDS);
context.getRouteController().stopRoute("test2", 1000, TimeUnit.MILLISECONDS);
Awaitility.await().atMost(5, TimeUnit.SECONDS)
.untilAsserted(
() -> {
assertSame(ServiceStatus.Started, context.getRouteController().getRouteStatus("test1"));
assertSame(ServiceStatus.Started, context.getRouteController().getRouteStatus("test2"));
});
template.sendBody("direct:start1", "Ready or not, Here, I come");
template.sendBody("direct:start2", "Ready or not, Here, I come");
success1.assertIsSatisfied();
success2.assertIsSatisfied();
}
}
@Nested
| CronTest1 |
java | elastic__elasticsearch | x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementServiceTests.java | {
"start": 2454,
"end": 2872
} | class ____ extends LegacyActionRequest {
private final String string;
private final TimeValue keepAlive;
public TestRequest(String string, TimeValue keepAlive) {
this.string = string;
this.keepAlive = keepAlive;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}
public static | TestRequest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/TestRequestFilterWithHighestPriority.java | {
"start": 357,
"end": 621
} | class ____ implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
requestContext.getHeaders().add("filter-request", "authentication");
}
}
| TestRequestFilterWithHighestPriority |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/performance/DecoderPerformanceTest.java | {
"start": 429,
"end": 2904
} | class ____ extends TestCase {
final int COUNT = 1000 * 100;
private String text;
protected void setUp() throws Exception {
text = "{\"old\":true,\"description\":\"神棍\",\"name\":\"校长\",\"age\":3,\"salary\":123456789.0123}";
text = "[{\"S\":321061,\"T\":\"GetAttributeResp\"},{\"ERROR\":null,\"TS\":0,\"VAL\":{\"SqlList\":[{\"BatchSizeMax\":0,\"BatchSizeTotal\":0,\"ConcurrentMax\":1,\"DataSource\":\"jdbc:wrap-jdbc:filters=default,encoding:name=ds-offer:jdbc:mysql://100.10.10.10:8066/xxx\",\"EffectedRowCount\":0,\"ErrorCount\":0,\"ExecuteCount\":5,\"FetchRowCount\":5,\"File\":null,\"ID\":2001,\"LastError\":null,\"LastTime\":1292742908178,\"MaxTimespan\":16,\"MaxTimespanOccurTime\":1292742668191,\"Name\":null,\"RunningCount\":0,\"SQL\":\"SELECT @@SQL_MODE\",\"TotalTime\":83}]}}]";
// text =
// "[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]";
// text =
// "[\"description\", \"神棍\", \"校长\", \"old\",\"salary\", \"jdbc:wrap-jdbc:filters=default,encoding:name=ds-offer:jdbc:mysql://100.10.10.10:8066/xxx\"]";
// text =
// "{\"OLD\":true,\"DESCRIPTION\":\"神棍\",\"name\":\"校长\",\"AGE\":3,\"SALARY\":123456789.0123}";
text = "{\"badboy\":true,\"description\":\"神棍敌人姐\",\"name\":\"校长\",\"age\":3,\"birthdate\":1293278091773,\"salary\":123456789.0123}";
String resource = "json/group.json";
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource);
text = IOUtils.toString(is);
is.close();
}
public void test_performance() throws Exception {
List<Codec> decoders = new ArrayList<Codec>();
decoders.add(new JacksonCodec());
decoders.add(new FastjsonCodec());
for (int i = 0; i < 20; ++i) {
for (Codec decoder : decoders) {
decode(text, decoder);
// decodeToJavaBean(text, decoder);
}
System.out.println();
}
System.out.println();
System.out.println(text);
}
private void decode(String text, Codec decoder) throws Exception {
long startNano = System.nanoTime();
for (int i = 0; i < COUNT; ++i) {
decoder.decode(text);
}
long nano = System.nanoTime() - startNano;
System.out.println(decoder.getName() + " : \t" + NumberFormat.getInstance().format(nano));
}
public static | DecoderPerformanceTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java | {
"start": 1214,
"end": 1980
} | class ____ {
/**
* These are common statistic names.
* <p>
* The following names are considered general and preserved across different
* StorageStatistics classes. When implementing a new StorageStatistics, it is
* highly recommended to use the common statistic names.
* <p>
* When adding new common statistic name constants, please make them unique.
* By convention, they are implicitly unique:
* <ul>
* <li>the name of the constants are uppercase, words separated by
* underscores.</li>
* <li>the value of the constants are lowercase of the constant names.</li>
* </ul>
* See {@link StoreStatisticNames} for the field names used here
* and elsewhere.
*/
@InterfaceStability.Evolving
public | StorageStatistics |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/autoproxy/AspectJAutoProxyCreatorTests.java | {
"start": 16116,
"end": 16303
} | class ____ {
@Around("execution(* setAge(int)) && args(age)")
public Object test(ProceedingJoinPoint pjp, int age) throws Throwable {
return pjp.proceed();
}
}
| DummyAspectWithParameter |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/customproviders/AnotherValidNonBlockingFiltersTest.java | {
"start": 1253,
"end": 3150
} | class ____ {
@RegisterExtension
static ResteasyReactiveUnitTest test = new ResteasyReactiveUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(StandardBlockingRequestFilter.class, AnotherStandardBlockingRequestFilter.class,
StandardNonBlockingRequestFilter.class, PreMatchingNonBlockingRequestFilter.class,
CustomFilters.class,
DummyResource.class);
}
});
@Test
public void testBlockingEndpoint() {
Headers headers = RestAssured.given().get("/dummy/blocking")
.then().statusCode(200).extract().headers();
assertEquals(
"1-pre-matching-non-blocking/2-another-custom-non-blocking/3-standard-non-blocking/4-standard-blocking/5-another-standard-blocking/6-custom-blocking",
headers.get("filter-request").getValue());
assertEquals(
"false/false/false/true/true/true",
headers.get("thread").getValue());
}
@Test
public void testNonBlockingEndpoint() {
Headers headers = RestAssured.given().get("/dummy/nonblocking")
.then().statusCode(200).extract().headers();
assertEquals(
"1-pre-matching-non-blocking/2-another-custom-non-blocking/3-standard-non-blocking/4-standard-blocking/5-another-standard-blocking/6-custom-blocking",
headers.get("filter-request").getValue());
assertEquals(
"false/false/false/false/false/false",
headers.get("thread").getValue());
}
@Blocking
@Path("dummy")
public static | AnotherValidNonBlockingFiltersTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java | {
"start": 18241,
"end": 27453
} | class ____ extends AbstractRunnable {
private final ActionListener<Response> onCompletionListener;
private final ReplicationTask replicationTask;
private final ConcreteShardRequest<Request> primaryRequest;
AsyncPrimaryAction(
ConcreteShardRequest<Request> primaryRequest,
ActionListener<Response> onCompletionListener,
ReplicationTask replicationTask
) {
this.primaryRequest = primaryRequest;
this.onCompletionListener = onCompletionListener;
this.replicationTask = replicationTask;
}
@Override
protected void doRun() throws Exception {
final ShardId shardId = primaryRequest.getRequest().shardId();
final IndexShard indexShard = getIndexShard(shardId);
final ShardRouting shardRouting = indexShard.routingEntry();
// we may end up here if the cluster state used to route the primary is so stale that the underlying
// index shard was replaced with a replica. For example - in a two node cluster, if the primary fails
// the replica will take over and a replica will be assigned to the first node.
if (shardRouting.primary() == false) {
throw new ReplicationOperation.RetryOnPrimaryException(shardId, "actual shard is not a primary " + shardRouting);
}
final String actualAllocationId = shardRouting.allocationId().getId();
if (actualAllocationId.equals(primaryRequest.getTargetAllocationID()) == false) {
throw new ShardNotFoundException(
shardId,
"expected allocation id [{}] but found [{}]",
primaryRequest.getTargetAllocationID(),
actualAllocationId
);
}
final long actualTerm = indexShard.getPendingPrimaryTerm();
if (actualTerm != primaryRequest.getPrimaryTerm()) {
throw new ShardNotFoundException(
shardId,
"expected allocation id [{}] with term [{}] but found [{}]",
primaryRequest.getTargetAllocationID(),
primaryRequest.getPrimaryTerm(),
actualTerm
);
}
acquirePrimaryOperationPermit(
indexShard,
primaryRequest.getRequest(),
ActionListener.wrap(releasable -> runWithPrimaryShardReference(new PrimaryShardReference(indexShard, releasable)), e -> {
if (e instanceof ShardNotInPrimaryModeException) {
onFailure(new ReplicationOperation.RetryOnPrimaryException(shardId, "shard is not in primary mode", e));
} else {
onFailure(e);
}
})
);
}
void runWithPrimaryShardReference(final PrimaryShardReference primaryShardReference) {
ActionListener<Response> setFinishedListener = ActionListener.runBefore(
onCompletionListener,
() -> setPhase(replicationTask, "finished")
);
try {
final ClusterState clusterState = clusterService.state();
final Index index = primaryShardReference.routingEntry().index();
final ProjectMetadata project = clusterState.metadata().projectFor(index);
final ProjectId projectId = project.id();
final IndexMetadata indexMetadata = project.index(index);
final ClusterBlockException blockException = blockExceptions(clusterState, projectId, index.getName());
if (blockException != null) {
logger.trace("cluster is blocked, action failed on primary", blockException);
throw blockException;
}
if (primaryShardReference.isRelocated()) {
primaryShardReference.close(); // release shard operation lock as soon as possible
setPhase(replicationTask, "primary_delegation");
// delegate primary phase to relocation target
// it is safe to execute primary phase on relocation target as there are no more in-flight operations where primary
// phase is executed on local shard and all subsequent operations are executed on relocation target as primary phase.
final ShardRouting primary = primaryShardReference.routingEntry();
assert primary.relocating() : "indexShard is marked as relocated but routing isn't" + primary;
DiscoveryNode relocatingNode = clusterState.nodes().get(primary.relocatingNodeId());
String allocationID = primary.allocationId().getRelocationId();
transportService.sendRequest(
relocatingNode,
transportPrimaryAction,
new ConcreteShardRequest<>(primaryRequest.getRequest(), allocationID, primaryRequest.getPrimaryTerm()),
transportOptions,
new ActionListenerResponseHandler<>(
setFinishedListener,
TransportReplicationAction.this::newResponseInstance,
TransportResponseHandler.TRANSPORT_WORKER
)
);
} else if (ReplicationSplitHelper.needsSplitCoordination(primaryRequest.getRequest(), indexMetadata)) {
ReplicationSplitHelper<Request, ReplicaRequest, Response>.SplitCoordinator splitCoordinator = splitHelper
.newSplitRequest(
TransportReplicationAction.this,
replicationTask,
project,
primaryShardReference,
primaryRequest.getRequest(),
this::executePrimaryRequest,
setFinishedListener
);
splitCoordinator.coordinate();
} else {
setPhase(replicationTask, "primary");
executePrimaryRequest(primaryShardReference, setFinishedListener);
}
} catch (Exception e) {
Releasables.closeWhileHandlingException(primaryShardReference);
setFinishedListener.onFailure(e);
}
}
private void executePrimaryRequest(
final TransportReplicationAction<Request, ReplicaRequest, Response>.PrimaryShardReference primaryShardReference,
final ActionListener<Response> listener
) throws Exception {
final ActionListener<Response> responseListener = ActionListener.wrap(response -> {
adaptResponse(response, primaryShardReference.indexShard);
if (syncGlobalCheckpointAfterOperation) {
try {
primaryShardReference.indexShard.maybeSyncGlobalCheckpoint("post-operation");
} catch (final Exception e) {
// only log non-closed exceptions
if (ExceptionsHelper.unwrap(e, AlreadyClosedException.class, IndexShardClosedException.class) == null) {
// intentionally swallow, a missed global checkpoint sync should not fail this operation
logger.info(
() -> format(
"%s failed to execute post-operation global checkpoint sync",
primaryShardReference.indexShard.shardId()
),
e
);
}
}
}
assert primaryShardReference.indexShard.isPrimaryMode();
primaryShardReference.close(); // release shard operation lock before responding to caller
listener.onResponse(response);
}, e -> {
Releasables.closeWhileHandlingException(primaryShardReference);
listener.onFailure(e);
});
new ReplicationOperation<>(
primaryRequest.getRequest(),
primaryShardReference,
responseListener.map(result -> result.replicationResponse),
newReplicasProxy(),
logger,
threadPool,
actionName,
primaryRequest.getPrimaryTerm(),
initialRetryBackoffBound,
retryTimeout
).execute();
}
@Override
public void onFailure(Exception e) {
setPhase(replicationTask, "finished");
onCompletionListener.onFailure(e);
}
}
// allows subclasses to adapt the response
protected void adaptResponse(Response response, IndexShard indexShard) {
}
public static | AsyncPrimaryAction |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.