language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/graph/spi/GraphImplementor.java | {
"start": 782,
"end": 4633
} | interface ____<J> extends Graph<J>, GraphNodeImplementor<J> {
void merge(GraphImplementor<J> other);
@Internal
void mergeInternal(GraphImplementor<J> graph);
@Override
@Deprecated(forRemoval = true)
RootGraphImplementor<J> makeRootGraph(String name, boolean mutable);
@Override
@Deprecated(forRemoval = true)
SubGraphImplementor<J> makeSubGraph(boolean mutable);
@Override
GraphImplementor<J> makeCopy(boolean mutable);
@Override
List<? extends AttributeNodeImplementor<?,?,?>> getAttributeNodeList();
Map<PersistentAttribute<? super J, ?>, AttributeNodeImplementor<?,?,?>> getNodes();
Map<Class<? extends J>, SubGraphImplementor<? extends J>> getTreatedSubgraphs();
@Override
<Y> AttributeNodeImplementor<Y,?,?> getAttributeNode(String attributeName);
@Override
<Y> AttributeNodeImplementor<Y,?,?> getAttributeNode(Attribute<? super J, Y> attribute);
@Override
<AJ> AttributeNodeImplementor<AJ,?,?> findAttributeNode(String attributeName);
@Override
<AJ> AttributeNodeImplementor<AJ,?,?> findAttributeNode(PersistentAttribute<? super J, AJ> attribute);
<AJ> AttributeNodeImplementor<AJ,?,?> findOrCreateAttributeNode(String name);
<AJ> AttributeNodeImplementor<AJ,?,?> findOrCreateAttributeNode(PersistentAttribute<? super J, AJ> attribute);
<AJ> AttributeNodeImplementor<AJ,?,?> addAttributeNode(PersistentAttribute<? super J, AJ> attribute);
@Override
<Y> AttributeNodeImplementor<Y,?,?> addAttributeNode(Attribute<? super J, Y> attribute);
@Override
<Y extends J> SubGraphImplementor<Y> addTreatedSubgraph(Class<Y> type);
<Y extends J> SubGraphImplementor<Y> addTreatedSubgraph(ManagedType<Y> type);
@Override
<X> SubGraphImplementor<X> addSubgraph(String attributeName);
@Override
<AJ> SubGraphImplementor<AJ> addSubGraph(String attributeName);
@Override
<AJ> SubGraphImplementor<AJ> addSubGraph(String attributeName, Class<AJ> subType);
@Override
<AJ> SubGraphImplementor<AJ> addSubGraph(PersistentAttribute<? super J, AJ> attribute);
@Override
<AJ> SubGraphImplementor<AJ> addSubGraph(PersistentAttribute<? super J, ? super AJ> attribute, Class<AJ> subtype);
@Override
<AJ> SubGraphImplementor<AJ> addKeySubGraph(String attributeName);
@Override
<AJ> SubGraphImplementor<AJ> addKeySubGraph(String attributeName, Class<AJ> subtype);
@Override
<AJ> SubGraphImplementor<AJ> addKeySubGraph(MapPersistentAttribute<? super J, ? super AJ, ?> attribute, Class<AJ> subtype);
@Override
<X> SubGraphImplementor<X> addSubgraph(String attributeName, Class<X> type);
@Override
<X> SubGraphImplementor<X> addSubgraph(Attribute<? super J, X> attribute);
@Override
<Y> SubGraphImplementor<Y> addTreatedSubgraph(Attribute<? super J, ? super Y> attribute, Class<Y> type);
@Override
<AJ> SubGraphImplementor<AJ> addTreatedSubgraph(Attribute<? super J, ? super AJ> attribute, ManagedType<AJ> type);
@Override
<E> SubGraphImplementor<E> addTreatedElementSubgraph(PluralAttribute<? super J, ?, ? super E> attribute, Class<E> type);
@Override
<AJ> SubGraph<AJ> addTreatedElementSubgraph(PluralAttribute<? super J, ?, ? super AJ> attribute, ManagedType<AJ> type);
@Override
<X> SubGraphImplementor<X> addKeySubgraph(String attributeName);
@Override
<X> SubGraphImplementor<X> addKeySubgraph(String attributeName, Class<X> type);
@Override
<K> SubGraphImplementor<K> addTreatedMapKeySubgraph(MapAttribute<? super J, ? super K, ?> attribute, Class<K> type);
@Override
<AJ> SubGraphImplementor<AJ> addTreatedMapKeySubgraph(MapAttribute<? super J, ? super AJ, ?> attribute, ManagedType<AJ> type);
@Override
default boolean hasAttributeNode(String attributeName) {
return getAttributeNode( attributeName ) != null;
}
@Override
default boolean hasAttributeNode(Attribute<? super J, ?> attribute) {
return getAttributeNode( attribute ) != null;
}
}
| GraphImplementor |
java | apache__camel | components/camel-cassandraql/src/generated/java/org/apache/camel/processor/idempotent/cassandra/CassandraIdempotentRepositoryConfigurer.java | {
"start": 759,
"end": 4188
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.processor.idempotent.cassandra.CassandraIdempotentRepository target = (org.apache.camel.processor.idempotent.cassandra.CassandraIdempotentRepository) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "pkcolumns":
case "pkColumns": target.setPkColumns(property(camelContext, java.lang.String.class, value)); return true;
case "prefixpkvalues":
case "prefixPKValues": target.setPrefixPKValues(property(camelContext, java.lang.String.class, value)); return true;
case "readconsistencylevel":
case "readConsistencyLevel": target.setReadConsistencyLevel(property(camelContext, com.datastax.oss.driver.api.core.ConsistencyLevel.class, value)); return true;
case "session": target.setSession(property(camelContext, com.datastax.oss.driver.api.core.CqlSession.class, value)); return true;
case "table": target.setTable(property(camelContext, java.lang.String.class, value)); return true;
case "ttl": target.setTtl(property(camelContext, java.lang.Integer.class, value)); return true;
case "writeconsistencylevel":
case "writeConsistencyLevel": target.setWriteConsistencyLevel(property(camelContext, com.datastax.oss.driver.api.core.ConsistencyLevel.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "pkcolumns":
case "pkColumns": return java.lang.String.class;
case "prefixpkvalues":
case "prefixPKValues": return java.lang.String.class;
case "readconsistencylevel":
case "readConsistencyLevel": return com.datastax.oss.driver.api.core.ConsistencyLevel.class;
case "session": return com.datastax.oss.driver.api.core.CqlSession.class;
case "table": return java.lang.String.class;
case "ttl": return java.lang.Integer.class;
case "writeconsistencylevel":
case "writeConsistencyLevel": return com.datastax.oss.driver.api.core.ConsistencyLevel.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.processor.idempotent.cassandra.CassandraIdempotentRepository target = (org.apache.camel.processor.idempotent.cassandra.CassandraIdempotentRepository) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "pkcolumns":
case "pkColumns": return target.getPkColumns();
case "prefixpkvalues":
case "prefixPKValues": return target.getPrefixPKValues();
case "readconsistencylevel":
case "readConsistencyLevel": return target.getReadConsistencyLevel();
case "session": return target.getSession();
case "table": return target.getTable();
case "ttl": return target.getTtl();
case "writeconsistencylevel":
case "writeConsistencyLevel": return target.getWriteConsistencyLevel();
default: return null;
}
}
}
| CassandraIdempotentRepositoryConfigurer |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java | {
"start": 1783,
"end": 8051
} | class ____ {
private static final int MODIFICATION_TIME = 12345000;
private static final int ACCESS_TIME = 23456000;
private static final Path DIR_FROM = new Path("d0");
private static final Path DIR_FROM_SPL = new Path("d0 space");
private static final Path DIR_TO1 = new Path("d1");
private static final Path DIR_TO2 = new Path("d2");
private static final Path FROM = new Path(DIR_FROM, "f0");
private static final Path FROM_SPL = new Path(DIR_FROM_SPL, "f0");
private static final Path TO = new Path(DIR_TO1, "f1");
private static final FsPermission PERMISSIONS = new FsPermission(
FsAction.ALL,
FsAction.EXECUTE,
FsAction.READ_WRITE);
private FileSystem fs;
private Path testDir;
private Configuration conf;
@BeforeEach
public void initialize() throws Exception {
conf = new Configuration(false);
conf.set("fs.file.impl", LocalFileSystem.class.getName());
fs = FileSystem.getLocal(conf);
testDir = new FileSystemTestHelper().getTestRootPath(fs);
// don't want scheme on the path, just an absolute path
testDir = new Path(fs.makeQualified(testDir).toUri().getPath());
FileSystem.setDefaultUri(conf, fs.getUri());
fs.setWorkingDirectory(testDir);
fs.mkdirs(DIR_FROM);
fs.mkdirs(DIR_TO1);
fs.createNewFile(FROM);
FSDataOutputStream output = fs.create(FROM, true);
for(int i = 0; i < 100; ++i) {
output.writeInt(i);
output.writeChar('\n');
}
output.close();
fs.setPermission(FROM, PERMISSIONS);
fs.setTimes(FROM, MODIFICATION_TIME, ACCESS_TIME);
fs.setPermission(DIR_FROM, PERMISSIONS);
fs.setTimes(DIR_FROM, MODIFICATION_TIME, ACCESS_TIME);
}
@AfterEach
public void cleanup() throws Exception {
fs.delete(testDir, true);
fs.close();
}
private void assertAttributesPreserved(Path to) throws IOException {
FileStatus status = fs.getFileStatus(to);
assertEquals(MODIFICATION_TIME, status.getModificationTime());
assertEquals(ACCESS_TIME, status.getAccessTime());
assertEquals(PERMISSIONS, status.getPermission());
}
private void assertAttributesChanged(Path to) throws IOException {
FileStatus status = fs.getFileStatus(to);
assertNotEquals(MODIFICATION_TIME, status.getModificationTime());
assertNotEquals(ACCESS_TIME, status.getAccessTime());
assertNotEquals(PERMISSIONS, status.getPermission());
}
private void run(CommandWithDestination cmd, String... args) {
cmd.setConf(conf);
assertEquals(0, cmd.run(args));
}
@Test
@Timeout(value = 10)
public void testPutWithP() throws Exception {
run(new Put(), "-p", FROM.toString(), TO.toString());
assertAttributesPreserved(TO);
}
@Test
@Timeout(value = 10)
public void testPutWithoutP() throws Exception {
run(new Put(), FROM.toString(), TO.toString());
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testPutWithPQ() throws Exception {
Put put = new Put();
run(put, "-p", "-q", "100", FROM.toString(), TO.toString());
assertEquals(put.getThreadPoolQueueSize(), 100);
assertAttributesPreserved(TO);
}
@Test
@Timeout(value = 10)
public void testPutWithQ() throws Exception {
Put put = new Put();
run(put, "-q", "100", FROM.toString(), TO.toString());
assertEquals(put.getThreadPoolQueueSize(), 100);
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testPutWithSplCharacter() throws Exception {
fs.mkdirs(DIR_FROM_SPL);
fs.createNewFile(FROM_SPL);
run(new Put(), FROM_SPL.toString(), TO.toString());
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testCopyFromLocal() throws Exception {
run(new CopyFromLocal(), FROM.toString(), TO.toString());
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testCopyFromLocalWithThreads() throws Exception {
run(new CopyFromLocal(), "-t", "10", FROM.toString(), TO.toString());
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testCopyFromLocalWithThreadsPreserve() throws Exception {
run(new CopyFromLocal(), "-p", "-t", "10", FROM.toString(), TO.toString());
assertAttributesPreserved(TO);
}
@Test
@Timeout(value = 10)
public void testGetWithP() throws Exception {
run(new Get(), "-p", FROM.toString(), TO.toString());
assertAttributesPreserved(TO);
}
@Test
@Timeout(value = 10)
public void testGetWithoutP() throws Exception {
run(new Get(), FROM.toString(), TO.toString());
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testGetWithPQ() throws Exception {
Get get = new Get();
run(get, "-p", "-q", "100", FROM.toString(), TO.toString());
assertEquals(get.getThreadPoolQueueSize(), 100);
assertAttributesPreserved(TO);
}
@Test
@Timeout(value = 10)
public void testGetWithQ() throws Exception {
Get get = new Get();
run(get, "-q", "100", FROM.toString(), TO.toString());
assertEquals(get.getThreadPoolQueueSize(), 100);
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testGetWithThreads() throws Exception {
run(new Get(), "-t", "10", FROM.toString(), TO.toString());
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testGetWithThreadsPreserve() throws Exception {
run(new Get(), "-p", "-t", "10", FROM.toString(), TO.toString());
assertAttributesPreserved(TO);
}
@Test
@Timeout(value = 10)
public void testCpWithP() throws Exception {
run(new Cp(), "-p", FROM.toString(), TO.toString());
assertAttributesPreserved(TO);
}
@Test
@Timeout(value = 10)
public void testCpWithoutP() throws Exception {
run(new Cp(), FROM.toString(), TO.toString());
assertAttributesChanged(TO);
}
@Test
@Timeout(value = 10)
public void testDirectoryCpWithP() throws Exception {
run(new Cp(), "-p", DIR_FROM.toString(), DIR_TO2.toString());
assertAttributesPreserved(DIR_TO2);
}
@Test
@Timeout(value = 10)
public void testDirectoryCpWithoutP() throws Exception {
run(new Cp(), DIR_FROM.toString(), DIR_TO2.toString());
assertAttributesChanged(DIR_TO2);
}
}
| TestCopyPreserveFlag |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_611/SomeClass.java | {
"start": 340,
"end": 481
} | interface ____ {
InnerMapper INSTANCE = Mappers.getMapper( InnerMapper.class );
Target toTarget(Source in);
| InnerMapper |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java | {
"start": 839,
"end": 2152
} | class ____ {
private ActionTypes() {};
// Note: this action is *not* prefixed with `cluster:admin/xpack/security` since it would otherwise be excluded from the `manage`
// privilege -- instead it matches its prefix to `TransportNodesReloadSecureSettingsAction` which is the "parent" transport action
// that invokes the overall reload flow.
// This allows us to maintain the invariant that the parent reload secure settings action can be executed with the `manage` privilege
// without trappy system-context switches.
public static final ActionType<ActionResponse.Empty> RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION = new ActionType<>(
"cluster:admin/nodes/reload_secure_settings/security/remote_cluster_credentials"
);
public static final ActionType<QueryUserResponse> QUERY_USER_ACTION = new ActionType<>("cluster:admin/xpack/security/user/query");
public static final ActionType<BulkRolesResponse> BULK_PUT_ROLES = new ActionType<>("cluster:admin/xpack/security/role/bulk_put");
public static final ActionType<QueryRoleResponse> QUERY_ROLE_ACTION = new ActionType<>("cluster:admin/xpack/security/role/query");
public static final ActionType<BulkRolesResponse> BULK_DELETE_ROLES = new ActionType<>("cluster:admin/xpack/security/role/bulk_delete");
}
| ActionTypes |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/json/CustomQuoteCharTest.java | {
"start": 360,
"end": 3784
} | class ____
extends tools.jackson.core.unittest.JacksonCoreTestBase
{
final JsonFactory JSON_F = streamFactoryBuilder()
.quoteChar('\'')
.build();
// Only ASCII range supported as of 2.10
@Test
void invalidQuote() throws Exception
{
try {
streamFactoryBuilder()
.quoteChar('\u00A0');
fail("Should not allow quote character outside ASCII range");
} catch (IllegalArgumentException e) {
verifyException(e, "Can only use Unicode characters up to 0x7F");
}
}
@Test
void basicAposWithCharBased() throws Exception
{
StringWriter w;
JsonGenerator g;
// with Object
w = new StringWriter();
g = createGenerator(JSON_F, w);
_writeObject(g, "question", "answer");
g.close();
assertEquals("{'question':'answer'}", w.toString());
// with Array
w = new StringWriter();
g = createGenerator(JSON_F, w);
_writeArray(g, "hello world");
g.close();
assertEquals("['hello world']", w.toString());
}
@Test
void basicAposWithByteBased() throws Exception
{
ByteArrayOutputStream out;
JsonGenerator g;
// with Object
out = new ByteArrayOutputStream();
g = createGenerator(JSON_F, out);
_writeObject(g, "question", "answer");
g.close();
assertEquals("{'question':'answer'}", out.toString("UTF-8"));
// with Array
out = new ByteArrayOutputStream();
g = createGenerator(JSON_F, out);
_writeArray(g, "hello world");
g.close();
assertEquals("['hello world']", out.toString("UTF-8"));
}
@Test
void aposQuotingWithCharBased() throws Exception
{
StringWriter w;
JsonGenerator g;
// with Object
w = new StringWriter();
g = createGenerator(JSON_F, w);
_writeObject(g, "key", "It's \"fun\"");
g.close();
// should escape apostrophes but not quotes?
assertEquals("{'key':'It\\'s \\\"fun\\\"'}", w.toString());
// with Array
w = new StringWriter();
g = createGenerator(JSON_F, w);
_writeArray(g, "It's a sin");
g.close();
assertEquals("['It\\'s a sin']", w.toString());
}
@Test
void aposQuotingWithByteBased() throws Exception
{
ByteArrayOutputStream out;
JsonGenerator g;
// with Object
out = new ByteArrayOutputStream();
g = createGenerator(JSON_F, out);
_writeObject(g, "key", "It's \"fun\"");
g.close();
// should escape apostrophes but not quotes?
assertEquals("{'key':'It\\'s \\\"fun\\\"'}", out.toString("UTF-8"));
// with Array
out = new ByteArrayOutputStream();
g = createGenerator(JSON_F, out);
_writeArray(g, "It's a sin");
g.close();
assertEquals("['It\\'s a sin']", out.toString("UTF-8"));
}
private void _writeObject(JsonGenerator g, String key, String value) throws Exception {
g.writeStartObject();
g.writeStringProperty(key, value);
g.writeEndObject();
}
private void _writeArray(JsonGenerator g, String value) throws Exception {
g.writeStartArray();
g.writeString(value);
g.writeEndArray();
}
}
| CustomQuoteCharTest |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/AbstractConfigTest.java | {
"start": 38945,
"end": 39477
} | class ____ extends AbstractConfig {
private Integer a1;
@Nested
private InnerConfig b;
OuterConfig() {}
OuterConfig(String id) {
this.setId(id);
}
public Integer getA1() {
return a1;
}
public void setA1(Integer a1) {
this.a1 = a1;
}
public InnerConfig getB() {
return b;
}
public void setB(InnerConfig b) {
this.b = b;
}
}
public static | OuterConfig |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/processor/aggregate/jdbc/JdbcAggregateConcurrentDifferentGroupsTest.java | {
"start": 1074,
"end": 2453
} | class ____ extends AbstractJdbcAggregationTestSupport {
@Test
public void testNoConcurrentProducers() throws Exception {
doSendMessages(2, 1);
}
@Test
public void testConcurrentProducers() throws Exception {
doSendMessages(10, 5);
}
@Test
public void testMoreConcurrentProducers() throws Exception {
doSendMessages(50, 10);
}
private void doSendMessages(int files, int poolSize) throws Exception {
MockEndpoint mock = getMockEndpoint("mock:aggregated");
mock.expectedMessageCount(2);
mock.setResultWaitTime(30 * 1000L);
ExecutorService executor = Executors.newFixedThreadPool(poolSize);
for (int i = 0; i < files; i++) {
final int index = i;
executor.submit(new Callable<Object>() {
public Object call() throws Exception {
String id = index % 2 == 0 ? "A" : "B";
template.sendBodyAndHeader("direct:start", index, "id", id);
// simulate a little delay
Thread.sleep(3);
return null;
}
});
}
MockEndpoint.assertIsSatisfied(context);
executor.shutdownNow();
}
@Override
long getCompletionInterval() {
return 1000;
}
}
| JdbcAggregateConcurrentDifferentGroupsTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java | {
"start": 29050,
"end": 29116
} | class ____ the job output data.
*
* @param theClass the key | for |
java | quarkusio__quarkus | extensions/spring-web/resteasy-reactive/runtime/src/main/java/io/quarkus/spring/web/resteasy/reactive/runtime/ResponseStatusHandler.java | {
"start": 272,
"end": 1229
} | class ____ implements ServerRestHandler {
// make mutable to allow for bytecode serialization
private int defaultResponseCode;
private int newResponseCode;
public int getDefaultResponseCode() {
return defaultResponseCode;
}
public void setDefaultResponseCode(int defaultResponseCode) {
this.defaultResponseCode = defaultResponseCode;
}
public int getNewResponseCode() {
return newResponseCode;
}
public void setNewResponseCode(int newResponseCode) {
this.newResponseCode = newResponseCode;
}
@Override
public void handle(ResteasyReactiveRequestContext requestContext) throws Exception {
ResponseImpl response = (ResponseImpl) requestContext.getResponse().get();
if (response.getStatus() == defaultResponseCode) { // only set the status if it has not already been set
response.setStatus(newResponseCode);
}
}
}
| ResponseStatusHandler |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/histogram/ExtractHistogramComponentErrorTests.java | {
"start": 891,
"end": 1937
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Before
public void setup() {
assumeTrue(
"Only when esql_exponential_histogram feature flag is enabled",
EsqlCorePlugin.EXPONENTIAL_HISTOGRAM_FEATURE_FLAG.isEnabled()
);
}
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(HistogramPercentileTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new ExtractHistogramComponent(source, args.get(0), args.get(1));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> switch (p) {
case 0 -> "exponential_histogram";
case 1 -> "integer";
default -> throw new IllegalStateException("Unexpected value: " + p);
}));
}
}
| ExtractHistogramComponentErrorTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DFCachingGetSpaceUsed.java | {
"start": 1402,
"end": 1722
} | class ____ extends CachingGetSpaceUsed {
private final DF df;
public DFCachingGetSpaceUsed(Builder builder) throws IOException {
super(builder);
this.df = new DF(builder.getPath(), builder.getInterval());
}
@Override
protected void refresh() {
this.used.set(df.getUsed());
}
}
| DFCachingGetSpaceUsed |
java | spring-projects__spring-boot | buildSrc/src/main/java/org/springframework/boot/build/antora/SyncAntoraSource.java | {
"start": 1166,
"end": 2087
} | class ____ extends DefaultTask {
private final FileSystemOperations fileSystemOperations;
private final ArchiveOperations archiveOperations;
private FileCollection source;
@Inject
public SyncAntoraSource(FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations) {
this.fileSystemOperations = fileSystemOperations;
this.archiveOperations = archiveOperations;
}
@OutputDirectory
public abstract DirectoryProperty getOutputDirectory();
@InputFiles
public FileCollection getSource() {
return this.source;
}
public void setSource(FileCollection source) {
this.source = source;
}
@TaskAction
void syncAntoraSource() {
this.fileSystemOperations.sync(this::syncAntoraSource);
}
private void syncAntoraSource(CopySpec sync) {
sync.into(getOutputDirectory());
this.source.getFiles().forEach((file) -> sync.from(this.archiveOperations.zipTree(file)));
}
}
| SyncAntoraSource |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/stmt/OracleAlterTableSplitPartition.java | {
"start": 999,
"end": 2498
} | class ____ extends OracleAlterTableItem {
private SQLName name;
private List<SQLExpr> at = new ArrayList<SQLExpr>();
private List<SQLExpr> values = new ArrayList<SQLExpr>();
private List<NestedTablePartitionSpec> into = new ArrayList<NestedTablePartitionSpec>();
private UpdateIndexesClause updateIndexes;
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, name);
acceptChild(visitor, at);
acceptChild(visitor, values);
acceptChild(visitor, updateIndexes);
}
visitor.endVisit(this);
}
public UpdateIndexesClause getUpdateIndexes() {
return updateIndexes;
}
public void setUpdateIndexes(UpdateIndexesClause updateIndexes) {
this.updateIndexes = updateIndexes;
}
public SQLName getName() {
return name;
}
public void setName(SQLName name) {
this.name = name;
}
public List<SQLExpr> getAt() {
return at;
}
public void setAt(List<SQLExpr> at) {
this.at = at;
}
public List<NestedTablePartitionSpec> getInto() {
return into;
}
public void setInto(List<NestedTablePartitionSpec> into) {
this.into = into;
}
public List<SQLExpr> getValues() {
return values;
}
public void setValues(List<SQLExpr> values) {
this.values = values;
}
public static | OracleAlterTableSplitPartition |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/jdk8/ObservableFromCompletionStageTest.java | {
"start": 872,
"end": 1883
} | class ____ extends RxJavaTest {
@Test
public void syncSuccess() {
Observable.fromCompletionStage(CompletableFuture.completedFuture(1))
.test()
.assertResult(1);
}
@Test
public void syncFailure() {
CompletableFuture<Integer> cf = new CompletableFuture<>();
cf.completeExceptionally(new TestException());
Observable.fromCompletionStage(cf)
.test()
.assertFailure(TestException.class);
}
@Test
public void syncNull() {
Observable.fromCompletionStage(CompletableFuture.<Integer>completedFuture(null))
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void cancel() {
CompletableFuture<Integer> cf = new CompletableFuture<>();
TestObserver<Integer> to = Observable.fromCompletionStage(cf)
.test();
to.assertEmpty();
to.dispose();
cf.complete(1);
to.assertEmpty();
}
}
| ObservableFromCompletionStageTest |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbstractAbfsStatisticsSource.java | {
"start": 1049,
"end": 1103
} | class ____ Abfs statistics source.
*/
public abstract | for |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/source/expressions/java/MultiLineExpressionMapper.java | {
"start": 488,
"end": 1181
} | interface ____ {
MultiLineExpressionMapper INSTANCE = Mappers.getMapper( MultiLineExpressionMapper.class );
@Mappings({
@Mapping(target = "timeAndFormat", expression = "java( new TimeAndFormat(\ns.getTime(),\ns.getFormat()\n ))"),
@Mapping(target = "anotherProp", ignore = true)
})
Target mapUsingMultiLineExpression(Source s);
@Mappings({
@Mapping(
target = "timeAndFormat",
expression = " java( new TimeAndFormat(\ns.getTime(),\ns.getFormat()\n )) "
),
@Mapping(target = "anotherProp", ignore = true)
})
Target mapUsingMultiLineExpressionWithLeadingSpaces(Source s);
}
| MultiLineExpressionMapper |
java | apache__maven | api/maven-api-spi/src/main/java/org/apache/maven/api/spi/ExtensibleEnumProvider.java | {
"start": 1439,
"end": 1627
} | interface ____ discovered through the Java ServiceLoader mechanism.
* Each implementation must be registered in a {@code META-INF/services/} file corresponding
* to the specific provider | are |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/runner/classloading/MemoryClassPathElementTestCase.java | {
"start": 494,
"end": 1658
} | class ____ {
static Map<String, byte[]> data;
@BeforeAll
public static void before() throws Exception {
data = new HashMap<>();
data.put("a.txt", "A file".getBytes(StandardCharsets.UTF_8));
data.put("b.txt", "another file".getBytes(StandardCharsets.UTF_8));
data.put("foo/sub.txt", "subdir file".getBytes(StandardCharsets.UTF_8));
}
@AfterAll
public static void after() throws Exception {
data = null;
}
@Test
public void testGetAllResources() {
MemoryClassPathElement f = new MemoryClassPathElement(data, true);
Set<String> res = f.getProvidedResources();
Assertions.assertEquals(3, res.size());
Assertions.assertEquals(new HashSet<>(Arrays.asList("a.txt", "b.txt", "foo/sub.txt")), res);
}
@Test
public void testGetResource() {
MemoryClassPathElement f = new MemoryClassPathElement(data, true);
ClassPathResource res = f.getResource("foo/sub.txt");
Assertions.assertNotNull(res);
Assertions.assertEquals("subdir file", new String(res.getData(), StandardCharsets.UTF_8));
}
}
| MemoryClassPathElementTestCase |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigDataEnvironmentContributorTests.java | {
"start": 21394,
"end": 21622
} | class ____ extends ConfigDataResource {
private final String location;
TestResource(String location) {
this.location = location;
}
@Override
public String toString() {
return this.location;
}
}
}
| TestResource |
java | quarkusio__quarkus | devtools/cli/src/main/java/io/quarkus/cli/plugin/CliPluginsAdd.java | {
"start": 358,
"end": 3877
} | class ____ extends CliPluginsBase implements Callable<Integer> {
@CommandLine.Mixin
RunModeOption runMode;
@CommandLine.Option(names = { "-d",
"--description" }, paramLabel = "Plugin description", order = 5, description = "The plugin description")
Optional<String> description;
@CommandLine.Parameters(arity = "1", paramLabel = "PLUGIN_NAME", description = " The plugin name or location (e.g. url, path or maven coordinates in GACTV form)")
String nameOrLocation;
@Override
public Integer call() {
try {
output.debug("Add plugin with initial parameters: %s", this);
output.throwIfUnmatchedArguments(spec.commandLine());
if (runMode.isDryRun()) {
dryRunAdd(spec.commandLine().getHelp());
return CommandLine.ExitCode.OK;
}
return addPlugin();
} catch (Exception e) {
return output.handleCommandException(e,
"Unable to add plugin(s): " + nameOrLocation + " of type: " + type.map(PluginType::name).orElse("<any>")
+ "."
+ e.getMessage());
}
}
Integer addPlugin() throws IOException {
PluginManager pluginManager = pluginManager();
String name = pluginManager.getUtil().getName(nameOrLocation);
Optional<Plugin> existingPlugin = Optional.ofNullable(pluginManager.getInstalledPlugins().get(name));
Optional<Plugin> addedPlugin = pluginManager.addPlugin(nameOrLocation, catalogOptions.user, description);
return addedPlugin.map(plugin -> {
PluginListTable table = new PluginListTable(List.of(new PluginListItem(true, plugin)), false);
output.info("Added plugin:");
output.info(table.getContent());
if (plugin.isInProjectCatalog() && existingPlugin.filter(p -> p.isInUserCatalog()).isPresent()) {
output.warn(
"Plugin was added in the project scope, but another with the same name exists in the user scope!\nThe project scoped one will take precedence when invoked from within the project!");
}
if (plugin.isInUserCatalog() && existingPlugin.filter(p -> p.isInProjectCatalog()).isPresent()) {
output.warn(
"Plugin was added in the user scope, but another with the same name exists in the project scope!\nThe project scoped one will take precedence when invoked from within the project!");
}
return CommandLine.ExitCode.OK;
}).orElseGet(() -> {
output.error("No plugin available at: " + this.nameOrLocation);
printHints(true);
return CommandLine.ExitCode.USAGE;
});
}
private void printHints(boolean pluginListHint) {
if (runMode.isBatchMode())
return;
if (pluginListHint) {
output.info("To see the list of installable plugins, use the 'plugin list' subcommand.");
}
}
void dryRunAdd(CommandLine.Help help) {
output.printText(new String[] {
"\nAdd plugin to the CLI\n",
"\t" + projectRoot().toString()
});
Map<String, String> dryRunOutput = new TreeMap<>();
dryRunOutput.put("Name or Location", nameOrLocation);
type.ifPresent(t -> dryRunOutput.put("Type", t.name()));
output.info(help.createTextTable(dryRunOutput).toString());
};
}
| CliPluginsAdd |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/requestcontext/RequestContextAlwaysActiveTest.java | {
"start": 571,
"end": 1367
} | class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(Endpoint.class, WSClient.class);
})
.overrideConfigKey("quarkus.websockets-next.server.activate-request-context", "always");
@Inject
Vertx vertx;
@TestHTTPResource("end")
URI endUri;
@Test
void testRequestContext() throws InterruptedException {
try (WSClient client = WSClient.create(vertx).connect(endUri)) {
client.sendAndAwait("ping");
client.waitForMessages(1);
assertEquals("pong:true", client.getLastMessage().toString());
}
}
@WebSocket(path = "/end")
public static | RequestContextAlwaysActiveTest |
java | apache__maven | compat/maven-toolchain-builder/src/test/java/org/apache/maven/toolchain/building/DefaultToolchainsBuilderTest.java | {
"start": 2186,
"end": 13996
} | class ____ {
private static final String LS = System.lineSeparator();
@Spy
private DefaultToolchainsReader toolchainsReader;
@Spy
private DefaultToolchainsWriter toolchainsWriter;
@InjectMocks
private DefaultToolchainsBuilder toolchainBuilder;
@BeforeEach
void onSetup() {
// MockitoAnnotations.openMocks(this);
Map<String, String> envVarMap = new HashMap<>();
envVarMap.put("testKey", "testValue");
envVarMap.put("testSpecialCharactersKey", "<test&Value>");
OperatingSystemUtils.setEnvVarSource(new TestEnvVarSource(envVarMap));
}
@Test
void testBuildEmptyRequest() throws Exception {
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
ToolchainsBuildingResult result = toolchainBuilder.build(request);
assertNotNull(result.getEffectiveToolchains());
assertNotNull(result.getProblems());
assertEquals(0, result.getProblems().size());
}
@Test
void testBuildRequestWithUserToolchains() throws Exception {
Properties props = new Properties();
props.put("key", "user_value");
ToolchainModel toolchain = new ToolchainModel();
toolchain.setType("TYPE");
toolchain.setProvides(props);
PersistedToolchains persistedToolchains = new PersistedToolchains();
persistedToolchains.setToolchains(Collections.singletonList(toolchain));
String xml = new DefaultToolchainsXmlFactory().toXmlString(persistedToolchains.getDelegate());
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
request.setUserToolchainsSource(new StringSource(xml));
ToolchainsBuildingResult result = toolchainBuilder.build(request);
assertNotNull(result.getEffectiveToolchains());
assertEquals(1, result.getEffectiveToolchains().getToolchains().size());
assertEquals(
"TYPE", result.getEffectiveToolchains().getToolchains().get(0).getType());
assertEquals(
"user_value",
result.getEffectiveToolchains()
.getToolchains()
.get(0)
.getProvides()
.get("key"));
assertNotNull(result.getProblems());
assertEquals(0, result.getProblems().size());
}
@Test
void testBuildRequestWithGlobalToolchains() throws Exception {
Properties props = new Properties();
props.put("key", "global_value");
ToolchainModel toolchain = new ToolchainModel();
toolchain.setType("TYPE");
toolchain.setProvides(props);
PersistedToolchains persistedToolchains = new PersistedToolchains();
persistedToolchains.setToolchains(Collections.singletonList(toolchain));
String xml = new DefaultToolchainsXmlFactory().toXmlString(persistedToolchains.getDelegate());
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
request.setGlobalToolchainsSource(new StringSource(xml));
ToolchainsBuildingResult result = toolchainBuilder.build(request);
assertNotNull(result.getEffectiveToolchains());
assertEquals(1, result.getEffectiveToolchains().getToolchains().size());
assertEquals(
"TYPE", result.getEffectiveToolchains().getToolchains().get(0).getType());
assertEquals(
"global_value",
result.getEffectiveToolchains()
.getToolchains()
.get(0)
.getProvides()
.get("key"));
assertNotNull(result.getProblems());
assertEquals(0, result.getProblems().size());
}
@Test
void testBuildRequestWithBothToolchains() throws Exception {
Properties props = new Properties();
props.put("key", "user_value");
ToolchainModel toolchain = new ToolchainModel();
toolchain.setType("TYPE");
toolchain.setProvides(props);
PersistedToolchains userResult = new PersistedToolchains();
userResult.setToolchains(Collections.singletonList(toolchain));
props = new Properties();
props.put("key", "global_value");
toolchain = new ToolchainModel();
toolchain.setType("TYPE");
toolchain.setProvides(props);
PersistedToolchains globalResult = new PersistedToolchains();
globalResult.setToolchains(Collections.singletonList(toolchain));
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
request.setUserToolchainsSource(
new StringSource(new DefaultToolchainsXmlFactory().toXmlString(userResult.getDelegate())));
request.setGlobalToolchainsSource(
new StringSource(new DefaultToolchainsXmlFactory().toXmlString(globalResult.getDelegate())));
ToolchainsBuildingResult result = toolchainBuilder.build(request);
assertNotNull(result.getEffectiveToolchains());
assertEquals(2, result.getEffectiveToolchains().getToolchains().size());
assertEquals(
"TYPE", result.getEffectiveToolchains().getToolchains().get(0).getType());
assertEquals(
"user_value",
result.getEffectiveToolchains()
.getToolchains()
.get(0)
.getProvides()
.get("key"));
assertEquals(
"TYPE", result.getEffectiveToolchains().getToolchains().get(1).getType());
assertEquals(
"global_value",
result.getEffectiveToolchains()
.getToolchains()
.get(1)
.getProvides()
.get("key"));
assertNotNull(result.getProblems());
assertEquals(0, result.getProblems().size());
}
@Test
void testStrictToolchainsParseException() throws Exception {
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
request.setGlobalToolchainsSource(new StringSource(""));
ToolchainsParseException parseException = new ToolchainsParseException("MESSAGE", 4, 2);
doThrow(parseException).when(toolchainsReader).read(any(InputStream.class), ArgumentMatchers.anyMap());
try {
toolchainBuilder.build(request);
} catch (ToolchainsBuildingException e) {
assertEquals(
"1 problem was encountered while building the effective toolchains" + LS
+ "[FATAL] Non-parseable toolchains (memory): MESSAGE @ line 4, column 2" + LS,
e.getMessage());
}
}
@Test
void testIOException() throws Exception {
Source src = mock(Source.class);
IOException ioException = new IOException("MESSAGE");
doThrow(ioException).when(src).getInputStream();
doReturn("LOCATION").when(src).getLocation();
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
request.setGlobalToolchainsSource(src);
try {
toolchainBuilder.build(request);
} catch (ToolchainsBuildingException e) {
assertEquals(
"1 problem was encountered while building the effective toolchains" + LS
+ "[FATAL] Non-readable toolchains LOCATION: MESSAGE" + LS,
e.getMessage());
}
}
@Test
void testEnvironmentVariablesAreInterpolated() throws Exception {
Properties props = new Properties();
props.put("key", "${env.testKey}");
Xpp3Dom configurationChild = new Xpp3Dom("jdkHome");
configurationChild.setValue("${env.testKey}");
Xpp3Dom configuration = new Xpp3Dom("configuration");
configuration.addChild(configurationChild);
ToolchainModel toolchain = new ToolchainModel();
toolchain.setType("TYPE");
toolchain.setProvides(props);
toolchain.setConfiguration(configuration);
PersistedToolchains persistedToolchains = new PersistedToolchains();
persistedToolchains.setToolchains(Collections.singletonList(toolchain));
String xml = new DefaultToolchainsXmlFactory().toXmlString(persistedToolchains.getDelegate());
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
request.setUserToolchainsSource(new StringSource(xml));
ToolchainsBuildingResult result = toolchainBuilder.build(request);
String interpolatedValue = "testValue";
assertEquals(
interpolatedValue,
result.getEffectiveToolchains()
.getToolchains()
.get(0)
.getProvides()
.get("key"));
org.codehaus.plexus.util.xml.Xpp3Dom toolchainConfiguration = (org.codehaus.plexus.util.xml.Xpp3Dom)
result.getEffectiveToolchains().getToolchains().get(0).getConfiguration();
assertEquals(
interpolatedValue, toolchainConfiguration.getChild("jdkHome").getValue());
assertNotNull(result.getProblems());
assertEquals(0, result.getProblems().size());
}
@Test
void testNonExistingEnvironmentVariablesAreNotInterpolated() throws Exception {
Properties props = new Properties();
props.put("key", "${env.testNonExistingKey}");
ToolchainModel toolchain = new ToolchainModel();
toolchain.setType("TYPE");
toolchain.setProvides(props);
PersistedToolchains persistedToolchains = new PersistedToolchains();
persistedToolchains.setToolchains(Collections.singletonList(toolchain));
String xml = new DefaultToolchainsXmlFactory().toXmlString(persistedToolchains.getDelegate());
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
request.setUserToolchainsSource(new StringSource(xml));
ToolchainsBuildingResult result = toolchainBuilder.build(request);
assertEquals(
"${env.testNonExistingKey}",
result.getEffectiveToolchains()
.getToolchains()
.get(0)
.getProvides()
.get("key"));
assertNotNull(result.getProblems());
assertEquals(0, result.getProblems().size());
}
@Test
void testEnvironmentVariablesWithSpecialCharactersAreInterpolated() throws Exception {
Properties props = new Properties();
props.put("key", "${env.testSpecialCharactersKey}");
ToolchainModel toolchain = new ToolchainModel();
toolchain.setType("TYPE");
toolchain.setProvides(props);
PersistedToolchains persistedToolchains = new PersistedToolchains();
persistedToolchains.setToolchains(Collections.singletonList(toolchain));
String xml = new DefaultToolchainsXmlFactory().toXmlString(persistedToolchains.getDelegate());
ToolchainsBuildingRequest request = new DefaultToolchainsBuildingRequest();
request.setUserToolchainsSource(new StringSource(xml));
ToolchainsBuildingResult result = toolchainBuilder.build(request);
String interpolatedValue = "<test&Value>";
assertEquals(
interpolatedValue,
result.getEffectiveToolchains()
.getToolchains()
.get(0)
.getProvides()
.get("key"));
assertNotNull(result.getProblems());
assertEquals(0, result.getProblems().size());
}
static | DefaultToolchainsBuilderTest |
java | elastic__elasticsearch | x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java | {
"start": 11227,
"end": 16829
} | class ____ {
private final SnapshotId snapshotId;
SnapshotVerifier(SnapshotId snapshotId) {
this.snapshotId = snapshotId;
}
void run(ActionListener<Void> listener) {
if (isCancelledSupplier.getAsBoolean()) {
// getSnapshotInfo does its own forking, so we must check for cancellation here
listener.onResponse(null);
return;
}
blobStoreRepository.getSnapshotInfo(snapshotId, new ActionListener<>() {
@Override
public void onResponse(SnapshotInfo snapshotInfo) {
verifySnapshotInfo(snapshotInfo, listener);
}
@Override
public void onFailure(Exception e) {
unreadableSnapshotInfoUuids.add(snapshotId.getUUID());
anomaly("failed to load snapshot info").snapshotId(snapshotId).exception(e).write(listener);
}
});
}
void verifySnapshotInfo(SnapshotInfo snapshotInfo, ActionListener<Void> listener) {
final var chunkBuilder = new RepositoryVerifyIntegrityResponseChunk.Builder(
responseChunkWriter,
RepositoryVerifyIntegrityResponseChunk.Type.SNAPSHOT_INFO,
currentTimeMillisSupplier.getAsLong()
).snapshotInfo(snapshotInfo);
// record the SnapshotInfo in the response
final var chunkWrittenStep = SubscribableListener.newForked(chunkBuilder::write);
if (failedShardSnapshotsCount.get() < requestParams.maxFailedShardSnapshots()) {
for (final var shardFailure : snapshotInfo.shardFailures()) {
if (failedShardSnapshotsCount.getAndIncrement() < requestParams.maxFailedShardSnapshots()) {
failedShardSnapshotDescriptions.add(
getShardSnapshotDescription(snapshotId, shardFailure.index(), shardFailure.shardId())
);
}
}
} else {
failedShardSnapshotsCount.addAndGet(snapshotInfo.shardFailures().size());
}
// check the indices in the SnapshotInfo match those in RepositoryData
final var snapshotContentsOkStep = chunkWrittenStep.<Void>andThen(l -> {
if (Set.copyOf(snapshotInfo.indices()).equals(indexNamesBySnapshotName.get(snapshotId.getName()))) {
l.onResponse(null);
} else {
anomaly("snapshot contents mismatch").snapshotId(snapshotId).write(l);
}
});
// check the global metadata is readable if present
final var globalMetadataOkStep = Boolean.TRUE.equals(snapshotInfo.includeGlobalState())
? snapshotContentsOkStep.<Void>andThen(this::verifySnapshotGlobalMetadata)
: snapshotContentsOkStep;
globalMetadataOkStep.addListener(listener);
}
private void verifySnapshotGlobalMetadata(ActionListener<Void> listener) {
metadataTaskRunner.run(ActionRunnable.wrap(listener, l -> {
try {
blobStoreRepository.getSnapshotGlobalMetadata(snapshotId, false);
// no checks here, loading it is enough
l.onResponse(null);
} catch (Exception e) {
anomaly("failed to load global metadata").snapshotId(snapshotId).exception(e).write(l);
}
}));
}
}
}
private void checkFailedShardSnapshotCount(ActionListener<Void> listener) {
if (failedShardSnapshotDescriptions.size() < failedShardSnapshotsCount.get()) {
listener.onFailure(
new RepositoryVerificationException(
repositoryName,
Strings.format(
"""
Cannot verify the integrity of all index snapshots because this repository contains too many shard snapshot \
failures: there are [%d] shard snapshot failures but [?%s] is set to [%d]. \
Please increase this limit if it is safe to do so.""",
failedShardSnapshotsCount.get(),
RepositoryVerifyIntegrityParams.MAX_FAILED_SHARD_SNAPSHOTS,
requestParams.maxFailedShardSnapshots()
)
)
);
} else {
listener.onResponse(null);
}
}
private void verifyIndices(ActionListener<Void> listener) {
var listeners = new RefCountingListener(listener);
runThrottled(
Iterators.failFast(
repositoryData.getIndices().values().iterator(),
() -> isCancelledSupplier.getAsBoolean() || listeners.isFailing()
),
(releasable, indexId) -> new IndexVerifier(indexId).run(ActionListener.releaseAfter(listeners.acquire(), releasable)),
requestParams.indexVerificationConcurrency(),
indexProgress,
listeners
);
}
/**
* Verifies the integrity of the snapshots of a specific index
*/
private | SnapshotVerifier |
java | elastic__elasticsearch | modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistInstanceBinding.java | {
"start": 961,
"end": 1236
} | class ____ {
/** Information about where this constructor was whitelisted from. */
public final String origin;
/** The Java instance this instance binding targets. */
public final Object targetInstance;
/** The method name for this | WhitelistInstanceBinding |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/runtime/DoubleScriptFieldTermQuery.java | {
"start": 638,
"end": 1801
} | class ____ extends AbstractDoubleScriptFieldQuery {
private final double term;
public DoubleScriptFieldTermQuery(Script script, DoubleFieldScript.LeafFactory leafFactory, String fieldName, double term) {
super(script, leafFactory, fieldName);
this.term = term;
}
@Override
protected boolean matches(double[] values, int count) {
for (int i = 0; i < count; i++) {
if (term == values[i]) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return Double.toString(term);
}
return fieldName() + ":" + term;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), term);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
DoubleScriptFieldTermQuery other = (DoubleScriptFieldTermQuery) obj;
return term == other.term;
}
double term() {
return term;
}
}
| DoubleScriptFieldTermQuery |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequest.java | {
"start": 992,
"end": 6267
} | class ____ extends LegacyActionRequest implements UserRequest, WriteRequest<PutUserRequest> {
private String username;
private String[] roles;
private String fullName;
private String email;
private Map<String, Object> metadata;
private char[] passwordHash;
private boolean enabled = true;
private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE;
public PutUserRequest(StreamInput in) throws IOException {
super(in);
username = in.readString();
passwordHash = readCharArrayFromStream(in);
roles = in.readStringArray();
fullName = in.readOptionalString();
email = in.readOptionalString();
metadata = in.readBoolean() ? in.readGenericMap() : null;
refreshPolicy = RefreshPolicy.readFrom(in);
enabled = in.readBoolean();
}
public PutUserRequest() {}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (username == null) {
validationException = addValidationError("user is missing", validationException);
}
if (roles == null) {
validationException = addValidationError("roles are missing", validationException);
}
if (metadata != null && metadata.keySet().stream().anyMatch(s -> s.startsWith("_"))) {
validationException = addValidationError("metadata keys may not start with [_]", validationException);
}
// we do not check for a password hash here since it is possible that the user exists and we don't want to update the password
return validationException;
}
public void username(String username) {
this.username = username;
}
public void roles(String... roles) {
this.roles = roles;
}
public void fullName(String fullName) {
this.fullName = fullName;
}
public void email(String email) {
this.email = email;
}
public void metadata(Map<String, Object> metadata) {
this.metadata = metadata;
}
public void passwordHash(@Nullable char[] passwordHash) {
this.passwordHash = passwordHash;
}
public void enabled(boolean enabled) {
this.enabled = enabled;
}
/**
* Should this request trigger a refresh ({@linkplain RefreshPolicy#IMMEDIATE}, the default), wait for a refresh (
* {@linkplain RefreshPolicy#WAIT_UNTIL}), or proceed ignore refreshes entirely ({@linkplain RefreshPolicy#NONE}).
*/
@Override
public RefreshPolicy getRefreshPolicy() {
return refreshPolicy;
}
@Override
public PutUserRequest setRefreshPolicy(RefreshPolicy refreshPolicy) {
this.refreshPolicy = refreshPolicy;
return this;
}
public String username() {
return username;
}
public String[] roles() {
return roles;
}
public String fullName() {
return fullName;
}
public String email() {
return email;
}
public Map<String, Object> metadata() {
return metadata;
}
@Nullable
public char[] passwordHash() {
return passwordHash;
}
public boolean enabled() {
return enabled;
}
@Override
public String[] usernames() {
return new String[] { username };
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(username);
writeCharArrayToStream(out, passwordHash);
out.writeStringArray(roles);
out.writeOptionalString(fullName);
out.writeOptionalString(email);
if (metadata == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeGenericMap(metadata);
}
refreshPolicy.writeTo(out);
out.writeBoolean(enabled);
}
private static char[] readCharArrayFromStream(StreamInput in) throws IOException {
BytesReference charBytesRef = in.readSlicedBytesReference();
if (charBytesRef == BytesArray.EMPTY) {
return null;
} else {
return CharArrays.utf8BytesToChars(BytesReference.toBytes(charBytesRef));
}
}
private static void writeCharArrayToStream(StreamOutput out, char[] chars) throws IOException {
final BytesReference charBytesRef;
if (chars == null) {
charBytesRef = null;
} else {
charBytesRef = new BytesArray(CharArrays.toUtf8Bytes(chars));
}
out.writeBytesReference(charBytesRef);
}
@Override
public String toString() {
return "PutUserRequest{"
+ "username='"
+ username
+ '\''
+ ", roles="
+ Arrays.toString(roles)
+ ", fullName='"
+ fullName
+ '\''
+ ", email='"
+ email
+ '\''
+ ", metadata="
+ metadata
+ ", passwordHash="
+ (passwordHash == null ? "<null>" : "<not-null>")
+ ", enabled="
+ enabled
+ ", refreshPolicy="
+ refreshPolicy
+ '}';
}
}
| PutUserRequest |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/integration/ConnectorValidationIntegrationTest.java | {
"start": 21554,
"end": 23030
} | class ____ throws an exception on instantiation is specified",
0
);
}
@Test
public void testConnectorHasMisconfiguredHeaderConverter() throws InterruptedException {
Map<String, String> config = defaultSinkConnectorProps();
config.put(HEADER_CONVERTER_CLASS_CONFIG, TestConverterWithSinglePropertyConfigDef.class.getName());
config.put(HEADER_CONVERTER_CLASS_CONFIG + "." + TestConverterWithSinglePropertyConfigDef.BOOLEAN_PROPERTY_NAME, "notaboolean");
connect.assertions().assertExactlyNumErrorsOnConnectorConfigValidation(
config.get(CONNECTOR_CLASS_CONFIG),
config,
1,
"Connector config should fail preflight validation when a header converter fails custom validation",
0
);
}
@Test
public void testConnectorHasHeaderConverterWithNoConfigDef() throws InterruptedException {
Map<String, String> config = defaultSinkConnectorProps();
config.put(HEADER_CONVERTER_CLASS_CONFIG, TestConverterWithNoConfigDef.class.getName());
connect.assertions().assertExactlyNumErrorsOnConnectorConfigValidation(
config.get(CONNECTOR_CLASS_CONFIG),
config,
0,
"Connector config should not fail preflight validation even when a header converter provides a null ConfigDef",
0
);
}
public abstract static | that |
java | redisson__redisson | redisson-hibernate/redisson-hibernate-4/src/main/java/org/redisson/hibernate/RedissonRegionNativeFactory.java | {
"start": 994,
"end": 2398
} | class ____ extends RedissonRegionFactory {
private static final long serialVersionUID = 4889779229712681692L;
@Override
public void start(Settings settings, Properties properties) throws CacheException {
Set<Map.Entry<Object, Object>> entrySet = properties.entrySet();
for (Map.Entry<Object, Object> entry : entrySet) {
if (entry.getKey().toString().endsWith(RedissonRegionFactory.MAX_ENTRIES_SUFFIX)) {
Integer value = Integer.valueOf(entry.getValue().toString());
if (value > 0) {
throw new IllegalArgumentException(".eviction.max_entries setting can't be non-zero");
}
}
if (entry.getKey().toString().endsWith(RedissonRegionFactory.MAX_IDLE_SUFFIX)) {
Integer value = Integer.valueOf(entry.getValue().toString());
if (value > 0) {
throw new IllegalArgumentException(".expiration.max_idle_time setting can't be non-zero");
}
}
}
super.start(settings, properties);
}
@Override
protected RMapCache<Object, Object> getCache(String regionName, Properties properties, String defaultKey) {
RMapCacheNative<Object, Object> cache = redisson.getMapCacheNative(regionName);
return new MapCacheNativeWrapper<>(cache);
}
}
| RedissonRegionNativeFactory |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/filter/IgnoredTypesTest.java | {
"start": 915,
"end": 1081
} | class ____ {
public String name;
public Person() { }
public Person(String name) {
this.name = name;
}
}
static | Person |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GridFsEndpointBuilderFactory.java | {
"start": 20423,
"end": 21069
} | class ____
* calling the WriteConcern#valueOf(String) method.
*
* The option is a: <code>com.mongodb.WriteConcern</code> type.
*
* Group: common
*
* @param writeConcern the value to set
* @return the dsl builder
*/
default GridFsEndpointProducerBuilder writeConcern(com.mongodb.WriteConcern writeConcern) {
doSetProperty("writeConcern", writeConcern);
return this;
}
/**
* Set the WriteConcern for write operations on MongoDB using the
* standard ones. Resolved from the fields of the WriteConcern | by |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/PullImageUpdateEvent.java | {
"start": 888,
"end": 1123
} | class ____ extends ImageProgressUpdateEvent {
@JsonCreator
public PullImageUpdateEvent(String id, String status, ProgressDetail progressDetail, String progress) {
super(id, status, progressDetail, progress);
}
}
| PullImageUpdateEvent |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/ignoreunmapped/UserEntity.java | {
"start": 253,
"end": 803
} | class ____ {
private String username;
private String email;
private String password;
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
}
| UserEntity |
java | apache__camel | core/camel-core-languages/src/main/java/org/apache/camel/language/simple/ast/SimpleFunctionExpression.java | {
"start": 1939,
"end": 106480
} | class ____ extends LiteralExpression {
// use caches to avoid re-parsing the same expressions over and over again
private final Map<String, Expression> cacheExpression;
private final boolean skipFileFunctions;
public SimpleFunctionExpression(SimpleToken token, Map<String, Expression> cacheExpression, boolean skipFileFunctions) {
super(token);
this.cacheExpression = cacheExpression;
this.skipFileFunctions = skipFileFunctions;
}
/**
* Creates a Camel {@link Expression} based on this model.
*
* @param expression not in use
*/
@Override
public Expression createExpression(CamelContext camelContext, String expression) {
String function = text.toString();
Expression answer = cacheExpression != null ? cacheExpression.get(function) : null;
if (answer == null) {
answer = createSimpleExpression(camelContext, function, true);
if (answer != null) {
answer.init(camelContext);
}
if (cacheExpression != null && answer != null) {
cacheExpression.put(function, answer);
}
}
return answer;
}
private Expression createSimpleExpression(CamelContext camelContext, String function, boolean strict) {
// return the function directly if we can create function without analyzing the prefix
Expression answer = createSimpleExpressionDirectly(camelContext, function);
if (answer != null) {
return answer;
}
// message first
answer = createSimpleExpressionMessage(camelContext, function, strict);
if (answer != null) {
return answer;
}
// body and headers first
answer = createSimpleExpressionBodyOrHeader(function, strict);
if (answer != null) {
return answer;
}
// variables
answer = createSimpleExpressionVariables(function, strict);
if (answer != null) {
return answer;
}
// custom languages
answer = createSimpleCustomLanguage(function, strict);
if (answer != null) {
return answer;
}
// camelContext OGNL
String remainder = ifStartsWithReturnRemainder("camelContext", function);
if (remainder != null) {
boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
if (invalid) {
throw new SimpleParserException("Valid syntax: ${camelContext.OGNL} was: " + function, token.getIndex());
}
return SimpleExpressionBuilder.camelContextOgnlExpression(remainder);
}
// Exception OGNL
remainder = ifStartsWithReturnRemainder("exception", function);
if (remainder != null) {
boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
if (invalid) {
throw new SimpleParserException("Valid syntax: ${exception.OGNL} was: " + function, token.getIndex());
}
return SimpleExpressionBuilder.exchangeExceptionOgnlExpression(remainder);
}
// exchange property
remainder = ifStartsWithReturnRemainder("exchangeProperty", function);
if (remainder != null) {
// remove leading character (dot, colon or ?)
if (remainder.startsWith(".") || remainder.startsWith(":") || remainder.startsWith("?")) {
remainder = remainder.substring(1);
}
// remove starting and ending brackets
if (remainder.startsWith("[") && remainder.endsWith("]")) {
remainder = remainder.substring(1, remainder.length() - 1);
}
// validate syntax
boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
if (invalid) {
throw new SimpleParserException("Valid syntax: ${exchangeProperty.OGNL} was: " + function, token.getIndex());
}
if (OgnlHelper.isValidOgnlExpression(remainder)) {
// ognl based property
return SimpleExpressionBuilder.propertyOgnlExpression(remainder);
} else {
// regular property
return ExpressionBuilder.exchangePropertyExpression(remainder);
}
}
// system property
remainder = ifStartsWithReturnRemainder("sys.", function);
if (remainder != null) {
return ExpressionBuilder.systemPropertyExpression(remainder);
}
remainder = ifStartsWithReturnRemainder("sysenv.", function);
if (remainder == null) {
remainder = ifStartsWithReturnRemainder("sysenv:", function);
}
if (remainder == null) {
remainder = ifStartsWithReturnRemainder("env.", function);
}
if (remainder == null) {
remainder = ifStartsWithReturnRemainder("env:", function);
}
if (remainder != null) {
return ExpressionBuilder.systemEnvironmentExpression(remainder);
}
// exchange OGNL
remainder = ifStartsWithReturnRemainder("exchange", function);
if (remainder != null) {
boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
if (invalid) {
throw new SimpleParserException("Valid syntax: ${exchange.OGNL} was: " + function, token.getIndex());
}
return SimpleExpressionBuilder.exchangeOgnlExpression(remainder);
}
// pretty
remainder = ifStartsWithReturnRemainder("pretty(", function);
if (remainder != null) {
String exp = StringHelper.beforeLast(remainder, ")");
if (exp == null) {
throw new SimpleParserException("Valid syntax: ${pretty(exp)} was: " + function, token.getIndex());
}
exp = StringHelper.removeLeadingAndEndingQuotes(exp);
Expression inlined = camelContext.resolveLanguage("simple").createExpression(exp);
return ExpressionBuilder.prettyExpression(inlined);
}
// file: prefix
remainder = ifStartsWithReturnRemainder("file:", function);
if (remainder != null) {
Expression fileExpression;
if (skipFileFunctions) {
// do not create file expressions but keep the function as-is as a constant value
fileExpression = ExpressionBuilder.constantExpression("${" + function + "}");
} else {
fileExpression = createSimpleFileExpression(remainder, strict);
}
if (fileExpression != null) {
return fileExpression;
}
}
// date: prefix
remainder = ifStartsWithReturnRemainder("date:", function);
if (remainder != null) {
String[] parts = remainder.split(":", 2);
if (parts.length == 1) {
return SimpleExpressionBuilder.dateExpression(parts[0]);
} else if (parts.length == 2) {
return SimpleExpressionBuilder.dateExpression(parts[0], parts[1]);
}
}
// date-with-timezone: prefix
remainder = ifStartsWithReturnRemainder("date-with-timezone:", function);
if (remainder != null) {
String[] parts = remainder.split(":", 3);
if (parts.length < 3) {
throw new SimpleParserException(
"Valid syntax: ${date-with-timezone:command:timezone:pattern} was: " + function, token.getIndex());
}
return SimpleExpressionBuilder.dateExpression(parts[0], parts[1], parts[2]);
}
// bean: prefix
remainder = ifStartsWithReturnRemainder("bean:", function);
if (remainder != null) {
Language bean = camelContext.resolveLanguage("bean");
String ref = remainder;
Object method = null;
Object scope = null;
// we support different syntax for bean function
if (remainder.contains("?method=") || remainder.contains("?scope=")) {
ref = StringHelper.before(remainder, "?");
String query = StringHelper.after(remainder, "?");
try {
Map<String, Object> map = URISupport.parseQuery(query);
method = map.get("method");
scope = map.get("scope");
} catch (URISyntaxException e) {
throw RuntimeCamelException.wrapRuntimeException(e);
}
} else {
//first check case :: because of my.own.Bean::method
int doubleColonIndex = remainder.indexOf("::");
//need to check that not inside params
int beginOfParameterDeclaration = remainder.indexOf('(');
if (doubleColonIndex > 0 && (!remainder.contains("(") || doubleColonIndex < beginOfParameterDeclaration)) {
ref = remainder.substring(0, doubleColonIndex);
method = remainder.substring(doubleColonIndex + 2);
} else {
int idx = remainder.indexOf('.');
if (idx > 0) {
ref = remainder.substring(0, idx);
method = remainder.substring(idx + 1);
}
}
}
Class<?> type = null;
if (ref != null && ref.startsWith("type:")) {
try {
type = camelContext.getClassResolver().resolveMandatoryClass(ref.substring(5));
ref = null;
} catch (ClassNotFoundException e) {
throw RuntimeCamelException.wrapRuntimeException(e);
}
}
// there are parameters then map them into properties
Object[] properties = new Object[7];
properties[3] = type;
properties[4] = ref;
properties[2] = method;
properties[5] = scope;
return bean.createExpression(null, properties);
}
// properties-exist: prefix
remainder = ifStartsWithReturnRemainder("propertiesExist:", function);
if (remainder != null) {
String[] parts = remainder.split(":", 2);
if (parts.length > 2) {
throw new SimpleParserException("Valid syntax: ${propertiesExist:key} was: " + function, token.getIndex());
}
String key = parts[0];
boolean negate = key != null && key.startsWith("!");
if (negate) {
key = key.substring(1);
}
return ExpressionBuilder.propertiesComponentExist(key, negate);
}
// properties: prefix
remainder = ifStartsWithReturnRemainder("properties:", function);
if (remainder != null) {
String[] parts = remainder.split(":", 2);
if (parts.length > 2) {
throw new SimpleParserException("Valid syntax: ${properties:key[:default]} was: " + function, token.getIndex());
}
String defaultValue = null;
if (parts.length >= 2) {
defaultValue = parts[1];
}
String key = parts[0];
return ExpressionBuilder.propertiesComponentExpression(key, defaultValue);
}
// ref: prefix
remainder = ifStartsWithReturnRemainder("ref:", function);
if (remainder != null) {
return ExpressionBuilder.refExpression(remainder);
}
// type: prefix
remainder = ifStartsWithReturnRemainder("type:", function);
if (remainder != null) {
Expression exp = SimpleExpressionBuilder.typeExpression(remainder);
exp.init(camelContext);
// we want to cache this expression, so we won't re-evaluate it as the type/constant won't change
return SimpleExpressionBuilder.cacheExpression(exp);
}
// miscellaneous functions
Expression misc = createSimpleExpressionMisc(function);
if (misc != null) {
return misc;
}
// attachments
if ("attachments".equals(function) || ifStartsWithReturnRemainder("attachment", function) != null) {
Expression exp = createSimpleAttachments(camelContext, function);
if (exp != null) {
return exp;
}
}
if (strict) {
throw new SimpleParserException("Unknown function: " + function, token.getIndex());
} else {
return null;
}
}
/**
 * Creates the attachments function via the optional camel-attachments component,
 * which is discovered on the classpath through the bootstrap factory finder.
 *
 * @throws IllegalArgumentException if camel-attachments is not on the classpath
 */
private Expression createSimpleAttachments(CamelContext camelContext, String function) {
    SimpleLanguageFunctionFactory functionFactory = ResolverHelper.resolveService(
            camelContext,
            camelContext.getCamelContextExtension().getBootstrapFactoryFinder(),
            SimpleLanguageFunctionFactory.FACTORY + "/camel-attachments",
            SimpleLanguageFunctionFactory.class)
            .orElseThrow(() -> new IllegalArgumentException(
                    "Cannot find SimpleLanguageFunctionFactory on classpath. Add camel-attachments to classpath."));
    return functionFactory.createFunction(camelContext, function, token.getIndex());
}
/**
 * Creates the messageAs(type) function, with an optional trailing OGNL path,
 * e.g. ${messageAs(com.foo.MyMessage).someMethod}.
 *
 * @return the expression, or <tt>null</tt> if the function is not messageAs
 * @throws SimpleParserException if the messageAs syntax is invalid
 */
private Expression createSimpleExpressionMessage(CamelContext camelContext, String function, boolean strict) {
    String after = ifStartsWithReturnRemainder("messageAs(", function);
    if (after == null) {
        return null;
    }
    String typeName = StringHelper.before(after, ")");
    if (typeName == null) {
        throw new SimpleParserException("Valid syntax: ${messageAs(type)} was: " + function, token.getIndex());
    }
    typeName = StringHelper.removeQuotes(typeName);
    // anything after the closing parenthesis is an OGNL navigation path
    String ognlPath = StringHelper.after(after, ")");
    if (ObjectHelper.isEmpty(ognlPath)) {
        return ExpressionBuilder.messageExpression(typeName);
    }
    if (OgnlHelper.isInvalidValidOgnlExpression(ognlPath)) {
        throw new SimpleParserException("Valid syntax: ${messageAs(type).OGNL} was: " + function, token.getIndex());
    }
    return SimpleExpressionBuilder.messageOgnlExpression(typeName, ognlPath);
}
/**
 * Creates expressions for the body and header related functions: bodyAs(type),
 * mandatoryBodyAs(type), body OGNL, headerAs(key,type), the headers functions,
 * and single-header access (plain or OGNL based).
 *
 * NOTE(review): the prefix checks are order sensitive — "bodyAs(" and
 * "mandatoryBodyAs(" must be tested before the plain "body" OGNL check,
 * because "bodyAs(..." also starts with "body".
 *
 * @param  function the function text (without the ${ } placeholders)
 * @param  strict   whether parsing is strict (currently unused in this group;
 *                  unknown functions fall through by returning null)
 * @return the expression, or <tt>null</tt> if the function does not belong to
 *         this group so the caller can try the other function groups
 * @throws SimpleParserException if a matching function has invalid syntax
 */
private Expression createSimpleExpressionBodyOrHeader(String function, boolean strict) {
    // bodyAs
    String remainder = ifStartsWithReturnRemainder("bodyAs(", function);
    if (remainder != null) {
        String type = StringHelper.before(remainder, ")");
        if (type == null) {
            throw new SimpleParserException("Valid syntax: ${bodyAs(type)} was: " + function, token.getIndex());
        }
        type = StringHelper.removeQuotes(type);
        // anything after the closing parenthesis is an optional OGNL path
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isNotEmpty(remainder)) {
            boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
            if (invalid) {
                throw new SimpleParserException("Valid syntax: ${bodyAs(type).OGNL} was: " + function, token.getIndex());
            }
            return SimpleExpressionBuilder.bodyOgnlExpression(type, remainder);
        } else {
            return ExpressionBuilder.bodyExpression(type);
        }
    }
    // mandatoryBodyAs
    remainder = ifStartsWithReturnRemainder("mandatoryBodyAs(", function);
    if (remainder != null) {
        String type = StringHelper.before(remainder, ")");
        if (type == null) {
            throw new SimpleParserException("Valid syntax: ${mandatoryBodyAs(type)} was: " + function, token.getIndex());
        }
        type = StringHelper.removeQuotes(type);
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isNotEmpty(remainder)) {
            boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
            if (invalid) {
                throw new SimpleParserException(
                        "Valid syntax: ${mandatoryBodyAs(type).OGNL} was: " + function, token.getIndex());
            }
            return SimpleExpressionBuilder.mandatoryBodyOgnlExpression(type, remainder);
        } else {
            return SimpleExpressionBuilder.mandatoryBodyExpression(type);
        }
    }
    // body OGNL (must come after the bodyAs checks above)
    remainder = ifStartsWithReturnRemainder("body", function);
    if (remainder == null) {
        remainder = ifStartsWithReturnRemainder("in.body", function);
    }
    if (remainder != null) {
        // OGNL must start with a ".", "?" or "[".
        boolean ognlStart = remainder.startsWith(".") || remainder.startsWith("?") || remainder.startsWith("[");
        boolean invalid = !ognlStart || OgnlHelper.isInvalidValidOgnlExpression(remainder);
        if (invalid) {
            throw new SimpleParserException("Valid syntax: ${body.OGNL} was: " + function, token.getIndex());
        }
        return SimpleExpressionBuilder.bodyOgnlExpression(remainder);
    }
    // headerAs
    remainder = ifStartsWithReturnRemainder("headerAs(", function);
    if (remainder != null) {
        String keyAndType = StringHelper.before(remainder, ")");
        if (keyAndType == null) {
            throw new SimpleParserException("Valid syntax: ${headerAs(key, type)} was: " + function, token.getIndex());
        }
        String key = StringHelper.before(keyAndType, ",");
        String type = StringHelper.after(keyAndType, ",");
        // nothing may follow the closing parenthesis for headerAs
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isEmpty(key) || ObjectHelper.isEmpty(type) || ObjectHelper.isNotEmpty(remainder)) {
            throw new SimpleParserException("Valid syntax: ${headerAs(key, type)} was: " + function, token.getIndex());
        }
        key = StringHelper.removeQuotes(key);
        type = StringHelper.removeQuotes(type);
        return ExpressionBuilder.headerExpression(key, type);
    }
    // headers function
    if ("in.headers".equals(function) || "headers".equals(function)) {
        return ExpressionBuilder.headersExpression();
    } else if ("headers.size".equals(function) || "headers.size()".equals(function)
            || "headers.length".equals(function) || "headers.length()".equals(function)) {
        return ExpressionBuilder.headersSizeExpression();
    }
    // in header function
    remainder = parseInHeader(function);
    if (remainder != null) {
        // remove leading character (dot, colon or ?)
        if (remainder.startsWith(".") || remainder.startsWith(":") || remainder.startsWith("?")) {
            remainder = remainder.substring(1);
        }
        // remove starting and ending brackets
        if (remainder.startsWith("[") && remainder.endsWith("]")) {
            remainder = remainder.substring(1, remainder.length() - 1);
        }
        // remove quotes from key
        String key = StringHelper.removeLeadingAndEndingQuotes(remainder);
        // validate syntax
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(key);
        if (invalid) {
            throw new SimpleParserException("Valid syntax: ${header.name[key]} was: " + function, token.getIndex());
        }
        if (OgnlHelper.isValidOgnlExpression(key)) {
            // ognl based header
            return SimpleExpressionBuilder.headersOgnlExpression(key);
        } else {
            // regular header
            return ExpressionBuilder.headerExpression(key);
        }
    }
    return null;
}
/**
 * Creates expressions for the variable related functions: variableAs(key,type),
 * the variables functions, and single-variable access (plain or OGNL based).
 *
 * NOTE(review): the checks are order sensitive — "variableAs(" must be tested
 * before the plain "variables"/"variable" forms, because "variableAs(..." would
 * otherwise also be matched by the shorter prefixes.
 *
 * @param  function the function text (without the ${ } placeholders)
 * @param  strict   whether parsing is strict (currently unused in this group;
 *                  unknown functions fall through by returning null)
 * @return the expression, or <tt>null</tt> if the function does not belong to
 *         this group so the caller can try the other function groups
 * @throws SimpleParserException if a matching function has invalid syntax
 */
private Expression createSimpleExpressionVariables(String function, boolean strict) {
    // variableAs
    String remainder = ifStartsWithReturnRemainder("variableAs(", function);
    if (remainder != null) {
        String keyAndType = StringHelper.before(remainder, ")");
        if (keyAndType == null) {
            throw new SimpleParserException("Valid syntax: ${variableAs(key, type)} was: " + function, token.getIndex());
        }
        String key = StringHelper.before(keyAndType, ",");
        String type = StringHelper.after(keyAndType, ",");
        // nothing may follow the closing parenthesis for variableAs
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isEmpty(key) || ObjectHelper.isEmpty(type) || ObjectHelper.isNotEmpty(remainder)) {
            throw new SimpleParserException("Valid syntax: ${variableAs(key, type)} was: " + function, token.getIndex());
        }
        key = StringHelper.removeQuotes(key);
        type = StringHelper.removeQuotes(type);
        return ExpressionBuilder.variableExpression(key, type);
    }
    // variables function
    if ("variables".equals(function)) {
        return ExpressionBuilder.variablesExpression();
    } else if ("variables.size".equals(function) || "variables.size()".equals(function)
            || "variables.length".equals(function) || "variables.length()".equals(function)) {
        return ExpressionBuilder.variablesSizeExpression();
    }
    // variable function
    remainder = parseVariable(function);
    if (remainder != null) {
        // remove leading character (dot, colon or ?)
        if (remainder.startsWith(".") || remainder.startsWith(":") || remainder.startsWith("?")) {
            remainder = remainder.substring(1);
        }
        // remove starting and ending brackets
        if (remainder.startsWith("[") && remainder.endsWith("]")) {
            remainder = remainder.substring(1, remainder.length() - 1);
        }
        // remove quotes from key
        String key = StringHelper.removeLeadingAndEndingQuotes(remainder);
        // validate syntax
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(key);
        if (invalid) {
            throw new SimpleParserException("Valid syntax: ${variable.name[key]} was: " + function, token.getIndex());
        }
        if (OgnlHelper.isValidOgnlExpression(key)) {
            // ognl based variable
            return SimpleExpressionBuilder.variablesOgnlExpression(key);
        } else {
            // regular variable
            return ExpressionBuilder.variableExpression(key);
        }
    }
    return null;
}
/**
 * Creates expressions for the embedded custom languages: ${jq(exp)}, ${jsonpath(exp)}
 * and ${xpath(exp)}.
 *
 * The previous implementation had three byte-identical copies of the parsing logic,
 * one per language; they are now folded into a single parameterized helper.
 *
 * @param  function the function text (without the ${ } placeholders)
 * @param  strict   whether parsing is strict (unused here; non-matching functions
 *                  fall through by returning null)
 * @return the expression, or <tt>null</tt> if the function is none of jq/jsonpath/xpath
 * @throws SimpleParserException if a matching function has invalid syntax
 */
private Expression createSimpleCustomLanguage(String function, boolean strict) {
    Expression answer = createCustomLanguageFunction("jq", function);
    if (answer == null) {
        answer = createCustomLanguageFunction("jsonpath", function);
    }
    if (answer == null) {
        answer = createCustomLanguageFunction("xpath", function);
    }
    return answer;
}

/**
 * Parses a single embedded-language function of the form ${language(exp)}, where the
 * expression may optionally start with a header:/property:/exchangeProperty:/variable:
 * source followed by a comma and the actual expression.
 *
 * @return the expression, or <tt>null</tt> if the function does not start with "language("
 * @throws SimpleParserException if the closing parenthesis is missing
 */
private Expression createCustomLanguageFunction(String language, String function) {
    String remainder = ifStartsWithReturnRemainder(language + "(", function);
    if (remainder == null) {
        return null;
    }
    String exp = StringHelper.beforeLast(remainder, ")");
    if (exp == null) {
        throw new SimpleParserException("Valid syntax: ${" + language + "(exp)} was: " + function, token.getIndex());
    }
    exp = StringHelper.removeLeadingAndEndingQuotes(exp);
    if (exp.startsWith("header:") || exp.startsWith("property:") || exp.startsWith("exchangeProperty:")
            || exp.startsWith("variable:")) {
        // an explicit single input source is given before the actual expression
        String input = StringHelper.before(exp, ",");
        exp = StringHelper.after(exp, ",");
        if (input != null) {
            input = input.trim();
        }
        if (exp != null) {
            exp = exp.trim();
        }
        return ExpressionBuilder.singleInputLanguageExpression(language, exp, input);
    }
    return ExpressionBuilder.languageExpression(language, exp);
}
/**
 * Creates an expression for the well-known constant functions that need no further
 * parsing, such as ${body}, ${exchangeId} or ${null}.
 *
 * @return the expression, or <tt>null</tt> if the text is not one of the known constants
 */
private Expression createSimpleExpressionDirectly(CamelContext camelContext, String expression) {
    if (expression == null) {
        // unknown, same outcome as no constant matching
        return null;
    }
    switch (expression) {
        case "body":
        case "in.body":
            return ExpressionBuilder.bodyExpression();
        case "prettyBody":
            return ExpressionBuilder.prettyBodyExpression();
        case "bodyOneLine":
            return ExpressionBuilder.bodyOneLine();
        case "originalBody":
            return ExpressionBuilder.originalBodyExpression();
        case "id":
            return ExpressionBuilder.messageIdExpression();
        case "messageTimestamp":
            return ExpressionBuilder.messageTimestampExpression();
        case "exchangeId":
            return ExpressionBuilder.exchangeIdExpression();
        case "exchange":
            return ExpressionBuilder.exchangeExpression();
        case "logExchange":
            return ExpressionBuilder.logExchange();
        case "exception":
            return ExpressionBuilder.exchangeExceptionExpression();
        case "exception.message":
            return ExpressionBuilder.exchangeExceptionMessageExpression();
        case "exception.stacktrace":
            return ExpressionBuilder.exchangeExceptionStackTraceExpression();
        case "threadId":
            return ExpressionBuilder.threadIdExpression();
        case "threadName":
            return ExpressionBuilder.threadNameExpression();
        case "hostname":
            return ExpressionBuilder.hostnameExpression();
        case "camelId":
            return ExpressionBuilder.camelContextNameExpression();
        case "routeId":
            return ExpressionBuilder.routeIdExpression();
        case "fromRouteId":
            return ExpressionBuilder.fromRouteIdExpression();
        case "routeGroup":
            return ExpressionBuilder.routeGroupExpression();
        case "stepId":
            return ExpressionBuilder.stepIdExpression();
        case "null":
            return SimpleExpressionBuilder.nullExpression();
        default:
            return null;
    }
}
/**
 * Creates an expression for the file language functions, e.g. ${file:name.noext}.
 *
 * @param  remainder the text after the "file:" prefix
 * @param  strict    whether to fail on unknown file syntax
 * @return the expression, or <tt>null</tt> if unknown and not strict
 * @throws SimpleParserException if unknown and strict
 */
private Expression createSimpleFileExpression(String remainder, boolean strict) {
    if (remainder != null) {
        switch (remainder) {
            case "name":
                return SimpleExpressionBuilder.fileNameExpression();
            case "name.noext":
                return SimpleExpressionBuilder.fileNameNoExtensionExpression();
            case "name.noext.single":
                return SimpleExpressionBuilder.fileNameNoExtensionSingleExpression();
            case "name.ext":
            case "ext":
                return SimpleExpressionBuilder.fileExtensionExpression();
            case "name.ext.single":
                return SimpleExpressionBuilder.fileExtensionSingleExpression();
            case "onlyname":
                return SimpleExpressionBuilder.fileOnlyNameExpression();
            case "onlyname.noext":
                return SimpleExpressionBuilder.fileOnlyNameNoExtensionExpression();
            case "onlyname.noext.single":
                return SimpleExpressionBuilder.fileOnlyNameNoExtensionSingleExpression();
            case "parent":
                return SimpleExpressionBuilder.fileParentExpression();
            case "path":
                return SimpleExpressionBuilder.filePathExpression();
            case "absolute":
                return SimpleExpressionBuilder.fileAbsoluteExpression();
            case "absolute.path":
                return SimpleExpressionBuilder.fileAbsolutePathExpression();
            case "length":
            case "size":
                return SimpleExpressionBuilder.fileSizeExpression();
            case "modified":
                return SimpleExpressionBuilder.fileLastModifiedExpression();
            default:
                break;
        }
    }
    if (strict) {
        throw new SimpleParserException("Unknown file language syntax: " + remainder, token.getIndex());
    }
    return null;
}
/**
 * Creates expressions for the miscellaneous simple functions: replace, substring,
 * random, skip, collate, join, messageHistory, uuid, hash, empty, iif, list and map.
 *
 * Fix: the iif function previously only rejected more than 3 arguments, so
 * ${iif(predicate,trueExpression)} with fewer than 3 crashed with an
 * ArrayIndexOutOfBoundsException instead of a proper SimpleParserException.
 *
 * @param  function the function text (without the ${ } placeholders)
 * @return the expression, or <tt>null</tt> if the function is not a miscellaneous function
 * @throws SimpleParserException if a matching function has invalid syntax
 */
private Expression createSimpleExpressionMisc(String function) {
    String remainder;
    // replace function
    remainder = ifStartsWithReturnRemainder("replace(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${replace(from,to)} or ${replace(from,to,expression)} was: " + function,
                    token.getIndex());
        }
        String[] tokens = StringQuoteHelper.splitSafeQuote(values, ',', false);
        if (tokens.length > 3) {
            throw new SimpleParserException(
                    "Valid syntax: ${replace(from,to,expression)} was: " + function, token.getIndex());
        }
        String from = StringHelper.xmlDecode(tokens[0]);
        String to = StringHelper.xmlDecode(tokens[1]);
        // special to make it easy to replace to an empty value (ie remove)
        if ("∅".equals(to)) {
            to = "";
        }
        // defaults to the message body when no explicit expression is given
        String exp = "${body}";
        if (tokens.length == 3) {
            exp = tokens[2];
        }
        return SimpleExpressionBuilder.replaceExpression(exp, from, to);
    }
    // substring function
    remainder = ifStartsWithReturnRemainder("substring(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${substring(num)}, ${substring(num,num)}, or ${substring(num,num,expression)} was: "
                                            + function,
                    token.getIndex());
        }
        String[] tokens = StringQuoteHelper.splitSafeQuote(values, ',', false);
        if (tokens.length > 3) {
            throw new SimpleParserException(
                    "Valid syntax: ${substring(num,num,expression)} was: " + function, token.getIndex());
        }
        String num1 = tokens[0];
        String num2 = "0";
        if (tokens.length > 1) {
            num2 = tokens[1];
        }
        // defaults to the message body when no explicit expression is given
        String exp = "${body}";
        if (tokens.length == 3) {
            exp = tokens[2];
        }
        return SimpleExpressionBuilder.substringExpression(exp, num1, num2);
    }
    // random function
    remainder = ifStartsWithReturnRemainder("random(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${random(min,max)} or ${random(max)} was: " + function, token.getIndex());
        }
        if (values.contains(",")) {
            String[] tokens = values.split(",", 3);
            if (tokens.length > 2) {
                throw new SimpleParserException(
                        "Valid syntax: ${random(min,max)} or ${random(max)} was: " + function, token.getIndex());
            }
            return SimpleExpressionBuilder.randomExpression(tokens[0].trim(), tokens[1].trim());
        } else {
            // single argument means min defaults to 0
            return SimpleExpressionBuilder.randomExpression("0", values.trim());
        }
    }
    // skip function
    remainder = ifStartsWithReturnRemainder("skip(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException("Valid syntax: ${skip(number)} was: " + function, token.getIndex());
        }
        String exp = "${body}";
        int num = Integer.parseInt(values.trim());
        return SimpleExpressionBuilder.skipExpression(exp, num);
    }
    // collate function
    remainder = ifStartsWithReturnRemainder("collate(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException("Valid syntax: ${collate(group)} was: " + function, token.getIndex());
        }
        String exp = "${body}";
        int num = Integer.parseInt(values.trim());
        return SimpleExpressionBuilder.collateExpression(exp, num);
    }
    // join function
    remainder = ifStartsWithReturnRemainder("join(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        // defaults: comma separator, no prefix, message body as input
        String separator = ",";
        String prefix = null;
        String exp = "${body}";
        if (ObjectHelper.isNotEmpty(values)) {
            String[] tokens = StringQuoteHelper.splitSafeQuote(values, ',', false);
            if (tokens.length > 3) {
                throw new SimpleParserException(
                        "Valid syntax: ${join(separator,prefix,expression)} was: " + function, token.getIndex());
            }
            if (tokens.length == 3) {
                separator = tokens[0];
                prefix = tokens[1];
                exp = tokens[2];
            } else if (tokens.length == 2) {
                separator = tokens[0];
                prefix = tokens[1];
            } else {
                separator = tokens[0];
            }
        }
        return SimpleExpressionBuilder.joinExpression(exp, separator, prefix);
    }
    // messageHistory function
    remainder = ifStartsWithReturnRemainder("messageHistory", function);
    if (remainder != null) {
        boolean detailed;
        String values = StringHelper.between(remainder, "(", ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            // defaults to detailed output when no argument is given
            detailed = true;
        } else {
            detailed = Boolean.parseBoolean(values);
        }
        return SimpleExpressionBuilder.messageHistoryExpression(detailed);
    } else if (ObjectHelper.equal(function, "messageHistory")) {
        return SimpleExpressionBuilder.messageHistoryExpression(true);
    }
    // uuid function
    remainder = ifStartsWithReturnRemainder("uuid", function);
    if (remainder != null) {
        // optional generator name between the parenthesis
        String values = StringHelper.between(remainder, "(", ")");
        return SimpleExpressionBuilder.uuidExpression(values);
    } else if (ObjectHelper.equal(function, "uuid")) {
        return SimpleExpressionBuilder.uuidExpression(null);
    }
    // hash function
    remainder = ifStartsWithReturnRemainder("hash(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${hash(value,algorithm)} or ${hash(value)} was: " + function, token.getIndex());
        }
        if (values.contains(",")) {
            String[] tokens = values.split(",", 2);
            if (tokens.length > 2) {
                throw new SimpleParserException(
                        "Valid syntax: ${hash(value,algorithm)} or ${hash(value)} was: " + function, token.getIndex());
            }
            return SimpleExpressionBuilder.hashExpression(tokens[0].trim(), tokens[1].trim());
        } else {
            // SHA-256 is the default algorithm
            return SimpleExpressionBuilder.hashExpression(values.trim(), "SHA-256");
        }
    }
    // empty function
    remainder = ifStartsWithReturnRemainder("empty(", function);
    if (remainder != null) {
        String value = StringHelper.before(remainder, ")");
        if (ObjectHelper.isEmpty(value)) {
            throw new SimpleParserException(
                    "Valid syntax: ${empty(<type>)} but was: " + function, token.getIndex());
        }
        return SimpleExpressionBuilder.newEmptyExpression(value);
    }
    // iif function
    remainder = ifStartsWithReturnRemainder("iif(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${iif(predicate,trueExpression,falseExpression)} was: " + function, token.getIndex());
        }
        String[] tokens = StringQuoteHelper.splitSafeQuote(values, ',', true, true);
        // must be exactly 3 parts: predicate, true-expression, false-expression
        // (using != 3 instead of > 3, so too few parts yields a parser error
        // instead of an ArrayIndexOutOfBoundsException below)
        if (tokens.length != 3) {
            throw new SimpleParserException(
                    "Valid syntax: ${iif(predicate,trueExpression,falseExpression)} was: " + function, token.getIndex());
        }
        return SimpleExpressionBuilder.iifExpression(tokens[0].trim(), tokens[1].trim(), tokens[2].trim());
    }
    // list function
    remainder = ifStartsWithReturnRemainder("list(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        String[] tokens = null;
        if (ObjectHelper.isNotEmpty(values)) {
            tokens = StringQuoteHelper.splitSafeQuote(values, ',', true, false);
        }
        return SimpleExpressionBuilder.listExpression(tokens);
    }
    // map function
    remainder = ifStartsWithReturnRemainder("map(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        String[] tokens = null;
        if (ObjectHelper.isNotEmpty(values)) {
            tokens = StringQuoteHelper.splitSafeQuote(values, ',', true, false);
        }
        // there must be an even number of tokens as each map element is a pair
        if (tokens != null && tokens.length % 2 == 1) {
            throw new SimpleParserException(
                    "Map function must have an even number of values, was: " + tokens.length + " values.",
                    token.getIndex());
        }
        return SimpleExpressionBuilder.mapExpression(tokens);
    }
    return null;
}
/**
 * Returns the text following the given prefix, or <tt>null</tt> when the text does
 * not start with the prefix or when nothing follows it.
 */
private String ifStartsWithReturnRemainder(String prefix, String text) {
    if (!text.startsWith(prefix)) {
        return null;
    }
    String rest = text.substring(prefix.length());
    return rest.isEmpty() ? null : rest;
}
@Override
public String createCode(CamelContext camelContext, String expression) throws SimpleParserException {
    // wrap the generated code fragment in the parser's code markers
    String code = doCreateCode(camelContext, expression);
    return BaseSimpleParser.CODE_START + code + BaseSimpleParser.CODE_END;
}
/**
 * Generates the Java source code fragment (for csimple compiled expressions) that
 * corresponds to the current function token.
 *
 * NOTE(review): the dispatch order is significant: the direct constants and the
 * body/header/exchange-property/variable groups are tried first, and overlapping
 * prefixes (e.g. "exceptionAs(" before "exception", "sysenv" before "env") must
 * keep their relative order.
 *
 * @param  camelContext the camel context
 * @param  expression   the full expression being parsed (unused directly here;
 *                      the function text comes from getText())
 * @return the generated code fragment, never null
 * @throws SimpleParserException if the function is unknown or has invalid syntax
 */
private String doCreateCode(CamelContext camelContext, String expression) throws SimpleParserException {
    String function = getText();
    // return the function directly if we can create function without analyzing the prefix
    String answer = createCodeDirectly(function);
    if (answer != null) {
        return answer;
    }
    // body, headers and exchange property first
    answer = createCodeBody(function);
    if (answer != null) {
        return answer;
    }
    answer = createCodeHeader(function);
    if (answer != null) {
        return answer;
    }
    answer = createCodeExchangeProperty(function);
    if (answer != null) {
        return answer;
    }
    answer = createCodeVariables(function);
    if (answer != null) {
        return answer;
    }
    // camelContext OGNL
    String remainder = ifStartsWithReturnRemainder("camelContext", function);
    if (remainder != null) {
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
        if (invalid) {
            throw new SimpleParserException("Valid syntax: ${camelContext.OGNL} was: " + function, token.getIndex());
        }
        return "context" + ognlCodeMethods(remainder, null);
    }
    // ExceptionAs OGNL (must be checked before the plain "exception" prefix below)
    remainder = ifStartsWithReturnRemainder("exceptionAs(", function);
    if (remainder != null) {
        String type = StringHelper.before(remainder, ")");
        remainder = StringHelper.after(remainder, ")");
        type = appendClass(type);
        // nested class names use '.' in source code, not '$'
        type = type.replace('$', '.');
        type = type.trim();
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
        if (type.isEmpty() || invalid) {
            throw new SimpleParserException("Valid syntax: ${exceptionAs(type).OGNL} was: " + function, token.getIndex());
        }
        return "exceptionAs(exchange, " + type + ")" + ognlCodeMethods(remainder, type);
    }
    // Exception OGNL
    remainder = ifStartsWithReturnRemainder("exception", function);
    if (remainder != null) {
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
        if (invalid) {
            throw new SimpleParserException("Valid syntax: ${exceptionAs(type).OGNL} was: " + function, token.getIndex());
        }
        return "exception(exchange)" + ognlCodeMethods(remainder, null);
    }
    // system property
    remainder = ifStartsWithReturnRemainder("sys.", function);
    if (remainder != null) {
        return "sys(\"" + remainder + "\")";
    }
    // environment variable: all four spellings map to the same generated call
    remainder = ifStartsWithReturnRemainder("sysenv.", function);
    if (remainder == null) {
        remainder = ifStartsWithReturnRemainder("sysenv:", function);
    }
    if (remainder == null) {
        remainder = ifStartsWithReturnRemainder("env.", function);
    }
    if (remainder == null) {
        remainder = ifStartsWithReturnRemainder("env:", function);
    }
    if (remainder != null) {
        return "sysenv(\"" + remainder + "\")";
    }
    // exchange OGNL
    remainder = ifStartsWithReturnRemainder("exchange", function);
    if (remainder != null) {
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
        if (invalid) {
            throw new SimpleParserException("Valid syntax: ${exchange.OGNL} was: " + function, token.getIndex());
        }
        return "exchange" + ognlCodeMethods(remainder, null);
    }
    // file: prefix
    remainder = ifStartsWithReturnRemainder("file:", function);
    if (remainder != null) {
        return createCodeFileExpression(remainder);
    }
    // date: prefix
    remainder = ifStartsWithReturnRemainder("date:", function);
    if (remainder != null) {
        String[] parts = remainder.split(":", 2);
        if (parts.length == 1) {
            return "date(exchange, \"" + parts[0] + "\")";
        } else if (parts.length == 2) {
            return "date(exchange, \"" + parts[0] + "\", null, \"" + parts[1] + "\")";
        }
    }
    // date-with-timezone: prefix
    remainder = ifStartsWithReturnRemainder("date-with-timezone:", function);
    if (remainder != null) {
        String[] parts = remainder.split(":", 3);
        if (parts.length < 3) {
            throw new SimpleParserException(
                    "Valid syntax: ${date-with-timezone:command:timezone:pattern} was: " + function, token.getIndex());
        }
        return "date(exchange, \"" + parts[0] + "\", \"" + parts[1] + "\", \"" + parts[2] + "\")";
    }
    // bean: prefix
    remainder = ifStartsWithReturnRemainder("bean:", function);
    if (remainder != null) {
        String ref = remainder;
        Object method = null;
        Object scope = null;
        // we support different syntax for bean function
        if (remainder.contains("?method=") || remainder.contains("?scope=")) {
            // query-parameter style: bean:ref?method=foo&scope=bar
            ref = StringHelper.before(remainder, "?");
            String query = StringHelper.after(remainder, "?");
            try {
                Map<String, Object> map = URISupport.parseQuery(query);
                method = map.get("method");
                scope = map.get("scope");
            } catch (URISyntaxException e) {
                throw RuntimeCamelException.wrapRuntimeException(e);
            }
        } else {
            //first check case :: because of my.own.Bean::method
            int doubleColonIndex = remainder.indexOf("::");
            //need to check that not inside params
            int beginOfParameterDeclaration = remainder.indexOf('(');
            if (doubleColonIndex > 0 && (!remainder.contains("(") || doubleColonIndex < beginOfParameterDeclaration)) {
                ref = remainder.substring(0, doubleColonIndex);
                method = remainder.substring(doubleColonIndex + 2);
            } else {
                // dot style: ref.method
                int idx = remainder.indexOf('.');
                if (idx > 0) {
                    ref = remainder.substring(0, idx);
                    method = remainder.substring(idx + 1);
                }
            }
        }
        ref = ref.trim();
        if (method != null && scope != null) {
            return "bean(exchange, bean, \"" + ref + "\", \"" + method + "\", \"" + scope + "\")";
        } else if (method != null) {
            return "bean(exchange, bean, \"" + ref + "\", \"" + method + "\", null)";
        } else {
            return "bean(exchange, bean, \"" + ref + "\", null, null)";
        }
    }
    // properties: prefix
    remainder = ifStartsWithReturnRemainder("properties:", function);
    if (remainder != null) {
        String[] parts = remainder.split(":", 2);
        if (parts.length > 2) {
            throw new SimpleParserException("Valid syntax: ${properties:key[:default]} was: " + function, token.getIndex());
        }
        String defaultValue = null;
        if (parts.length >= 2) {
            defaultValue = parts[1];
        }
        String key = parts[0];
        key = key.trim();
        if (defaultValue != null) {
            return "properties(exchange, \"" + key + "\", \"" + defaultValue.trim() + "\")";
        } else {
            return "properties(exchange, \"" + key + "\")";
        }
    }
    // ref: prefix
    remainder = ifStartsWithReturnRemainder("ref:", function);
    if (remainder != null) {
        return "ref(exchange, \"" + remainder + "\")";
    }
    // type: prefix
    remainder = ifStartsWithReturnRemainder("type:", function);
    if (remainder != null) {
        // split into class name and optional trailing field/constant name
        int pos = remainder.lastIndexOf('.');
        String type = pos != -1 ? remainder.substring(0, pos) : remainder;
        String field = pos != -1 ? remainder.substring(pos + 1) : null;
        if (!type.endsWith(".class")) {
            type += ".class";
        }
        type = type.replace('$', '.');
        if (field != null) {
            return "type(exchange, " + type + ", \"" + field + "\")";
        } else {
            return "type(exchange, " + type + ")";
        }
    }
    // miscellaneous functions
    String misc = createCodeExpressionMisc(function);
    if (misc != null) {
        return misc;
    }
    // attachments
    if ("attachments".equals(function) || ifStartsWithReturnRemainder("attachment", function) != null) {
        String code = createCodeAttachments(camelContext, function);
        if (code != null) {
            return code;
        }
    }
    throw new SimpleParserException("Unknown function: " + function, token.getIndex());
}
/**
 * Maps well-known simple-language constants directly to their generated java
 * source snippet (eg {@code exchangeId} -> {@code exchange.getExchangeId()}).
 *
 * @param  expression the simple language expression
 * @return            the generated source code, or <tt>null</tt> when the
 *                    expression is not one of the known constants
 */
public String createCodeDirectly(String expression) throws SimpleParserException {
    // unknown (including null) expressions fall through and return null
    if (expression == null) {
        return null;
    }
    switch (expression) {
        case "body":
        case "in.body":
            return "body";
        case "prettyBody":
            return "prettyBody(exchange)";
        case "bodyOneLine":
            return "bodyOneLine(exchange)";
        case "id":
            return "message.getMessageId()";
        case "messageTimestamp":
            return "message.getMessageTimestamp()";
        case "exchangeId":
            return "exchange.getExchangeId()";
        case "exchange":
            return "exchange";
        case "logExchange":
            return "logExchange(exchange)";
        case "exception":
            return "exception(exchange)";
        case "exception.message":
            return "exceptionMessage(exchange)";
        case "exception.stacktrace":
            return "exceptionStacktrace(exchange)";
        case "threadId":
            return "threadId()";
        case "threadName":
            return "threadName()";
        case "hostname":
            return "hostName()";
        case "camelId":
            return "context.getName()";
        case "fromRouteId":
            return "fromRouteId(exchange)";
        case "routeId":
            return "routeId(exchange)";
        case "stepId":
            return "stepId(exchange)";
        case "null":
            return "null";
        default:
            return null;
    }
}
/**
 * Generates java source code for the simple-language body functions:
 * {@code bodyAsIndex(type, index)}, {@code bodyAs(type)},
 * {@code mandatoryBodyAsIndex(type, index)}, {@code mandatoryBodyAs(type)} and
 * plain {@code body}/{@code in.body} with optional OGNL notation.
 *
 * @param  function              the simple language function
 * @return                       the generated source code, or <tt>null</tt> when the
 *                               function is not a body function
 * @throws SimpleParserException if the function uses invalid syntax
 */
private String createCodeBody(final String function) {
    // bodyAsIndex(type, index) - body converted to type, looked up by index, with optional OGNL suffix
    String remainder = ifStartsWithReturnRemainder("bodyAsIndex(", function);
    if (remainder != null) {
        String typeAndIndex = StringHelper.before(remainder, ")");
        if (typeAndIndex == null) {
            throw new SimpleParserException(
                    "Valid syntax: ${bodyAsIndex(type, index).OGNL} was: " + function, token.getIndex());
        }
        String type = StringHelper.before(typeAndIndex, ",");
        String index = StringHelper.after(typeAndIndex, ",");
        // anything after the closing parenthesis is the OGNL part
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isEmpty(type) || ObjectHelper.isEmpty(index)) {
            throw new SimpleParserException(
                    "Valid syntax: ${bodyAsIndex(type, index).OGNL} was: " + function, token.getIndex());
        }
        type = type.trim();
        type = appendClass(type);
        // inner classes use dotted notation in the generated source
        type = type.replace('$', '.');
        index = StringHelper.removeQuotes(index);
        index = index.trim();
        if (ObjectHelper.isNotEmpty(remainder)) {
            boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
            if (invalid) {
                throw new SimpleParserException(
                        "Valid syntax: ${bodyAsIndex(type, index).OGNL} was: " + function, token.getIndex());
            }
            return "bodyAsIndex(message, " + type + ", \"" + index + "\")" + ognlCodeMethods(remainder, type);
        } else {
            return "bodyAsIndex(message, " + type + ", \"" + index + "\")";
        }
    }
    // bodyAs(type) - body converted to the given type, with optional OGNL suffix
    remainder = ifStartsWithReturnRemainder("bodyAs(", function);
    if (remainder != null) {
        String type = StringHelper.before(remainder, ")");
        if (type == null) {
            throw new SimpleParserException("Valid syntax: ${bodyAs(type)} was: " + function, token.getIndex());
        }
        type = appendClass(type);
        type = type.replace('$', '.');
        type = type.trim();
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isNotEmpty(remainder)) {
            boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
            if (invalid) {
                throw new SimpleParserException("Valid syntax: ${bodyAs(type).OGNL} was: " + function, token.getIndex());
            }
            if (remainder.startsWith("[")) {
                // is there any index, then we should use bodyAsIndex function instead
                // (use splitOgnl which assembles multiple indexes into a single part)
                List<String> parts = splitOgnl(remainder);
                if (!parts.isEmpty()) {
                    String func = "bodyAsIndex(" + type + ", \"" + parts.remove(0) + "\")";
                    String last = String.join("", parts);
                    if (!last.isEmpty()) {
                        func += "." + last;
                    }
                    // re-parse as a bodyAsIndex function
                    return createCodeBody(func);
                }
            }
            return "bodyAs(message, " + type + ")" + ognlCodeMethods(remainder, type);
        } else {
            return "bodyAs(message, " + type + ")";
        }
    }
    // mandatoryBodyAsIndex(type, index) - as bodyAsIndex but the body must be present
    remainder = ifStartsWithReturnRemainder("mandatoryBodyAsIndex(", function);
    if (remainder != null) {
        String typeAndIndex = StringHelper.before(remainder, ")");
        if (typeAndIndex == null) {
            throw new SimpleParserException(
                    "Valid syntax: ${mandatoryBodyAsIndex(type, index).OGNL} was: " + function, token.getIndex());
        }
        String type = StringHelper.before(typeAndIndex, ",");
        String index = StringHelper.after(typeAndIndex, ",");
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isEmpty(type) || ObjectHelper.isEmpty(index)) {
            throw new SimpleParserException(
                    "Valid syntax: ${mandatoryBodyAsIndex(type, index).OGNL} was: " + function, token.getIndex());
        }
        type = type.trim();
        type = appendClass(type);
        type = type.replace('$', '.');
        index = StringHelper.removeQuotes(index);
        index = index.trim();
        if (ObjectHelper.isNotEmpty(remainder)) {
            boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
            if (invalid) {
                throw new SimpleParserException(
                        "Valid syntax: ${mandatoryBodyAsIndex(type, index).OGNL} was: " + function, token.getIndex());
            }
            return "mandatoryBodyAsIndex(message, " + type + ", \"" + index + "\")" + ognlCodeMethods(remainder, type);
        } else {
            return "mandatoryBodyAsIndex(message, " + type + ", \"" + index + "\")";
        }
    }
    // mandatoryBodyAs(type) - as bodyAs but the body must be present
    remainder = ifStartsWithReturnRemainder("mandatoryBodyAs(", function);
    if (remainder != null) {
        String type = StringHelper.before(remainder, ")");
        if (type == null) {
            throw new SimpleParserException("Valid syntax: ${mandatoryBodyAs(type)} was: " + function, token.getIndex());
        }
        type = appendClass(type);
        type = type.replace('$', '.');
        type = type.trim();
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isNotEmpty(remainder)) {
            boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
            if (invalid) {
                throw new SimpleParserException(
                        "Valid syntax: ${mandatoryBodyAs(type).OGNL} was: " + function, token.getIndex());
            }
            if (remainder.startsWith("[")) {
                // is there any index, then we should use mandatoryBodyAsIndex function instead
                // (use splitOgnl which assembles multiple indexes into a single part)
                List<String> parts = splitOgnl(remainder);
                if (!parts.isEmpty()) {
                    String func = "mandatoryBodyAsIndex(" + type + ", \"" + parts.remove(0) + "\")";
                    String last = String.join("", parts);
                    if (!last.isEmpty()) {
                        func += "." + last;
                    }
                    // re-parse as a mandatoryBodyAsIndex function
                    return createCodeBody(func);
                }
            }
            return "mandatoryBodyAs(message, " + type + ")" + ognlCodeMethods(remainder, type);
        } else {
            return "mandatoryBodyAs(message, " + type + ")";
        }
    }
    // body OGNL - plain body access with OGNL notation (eg body.foo or body[0])
    remainder = ifStartsWithReturnRemainder("body", function);
    if (remainder == null) {
        remainder = ifStartsWithReturnRemainder("in.body", function);
    }
    if (remainder != null) {
        // OGNL must start with a . ? or [
        boolean ognlStart = remainder.startsWith(".") || remainder.startsWith("?") || remainder.startsWith("[");
        boolean invalid = !ognlStart || OgnlHelper.isInvalidValidOgnlExpression(remainder);
        if (invalid) {
            throw new SimpleParserException("Valid syntax: ${body.OGNL} was: " + function, token.getIndex());
        }
        if (remainder.startsWith("[")) {
            // is there any index, then we should use bodyAsIndex function instead
            // (use splitOgnl which assembles multiple indexes into a single part)
            List<String> parts = splitOgnl(remainder);
            if (!parts.isEmpty()) {
                // no explicit type is known so fall back to Object.class
                String func = "bodyAsIndex(Object.class, \"" + parts.remove(0) + "\")";
                String last = String.join("", parts);
                if (!last.isEmpty()) {
                    func += "." + last;
                }
                return createCodeBody(func);
            }
        }
        return "body" + ognlCodeMethods(remainder, null);
    }
    // not a body function
    return null;
}
/**
 * Generates java source code for the simple-language header functions:
 * {@code headerAsIndex(key, type, index)}, {@code headerAs(key, type)},
 * {@code headers}/{@code headers.size} and plain {@code header.name} access
 * (optionally with an index such as {@code header.foo[0]}).
 *
 * @param  function              the simple language function
 * @return                       the generated source code, or <tt>null</tt> when the
 *                               function is not a header function
 * @throws SimpleParserException if the function uses invalid syntax
 */
private String createCodeHeader(final String function) {
    // headerAsIndex(key, type, index) - header converted to type and looked up by index
    String remainder = ifStartsWithReturnRemainder("headerAsIndex(", function);
    if (remainder != null) {
        String keyTypeAndIndex = StringHelper.before(remainder, ")");
        if (keyTypeAndIndex == null) {
            throw new SimpleParserException(
                    "Valid syntax: ${headerAsIndex(key, type, index)} was: " + function, token.getIndex());
        }
        String[] parts = keyTypeAndIndex.split(",");
        if (parts.length != 3) {
            throw new SimpleParserException(
                    "Valid syntax: ${headerAsIndex(key, type, index)} was: " + function, token.getIndex());
        }
        String key = parts[0];
        String type = parts[1];
        String index = parts[2];
        if (ObjectHelper.isEmpty(key) || ObjectHelper.isEmpty(type) || ObjectHelper.isEmpty(index)) {
            throw new SimpleParserException(
                    "Valid syntax: ${headerAsIndex(key, type, index)} was: " + function, token.getIndex());
        }
        key = StringHelper.removeQuotes(key);
        key = key.trim();
        type = appendClass(type);
        // inner classes use dotted notation in the generated source
        type = type.replace('$', '.');
        type = type.trim();
        index = StringHelper.removeQuotes(index);
        index = index.trim();
        // anything after the closing parenthesis is the OGNL part
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isNotEmpty(remainder)) {
            boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
            if (invalid) {
                throw new SimpleParserException(
                        "Valid syntax: ${headerAsIndex(key, type, index).OGNL} was: " + function, token.getIndex());
            }
            return "headerAsIndex(message, " + type + ", \"" + key + "\", \"" + index + "\")"
                   + ognlCodeMethods(remainder, type);
        } else {
            return "headerAsIndex(message, " + type + ", \"" + key + "\", \"" + index + "\")";
        }
    }
    // headerAs(key, type) - header converted to the given type
    remainder = ifStartsWithReturnRemainder("headerAs(", function);
    if (remainder != null) {
        String keyAndType = StringHelper.before(remainder, ")");
        if (keyAndType == null) {
            throw new SimpleParserException("Valid syntax: ${headerAs(key, type)} was: " + function, token.getIndex());
        }
        String key = StringHelper.before(keyAndType, ",");
        String type = StringHelper.after(keyAndType, ",");
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isEmpty(key) || ObjectHelper.isEmpty(type)) {
            throw new SimpleParserException("Valid syntax: ${headerAs(key, type)} was: " + function, token.getIndex());
        }
        key = StringHelper.removeQuotes(key);
        key = key.trim();
        type = appendClass(type);
        type = type.replace('$', '.');
        type = type.trim();
        return "headerAs(message, \"" + key + "\", " + type + ")" + ognlCodeMethods(remainder, type);
    }
    // headers function - all headers or their count
    if ("in.headers".equals(function) || "headers".equals(function)) {
        return "message.getHeaders()";
    } else if ("headers.size".equals(function) || "headers.size()".equals(function)
            || "headers.length".equals(function) || "headers.length()".equals(function)) {
        return "message.getHeaders().size()";
    }
    // in header function - header.name style access
    remainder = parseInHeader(function);
    if (remainder != null) {
        // remove leading character (dot, colon or ?)
        if (remainder.startsWith(".") || remainder.startsWith(":") || remainder.startsWith("?")) {
            remainder = remainder.substring(1);
        }
        // remove starting and ending brackets
        if (remainder.startsWith("[") && remainder.endsWith("]")) {
            remainder = remainder.substring(1, remainder.length() - 1);
        }
        // remove quotes from key
        String key = StringHelper.removeLeadingAndEndingQuotes(remainder);
        key = key.trim();
        // validate syntax
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(key);
        if (invalid) {
            throw new SimpleParserException("Valid syntax: ${header.name[key]} was: " + function, token.getIndex());
        }
        // the key can contain index as it may be a map header.foo[0]
        // and the key can also be OGNL (e.g., if there is a dot)
        boolean index = false;
        List<String> parts = splitOgnl(key);
        if (!parts.isEmpty()) {
            String s = parts.get(0);
            int pos = s.indexOf('[');
            if (pos != -1) {
                index = true;
                // split key into name and index
                String before = s.substring(0, pos);
                String after = s.substring(pos);
                parts.set(0, before);
                parts.add(1, after);
            }
        }
        if (index) {
            // is there any index, then we should use headerAsIndex function instead
            // (use splitOgnl which assembles multiple indexes into a single part?)
            String func = "headerAsIndex(\"" + parts.get(0) + "\", Object.class, \"" + parts.get(1) + "\")";
            if (parts.size() > 2) {
                String last = String.join("", parts.subList(2, parts.size()));
                if (!last.isEmpty()) {
                    func += "." + last;
                }
            }
            // re-parse as a headerAsIndex function
            return createCodeHeader(func);
        } else if (OgnlHelper.isValidOgnlExpression(key)) {
            // ognl based header must be typed
            throw new SimpleParserException("Valid syntax: ${headerAs(key, type).OGNL} was: " + function, token.getIndex());
        } else {
            // regular header
            return "header(message, \"" + key + "\")";
        }
    }
    // not a header function
    return null;
}
/**
 * Strips the header accessor prefix ({@code in.headers}, {@code in.header},
 * {@code headers} or {@code header}) from the function and returns the remainder.
 * The longer prefixes are tried first so that e.g. {@code headers} is not
 * mistaken for {@code header} followed by {@code s}.
 *
 * @param  function the simple language function
 * @return          the remainder after the first matching prefix, or <tt>null</tt>
 *                  when the function starts with none of them
 */
private String parseInHeader(String function) {
    for (String prefix : new String[] { "in.headers", "in.header", "headers", "header" }) {
        String remainder = ifStartsWithReturnRemainder(prefix, function);
        if (remainder != null) {
            return remainder;
        }
    }
    return null;
}
/**
 * Strips the variable accessor prefix ({@code variables} or {@code variable})
 * from the function and returns the remainder. The longer prefix is tried
 * first so {@code variables} is not mistaken for {@code variable} + {@code s}.
 *
 * @param  function the simple language function
 * @return          the remainder after the first matching prefix, or <tt>null</tt>
 *                  when the function starts with neither prefix
 */
private String parseVariable(String function) {
    for (String prefix : new String[] { "variables", "variable" }) {
        String remainder = ifStartsWithReturnRemainder(prefix, function);
        if (remainder != null) {
            return remainder;
        }
    }
    return null;
}
/**
 * Generates java source code for the simple-language variable functions:
 * {@code variableAs(key, type)}, {@code variables}/{@code variables.size} and
 * plain {@code variable.name} access (optionally with an index such as
 * {@code variable.foo[0]}).
 *
 * @param  function              the simple language function
 * @return                       the generated source code, or <tt>null</tt> when the
 *                               function is not a variable function
 * @throws SimpleParserException if the function uses invalid syntax
 */
private String createCodeVariables(final String function) {
    // variableAs(key, type) - variable converted to the given type
    String remainder = ifStartsWithReturnRemainder("variableAs(", function);
    if (remainder != null) {
        String keyAndType = StringHelper.before(remainder, ")");
        if (keyAndType == null) {
            throw new SimpleParserException("Valid syntax: ${variableAs(key, type)} was: " + function, token.getIndex());
        }
        String key = StringHelper.before(keyAndType, ",");
        String type = StringHelper.after(keyAndType, ",");
        // anything after the closing parenthesis is the OGNL part
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isEmpty(key) || ObjectHelper.isEmpty(type)) {
            throw new SimpleParserException(
                    "Valid syntax: ${variableAs(key, type)} was: " + function, token.getIndex());
        }
        key = StringHelper.removeQuotes(key);
        key = key.trim();
        type = appendClass(type);
        // inner classes use dotted notation in the generated source
        type = type.replace('$', '.');
        type = type.trim();
        return "variableAs(exchange, \"" + key + "\", " + type + ")" + ognlCodeMethods(remainder, type);
    }
    // variables function - all variables or their count
    if ("variables".equals(function)) {
        return "variables(exchange)";
    } else if ("variables.size".equals(function) || "variables.size()".equals(function)
            || "variables.length".equals(function) || "variables.length()".equals(function)) {
        return "variablesSize(exchange)";
    }
    // variable - plain variable.name style access
    remainder = ifStartsWithReturnRemainder("variable", function);
    if (remainder != null) {
        // remove leading character (dot or ?)
        if (remainder.startsWith(".") || remainder.startsWith("?")) {
            remainder = remainder.substring(1);
        }
        // remove starting and ending brackets
        if (remainder.startsWith("[") && remainder.endsWith("]")) {
            remainder = remainder.substring(1, remainder.length() - 1);
        }
        // remove quotes from key
        String key = StringHelper.removeLeadingAndEndingQuotes(remainder);
        key = key.trim();
        // validate syntax
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(key);
        if (invalid) {
            throw new SimpleParserException(
                    "Valid syntax: ${variable.name[key]} was: " + function, token.getIndex());
        }
        // it is an index?
        String index = null;
        if (key.endsWith("]")) {
            index = StringHelper.between(key, "[", "]");
            if (index != null) {
                key = StringHelper.before(key, "[");
            }
        }
        if (index != null) {
            index = StringHelper.removeLeadingAndEndingQuotes(index);
            // no explicit type is known so fall back to Object.class
            return "variableAsIndex(exchange, Object.class, \"" + key + "\", \"" + index + "\")";
        } else if (OgnlHelper.isValidOgnlExpression(remainder)) {
            // ognl based exchange property must be typed
            // NOTE(review): this checks remainder (createCodeHeader checks the unquoted key) - verify intended
            throw new SimpleParserException(
                    "Valid syntax: ${variableAs(key, type)} was: " + function, token.getIndex());
        } else {
            // regular property
            return "variable(exchange, \"" + key + "\")";
        }
    }
    // not a variable function
    return null;
}
/**
 * Generates java source code for the simple-language exchange property functions:
 * {@code exchangePropertyAsIndex(key, type, index)}, {@code exchangePropertyAs(key, type)}
 * and plain {@code exchangeProperty.name} access (optionally with an index such as
 * {@code exchangeProperty.foo[0]}).
 *
 * @param  function              the simple language function
 * @return                       the generated source code, or <tt>null</tt> when the
 *                               function is not an exchange property function
 * @throws SimpleParserException if the function uses invalid syntax
 */
private String createCodeExchangeProperty(final String function) {
    // exchangePropertyAsIndex(key, type, index) - property converted to type and looked up by index
    String remainder = ifStartsWithReturnRemainder("exchangePropertyAsIndex(", function);
    if (remainder != null) {
        String keyTypeAndIndex = StringHelper.before(remainder, ")");
        if (keyTypeAndIndex == null) {
            throw new SimpleParserException(
                    "Valid syntax: ${exchangePropertyAsIndex(key, type, index)} was: " + function, token.getIndex());
        }
        String[] parts = keyTypeAndIndex.split(",");
        if (parts.length != 3) {
            throw new SimpleParserException(
                    "Valid syntax: ${exchangePropertyAsIndex(key, type, index)} was: " + function, token.getIndex());
        }
        String key = parts[0];
        String type = parts[1];
        String index = parts[2];
        if (ObjectHelper.isEmpty(key) || ObjectHelper.isEmpty(type) || ObjectHelper.isEmpty(index)) {
            throw new SimpleParserException(
                    "Valid syntax: ${exchangePropertyAsIndex(key, type, index)} was: " + function, token.getIndex());
        }
        key = StringHelper.removeQuotes(key);
        key = key.trim();
        type = appendClass(type);
        // inner classes use dotted notation in the generated source
        type = type.replace('$', '.');
        type = type.trim();
        index = StringHelper.removeQuotes(index);
        index = index.trim();
        // anything after the closing parenthesis is the OGNL part
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isNotEmpty(remainder)) {
            boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(remainder);
            if (invalid) {
                throw new SimpleParserException(
                        "Valid syntax: ${exchangePropertyAsIndex(key, type, index).OGNL} was: " + function,
                        token.getIndex());
            }
            return "exchangePropertyAsIndex(exchange, " + type + ", \"" + key + "\", \"" + index + "\")"
                   + ognlCodeMethods(remainder, type);
        } else {
            return "exchangePropertyAsIndex(exchange, " + type + ", \"" + key + "\", \"" + index + "\")";
        }
    }
    // exchangePropertyAs(key, type) - property converted to the given type
    remainder = ifStartsWithReturnRemainder("exchangePropertyAs(", function);
    if (remainder != null) {
        String keyAndType = StringHelper.before(remainder, ")");
        if (keyAndType == null) {
            throw new SimpleParserException(
                    "Valid syntax: ${exchangePropertyAs(key, type)} was: " + function, token.getIndex());
        }
        String key = StringHelper.before(keyAndType, ",");
        String type = StringHelper.after(keyAndType, ",");
        remainder = StringHelper.after(remainder, ")");
        if (ObjectHelper.isEmpty(key) || ObjectHelper.isEmpty(type)) {
            throw new SimpleParserException(
                    "Valid syntax: ${exchangePropertyAs(key, type)} was: " + function, token.getIndex());
        }
        key = StringHelper.removeQuotes(key);
        key = key.trim();
        type = appendClass(type);
        type = type.replace('$', '.');
        type = type.trim();
        return "exchangePropertyAs(exchange, \"" + key + "\", " + type + ")" + ognlCodeMethods(remainder, type);
    }
    // exchange property - plain exchangeProperty.name style access
    remainder = ifStartsWithReturnRemainder("exchangeProperty", function);
    if (remainder != null) {
        // remove leading character (dot or ?)
        if (remainder.startsWith(".") || remainder.startsWith("?")) {
            remainder = remainder.substring(1);
        }
        // remove starting and ending brackets
        if (remainder.startsWith("[") && remainder.endsWith("]")) {
            remainder = remainder.substring(1, remainder.length() - 1);
        }
        // remove quotes from key
        String key = StringHelper.removeLeadingAndEndingQuotes(remainder);
        key = key.trim();
        // validate syntax
        boolean invalid = OgnlHelper.isInvalidValidOgnlExpression(key);
        if (invalid) {
            throw new SimpleParserException(
                    "Valid syntax: ${exchangeProperty.name[key]} was: " + function, token.getIndex());
        }
        // it is an index?
        String index = null;
        if (key.endsWith("]")) {
            index = StringHelper.between(key, "[", "]");
            if (index != null) {
                key = StringHelper.before(key, "[");
            }
        }
        if (index != null) {
            index = StringHelper.removeLeadingAndEndingQuotes(index);
            // no explicit type is known so fall back to Object.class
            return "exchangePropertyAsIndex(exchange, Object.class, \"" + key + "\", \"" + index + "\")";
        } else if (OgnlHelper.isValidOgnlExpression(remainder)) {
            // ognl based exchange property must be typed
            throw new SimpleParserException(
                    "Valid syntax: ${exchangePropertyAs(key, type)} was: " + function, token.getIndex());
        } else {
            // regular property
            return "exchangeProperty(exchange, \"" + key + "\")";
        }
    }
    // not an exchange property function
    return null;
}
/**
 * Ensures the given type name is unquoted and suffixed with <tt>.class</tt>
 * so it can be emitted as a java class literal in the generated source.
 *
 * @param  type the type name (may be quoted)
 * @return      the unquoted type name ending with <tt>.class</tt>
 */
private static String appendClass(String type) {
    String answer = StringHelper.removeQuotes(type);
    return answer.endsWith(".class") ? answer : answer + ".class";
}
/**
 * Generates java source code for the simple-language file functions
 * (eg {@code file:name}, {@code file:onlyname.noext}, {@code file:size}).
 *
 * @param  remainder             the file function name (the part after the file: prefix)
 * @return                       the generated source code
 * @throws SimpleParserException if the file function is not a known one
 */
private String createCodeFileExpression(String remainder) {
    // a null remainder matches no case and falls through to the unknown-syntax error,
    // just like the original equality checks would
    if (remainder != null) {
        switch (remainder) {
            case "name":
                return "fileName(message)";
            case "name.noext":
                return "fileNameNoExt(message)";
            case "name.noext.single":
                return "fileNameNoExtSingle(message)";
            case "name.ext":
            case "ext":
                return "fileNameExt(message)";
            case "name.ext.single":
                return "fileNameExtSingle(message)";
            case "onlyname":
                return "fileOnlyName(message)";
            case "onlyname.noext":
                return "fileOnlyNameNoExt(message)";
            case "onlyname.noext.single":
                return "fileOnlyNameNoExtSingle(message)";
            case "parent":
                return "fileParent(message)";
            case "path":
                return "filePath(message)";
            case "absolute":
                return "fileAbsolute(message)";
            case "absolute.path":
                return "fileAbsolutePath(message)";
            case "length":
            case "size":
                return "fileSize(message)";
            case "modified":
                return "fileModified(message)";
            default:
                break;
        }
    }
    throw new SimpleParserException("Unknown file language syntax: " + remainder, token.getIndex());
}
/**
 * Generates java source code for the attachment functions by delegating to the
 * {@link SimpleLanguageFunctionFactory} from camel-attachments, which is resolved
 * dynamically via the bootstrap factory finder.
 *
 * @param  camelContext             the camel context used to resolve the factory
 * @param  function                 the simple language function
 * @return                          the generated source code from the factory
 * @throws IllegalArgumentException if camel-attachments is not on the classpath
 */
private String createCodeAttachments(CamelContext camelContext, String function) {
    return ResolverHelper.resolveService(
            camelContext,
            camelContext.getCamelContextExtension().getBootstrapFactoryFinder(),
            SimpleLanguageFunctionFactory.FACTORY + "/camel-attachments",
            SimpleLanguageFunctionFactory.class)
            .orElseThrow(() -> new IllegalArgumentException(
                    "Cannot find SimpleLanguageFunctionFactory on classpath. Add camel-attachments to classpath."))
            .createCode(camelContext, function, token.getIndex());
}
/**
 * Generates java source code for the miscellaneous simple-language functions:
 * {@code substring}, {@code random}, {@code replace}, {@code skip}, {@code collate},
 * {@code messageHistory}, {@code join}, {@code empty}, {@code list}, {@code map},
 * {@code hash}, {@code uuid} and {@code iif}.
 *
 * @param  function              the simple language function
 * @return                       the generated source code, or <tt>null</tt> when the
 *                               function is none of these
 * @throws SimpleParserException if the function uses invalid syntax
 */
private String createCodeExpressionMisc(String function) {
    String remainder;
    // substring function
    remainder = ifStartsWithReturnRemainder("substring(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${substring(num)}, ${substring(num,num)} was: "
                                            + function,
                    token.getIndex());
        }
        String[] tokens = codeSplitSafe(values, ',', true, true);
        if (tokens.length > 2) {
            throw new SimpleParserException(
                    "Valid syntax: ${substring(num,num)} was: " + function, token.getIndex());
        }
        String num1 = tokens[0];
        // the second number defaults to 0 when only one is given
        String num2 = "0";
        if (tokens.length > 1) {
            num2 = tokens[1];
        }
        num1 = num1.trim();
        num2 = num2.trim();
        return "substring(exchange, " + num1 + ", " + num2 + ")";
    }
    // random function
    remainder = ifStartsWithReturnRemainder("random(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${random(min,max)} or ${random(max)} was: " + function, token.getIndex());
        }
        if (values.contains(",")) {
            String before = StringHelper.before(remainder, ",");
            before = before.trim();
            String after = StringHelper.after(remainder, ",");
            after = after.trim();
            if (after.endsWith(")")) {
                after = after.substring(0, after.length() - 1);
            }
            return "random(exchange, " + before + ", " + after + ")";
        } else {
            // single argument means random between 0 and max
            return "random(exchange, 0, " + values.trim() + ")";
        }
    }
    // replace function
    remainder = ifStartsWithReturnRemainder("replace(", function);
    if (remainder != null) {
        String values = StringHelper.before(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${replace(from,to)} was: " + function,
                    token.getIndex());
        }
        String[] tokens = codeSplitSafe(values, ',', true, false);
        // FIX: require exactly two tokens; previously only > 2 was rejected, so a single
        // token crashed with ArrayIndexOutOfBoundsException at tokens[1] instead of
        // reporting the syntax error (consistent with the iif function's != 3 check)
        if (tokens.length != 2) {
            throw new SimpleParserException(
                    "Valid syntax: ${replace(from,to)} was: " + function, token.getIndex());
        }
        String from = StringHelper.xmlDecode(tokens[0]);
        String to = StringHelper.xmlDecode(tokens[1]);
        // special to make it easy to replace to an empty value (ie remove)
        if ("∅".equals(to)) {
            to = "";
        }
        // a bare double quote must be escaped in the generated source
        if ("\"".equals(from)) {
            from = "\\\"";
        }
        if ("\"".equals(to)) {
            to = "\\\"";
        }
        from = StringQuoteHelper.doubleQuote(from);
        to = StringQuoteHelper.doubleQuote(to);
        return "replace(exchange, " + from + ", " + to + ")";
    }
    // skip function
    remainder = ifStartsWithReturnRemainder("skip(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException("Valid syntax: ${skip(number)} was: " + function, token.getIndex());
        }
        return "skip(exchange, " + values.trim() + ")";
    }
    // collate function
    remainder = ifStartsWithReturnRemainder("collate(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException("Valid syntax: ${collate(group)} was: " + function, token.getIndex());
        }
        return "collate(exchange, " + values.trim() + ")";
    }
    // messageHistory function
    remainder = ifStartsWithReturnRemainder("messageHistory", function);
    if (remainder != null) {
        boolean detailed;
        String values = StringHelper.between(remainder, "(", ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            // no argument means detailed history
            detailed = true;
        } else {
            detailed = Boolean.parseBoolean(values);
        }
        return "messageHistory(exchange, " + (detailed ? "true" : "false") + ")";
    } else if (ObjectHelper.equal(function, "messageHistory")) {
        return "messageHistory(exchange, true)";
    }
    // join
    remainder = ifStartsWithReturnRemainder("join(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        // defaults: comma separator, no prefix, body expression
        String separator = "\",\"";
        String prefix = null;
        String exp = "body";
        if (ObjectHelper.isNotEmpty(values)) {
            String[] tokens = codeSplitSafe(values, ',', true, true);
            if (tokens.length > 3) {
                throw new SimpleParserException(
                        "Valid syntax: ${join(separator,prefix,expression)} was: " + function, token.getIndex());
            }
            // single quotes should be double quotes
            for (int i = 0; i < tokens.length; i++) {
                String s = tokens[i];
                if (StringHelper.isSingleQuoted(s)) {
                    s = StringHelper.removeLeadingAndEndingQuotes(s);
                    s = StringQuoteHelper.doubleQuote(s);
                    tokens[i] = s;
                } else if (i < 2 && !StringHelper.isDoubleQuoted(s)) {
                    // separator and prefix are string literals so quote them
                    s = StringQuoteHelper.doubleQuote(s);
                    tokens[i] = s;
                }
            }
            if (tokens.length == 3) {
                separator = tokens[0];
                prefix = tokens[1];
                exp = tokens[2];
            } else if (tokens.length == 2) {
                separator = tokens[0];
                prefix = tokens[1];
            } else {
                separator = tokens[0];
            }
        }
        return "var val = " + exp + ";\n return join(exchange, val, " + separator + ", " + prefix + ");";
    }
    // empty function
    remainder = ifStartsWithReturnRemainder("empty(", function);
    if (remainder != null) {
        String value = StringHelper.beforeLast(remainder, ")");
        if (ObjectHelper.isEmpty(value)) {
            throw new SimpleParserException(
                    "Valid syntax: ${empty(<type>)} but was: " + function, token.getIndex());
        }
        value = StringQuoteHelper.doubleQuote(value);
        return "empty(exchange, " + value + ")";
    }
    // list function
    remainder = ifStartsWithReturnRemainder("list(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        String[] tokens = null;
        if (ObjectHelper.isNotEmpty(values)) {
            tokens = codeSplitSafe(values, ',', true, true);
        }
        StringJoiner sj = new StringJoiner(", ");
        for (int i = 0; tokens != null && i < tokens.length; i++) {
            String s = tokens[i];
            // single quotes should be double quotes
            if (StringHelper.isSingleQuoted(s)) {
                s = StringHelper.removeLeadingAndEndingQuotes(s);
                s = StringQuoteHelper.doubleQuote(s);
            }
            sj.add(s);
        }
        // no arguments creates an empty list via null
        String p = sj.length() > 0 ? sj.toString() : "null";
        return "list(exchange, " + p + ")";
    }
    // map function
    remainder = ifStartsWithReturnRemainder("map(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        String[] tokens = null;
        if (ObjectHelper.isNotEmpty(values)) {
            tokens = codeSplitSafe(values, ',', true, true);
        }
        StringJoiner sj = new StringJoiner(", ");
        for (int i = 0; tokens != null && i < tokens.length; i++) {
            String s = tokens[i];
            // single quotes should be double quotes
            if (StringHelper.isSingleQuoted(s)) {
                s = StringHelper.removeLeadingAndEndingQuotes(s);
                s = StringQuoteHelper.doubleQuote(s);
            }
            sj.add(s);
        }
        String p = sj.length() > 0 ? sj.toString() : "null";
        return "map(exchange, " + p + ")";
    }
    // hash function
    remainder = ifStartsWithReturnRemainder("hash(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${hash(value,algorithm)} or ${hash(value)} was: " + function, token.getIndex());
        }
        String[] tokens = codeSplitSafe(values, ',', true, true);
        if (tokens.length > 2) {
            throw new SimpleParserException(
                    "Valid syntax: ${hash(value,algorithm)} or ${hash(value)} was: " + function, token.getIndex());
        }
        // single quotes should be double quotes
        for (int i = 0; i < tokens.length; i++) {
            String s = tokens[i];
            if (StringHelper.isSingleQuoted(s)) {
                s = StringHelper.removeLeadingAndEndingQuotes(s);
                s = StringQuoteHelper.doubleQuote(s);
                tokens[i] = s;
            }
        }
        // algorithm defaults to SHA-256 when not given
        String algo = "\"SHA-256\"";
        if (tokens.length == 2) {
            algo = tokens[1];
            if (!StringHelper.isQuoted(algo)) {
                algo = StringQuoteHelper.doubleQuote(algo);
            }
        }
        return "var val = " + tokens[0] + ";\n return hash(exchange, val, " + algo + ");";
    }
    // uuid function
    remainder = ifStartsWithReturnRemainder("uuid", function);
    if (remainder == null && "uuid".equals(function)) {
        // plain uuid without parenthesis uses the default generator
        remainder = "(default)";
    }
    if (remainder != null) {
        String generator = StringHelper.between(remainder, "(", ")");
        if (generator == null) {
            generator = "default";
        }
        StringBuilder sb = new StringBuilder(128);
        if ("classic".equals(generator)) {
            sb.append(" UuidGenerator uuid = new org.apache.camel.support.ClassicUuidGenerator();\n");
            sb.append("return uuid.generateUuid();");
        } else if ("short".equals(generator)) {
            sb.append(" UuidGenerator uuid = new org.apache.camel.support.ShortUuidGenerator();\n");
            sb.append("return uuid.generateUuid();");
        } else if ("simple".equals(generator)) {
            sb.append(" UuidGenerator uuid = new org.apache.camel.support.SimpleUuidGenerator();\n");
            sb.append("return uuid.generateUuid();");
        } else if ("default".equals(generator)) {
            sb.append(" UuidGenerator uuid = new org.apache.camel.support.DefaultUuidGenerator();\n");
            sb.append("return uuid.generateUuid();");
        } else if ("random".equals(generator)) {
            sb.append(" UuidGenerator uuid = new org.apache.camel.support.RandomUuidGenerator();\n");
            sb.append("return uuid.generateUuid();");
        } else {
            // any other name is treated as a custom generator looked up at runtime
            generator = StringQuoteHelper.doubleQuote(generator);
            sb.append("if (uuid == null) uuid = customUuidGenerator(exchange, ").append(generator)
                    .append("); return uuid.generateUuid();");
        }
        return sb.toString();
    }
    // iif function
    remainder = ifStartsWithReturnRemainder("iif(", function);
    if (remainder != null) {
        String values = StringHelper.beforeLast(remainder, ")");
        if (values == null || ObjectHelper.isEmpty(values)) {
            throw new SimpleParserException(
                    "Valid syntax: ${iif(predicate,trueExpression,falseExpression)} was: " + function, token.getIndex());
        }
        String[] tokens = codeSplitSafe(values, ',', true, true);
        if (tokens.length != 3) {
            throw new SimpleParserException(
                    "Valid syntax: ${iif(predicate,trueExpression,falseExpression)} was: " + function, token.getIndex());
        }
        // single quotes should be double quotes
        for (int i = 0; i < 3; i++) {
            String s = tokens[i];
            if (StringHelper.isSingleQuoted(s)) {
                s = StringHelper.removeLeadingAndEndingQuotes(s);
                s = StringQuoteHelper.doubleQuote(s);
                tokens[i] = s;
            }
        }
        return "Object o = " + tokens[0]
               + ";\n boolean b = convertTo(exchange, boolean.class, o);\n return b ? "
               + tokens[1] + " : " + tokens[2];
    }
    // not a miscellaneous function
    return null;
}
/**
 * Splits an OGNL expression into its parts, stripping the leading dot from each
 * method part and merging consecutive index parts so a double index such as
 * {@code [foo][0]} becomes a single element.
 *
 * @param  remainder the OGNL expression
 * @return           the list of parts
 */
private static List<String> splitOgnl(String remainder) {
    List<String> answer = new ArrayList<>();
    for (String part : OgnlHelper.splitOgnl(remainder)) {
        String current = part.startsWith(".") ? part.substring(1) : part;
        boolean isIndex = current.startsWith("[") && current.endsWith("]");
        int lastPos = answer.size() - 1;
        if (isIndex && lastPos >= 0) {
            String previous = answer.get(lastPos);
            if (previous.startsWith("[") && previous.endsWith("]")) {
                // consecutive indexes are combined into a single element
                answer.set(lastPos, previous + current);
                continue;
            }
        }
        answer.add(current);
    }
    return answer;
}
/**
 * Rewrites an OGNL-style expression chain into Java source code suitable for the
 * generated method body: shorthand properties become getter calls
 * ({@code .name} -> {@code .getName()}), index access {@code [i]} becomes a
 * {@code .get(...)} call, and {@code length} is kept as a field access for array types.
 *
 * @param remainder the OGNL chain to rewrite (may be null)
 * @param type      the (optional) declared Java type of the root object, used only
 *                  to detect array types for the {@code length} shortcut
 * @return the generated Java code, or {@code remainder} unchanged when nothing was produced
 */
private static String ognlCodeMethods(String remainder, String type) {
    StringBuilder sb = new StringBuilder(256);
    if (remainder != null) {
        List<String> methods = splitOgnl(remainder);
        for (int i = 0; i < methods.size(); i++) {
            String m = methods.get(i);
            if (m.startsWith("(")) {
                // its parameters for the function so add as-is and continue
                sb.append(m);
                continue;
            }
            // clip index
            String index = StringHelper.betweenOuterPair(m, '[', ']');
            if (index != null) {
                // keep only the part before the index; the index is re-appended as .get(...) below
                m = StringHelper.before(m, "[");
            }
            // special for length on arrays
            if (m != null && m.equals("length")) {
                if (type != null && type.contains("[]")) {
                    // arrays expose length as a field, not a getter
                    sb.append(".length");
                    continue;
                }
            }
            // single quotes for string literals should be replaced as double quotes
            if (m != null) {
                m = OgnlHelper.methodAsDoubleQuotes(m);
            }
            // shorthand getter syntax: .name -> .getName()
            if (m != null && !m.isEmpty()) {
                // a method so append with a dot
                sb.append(".");
                char ch = m.charAt(m.length() - 1);
                if (Character.isAlphabetic(ch)) {
                    // token ends alphabetically, so it is a property/method name: expand to getter form
                    if (!m.startsWith("get")) {
                        sb.append("get");
                        sb.append(Character.toUpperCase(m.charAt(0)));
                        sb.append(m.substring(1));
                    } else {
                        sb.append(m);
                    }
                    sb.append("()");
                } else {
                    // token ends with ')' or similar, append verbatim
                    sb.append(m);
                }
            }
            // append index via a get method - eg get for a list, or get for a map (array not supported)
            if (index != null) {
                sb.append(".get(");
                try {
                    long lon = Long.parseLong(index);
                    sb.append(lon);
                    // NOTE(review): only the upper bound is checked; an index below
                    // Integer.MIN_VALUE would be emitted without the long suffix, and
                    // the lowercase 'l' suffix is legal but discouraged Java style — confirm intent
                    if (lon > Integer.MAX_VALUE) {
                        sb.append("l");
                    }
                } catch (Exception e) {
                    // its text based
                    index = StringHelper.removeLeadingAndEndingQuotes(index);
                    sb.append("\"");
                    sb.append(index);
                    sb.append("\"");
                }
                sb.append(")");
            }
        }
    }
    if (!sb.isEmpty()) {
        return sb.toString();
    } else {
        // nothing generated (null or empty chain): hand back the input untouched
        return remainder;
    }
}
/**
 * Splits the input on the given separator while respecting single/double quoted
 * sections and embedded code blocks delimited by
 * {@code BaseSimpleParser.CODE_START}/{@code BaseSimpleParser.CODE_END}: a separator
 * inside quotes or inside a code block does not split.
 *
 * @param input      the text to split (may be null)
 * @param separator  the separator character
 * @param trim       whether to trim each unquoted part (quoted text is never trimmed)
 * @param keepQuotes whether quoted parts retain their surrounding quotes
 * @return the parts; {@code null} if input is null; a single-element array when the
 *         input contains no separator at all
 */
private static String[] codeSplitSafe(String input, char separator, boolean trim, boolean keepQuotes) {
    if (input == null) {
        return null;
    }
    if (input.indexOf(separator) == -1) {
        if (input.length() > 1) {
            char ch = input.charAt(0);
            char ch2 = input.charAt(input.length() - 1);
            boolean singleQuoted = ch == '\'' && ch2 == '\'';
            boolean doubleQuoted = ch == '"' && ch2 == '"';
            if (!keepQuotes && (singleQuoted || doubleQuoted)) {
                input = input.substring(1, input.length() - 1);
                // do not trim quoted text
            } else if (trim) {
                input = input.trim();
            }
        }
        // no separator in data, so return single string with input as is
        return new String[] { input };
    }
    List<String> answer = new ArrayList<>();
    StringBuilder sb = new StringBuilder(256);
    int codeLevel = 0;
    boolean singleQuoted = false;
    boolean doubleQuoted = false;
    boolean separating = false;
    for (int i = 0; i < input.length(); i++) {
        char ch = input.charAt(i);
        char prev = i > 0 ? input.charAt(i - 1) : 0;
        boolean isQuoting = singleQuoted || doubleQuoted;
        boolean last = i == input.length() - 1;
        // do not split inside code blocks
        if (input.indexOf(BaseSimpleParser.CODE_START, i) == i) {
            codeLevel++;
            sb.append(BaseSimpleParser.CODE_START);
            i = i + BaseSimpleParser.CODE_START.length() - 1;
            continue;
        } else if (input.indexOf(BaseSimpleParser.CODE_END, i) == i) {
            codeLevel--;
            sb.append(BaseSimpleParser.CODE_END);
            i = i + BaseSimpleParser.CODE_END.length() - 1;
            continue;
        }
        if (codeLevel > 0) {
            // inside a code block: copy verbatim, never split
            sb.append(ch);
            continue;
        }
        if (!doubleQuoted && ch == '\'') {
            if (singleQuoted && prev == ch && sb.isEmpty()) {
                // its an empty quote so add empty text
                if (keepQuotes) {
                    answer.add("''");
                } else {
                    answer.add("");
                }
            }
            // special logic needed if this quote is the end
            if (last) {
                if (singleQuoted && !sb.isEmpty()) {
                    String text = sb.toString();
                    // do not trim a quoted string
                    if (keepQuotes) {
                        answer.add(text + "'"); // append ending quote
                    } else {
                        answer.add(text);
                    }
                    sb.setLength(0);
                }
                break; // break out as we are finished
            }
            singleQuoted = !singleQuoted;
            if (keepQuotes) {
                sb.append(ch);
            }
            continue;
        } else if (!singleQuoted && ch == '"') {
            if (doubleQuoted && prev == ch && sb.isEmpty()) {
                // its an empty quote so add empty text
                if (keepQuotes) {
                    // FIX: add both quotes ("") so an empty double-quoted element mirrors
                    // the single-quote branch above (which adds ''); previously only a
                    // single " was added here
                    answer.add("\"\"");
                } else {
                    answer.add("");
                }
            }
            // special logic needed if this quote is the end
            if (last) {
                if (doubleQuoted && !sb.isEmpty()) {
                    String text = sb.toString();
                    // do not trim a quoted string
                    if (keepQuotes) {
                        answer.add(text + "\""); // append ending quote
                    } else {
                        answer.add(text);
                    }
                    sb.setLength(0);
                }
                break; // break out as we are finished
            }
            doubleQuoted = !doubleQuoted;
            if (keepQuotes) {
                sb.append(ch);
            }
            continue;
        } else if (!isQuoting && ch == separator) {
            separating = true;
            // add as answer if we are not in a quote
            if (!sb.isEmpty()) {
                String text = sb.toString();
                if (trim) {
                    text = text.trim();
                }
                answer.add(text);
                sb.setLength(0);
            }
            // we should avoid adding the separator
            continue;
        }
        // skip whitespace right after a separator when trimming (unless space is the separator)
        if (trim && !isQuoting && separating && separator != ' ' && ch == ' ') {
            continue;
        }
        separating = false;
        // append char
        sb.append(ch);
    }
    // any leftover
    if (!sb.isEmpty()) {
        String text = sb.toString();
        if (trim) {
            text = text.trim();
        }
        answer.add(text);
    }
    return answer.toArray(new String[0]);
}
}
| SimpleFunctionExpression |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java | {
"start": 3999,
"end": 37299
} | class ____ extends ESTestCase {
// shared thread pool for the whole test class (created once in @BeforeClass, torn down in @AfterClass)
private static ThreadPool threadPool;
// per-test fixtures created in setUp() and closed in tearDown()
private ClusterService clusterService;
private TransportService transportService;
private IndexNameExpressionResolver indexNameExpressionResolver;
@BeforeClass
public static void setupThreadPool() {
    threadPool = new TestThreadPool("ClusterStateHealthTests");
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // cluster service backed by the shared thread pool, plus a capturing (no-network) transport
    clusterService = createClusterService(threadPool);
    CapturingTransport transport = new CapturingTransport();
    transportService = transport.createTransportService(
        clusterService.getSettings(),
        threadPool,
        TransportService.NOOP_TRANSPORT_INTERCEPTOR,
        x -> clusterService.localNode(),
        null,
        Collections.emptySet()
    );
    transportService.start();
    transportService.acceptIncomingRequests();
    indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext());
}
@After
public void tearDown() throws Exception {
    super.tearDown();
    clusterService.close();
    transportService.close();
}
@AfterClass
public static void terminateThreadPool() {
    ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
    threadPool = null;
}
/**
 * Verifies that a health request waiting for GREEN does not complete until the cluster state
 * restoring the master has actually been applied: the applier thread is blocked on a latch,
 * and the listener must stay pending until that latch is released.
 */
public void testClusterHealthWaitsForClusterStateApplication() throws InterruptedException, ExecutionException {
    final CountDownLatch applyLatch = new CountDownLatch(1);
    final CountDownLatch listenerCalled = new CountDownLatch(1);
    // start from a state with no master so the health request has to wait
    var state = ClusterState.builder(clusterService.state()).nodes(clusterService.state().nodes().withMasterNodeId(null)).build();
    var projectId = state.metadata().projects().keySet().iterator().next();
    // Randomly add an extra project.
    if (randomBoolean()) {
        state = ClusterState.builder(state).putProjectMetadata(ProjectMetadata.builder(randomUniqueProjectId()).build()).build();
    }
    setState(clusterService, state);
    // block the applier thread until applyLatch is released at the end of the test
    clusterService.addStateApplier(event -> {
        listenerCalled.countDown();
        try {
            applyLatch.await();
        } catch (InterruptedException e) {
            logger.debug("interrupted", e);
        }
    });
    logger.info("--> submit task to restore master");
    ClusterState currentState = clusterService.getClusterApplierService().state();
    clusterService.getClusterApplierService()
        .onNewClusterState(
            "restore master",
            () -> ClusterState.builder(currentState)
                .nodes(currentState.nodes().withMasterNodeId(currentState.nodes().getLocalNodeId()))
                .incrementVersion()
                .build(),
            ActionListener.noop()
        );
    logger.info("--> waiting for listener to be called and cluster state being blocked");
    listenerCalled.await();
    TransportClusterHealthAction action = new TransportClusterHealthAction(
        transportService,
        clusterService,
        threadPool,
        new ActionFilters(new HashSet<>()),
        indexNameExpressionResolver,
        new AllocationService(null, new TestGatewayAllocator(), null, null, null, TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY),
        TestProjectResolvers.singleProject(projectId)
    );
    PlainActionFuture<ClusterHealthResponse> listener = new PlainActionFuture<>();
    ActionTestUtils.execute(
        action,
        new CancellableTask(1, "direct", TransportClusterHealthAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()),
        new ClusterHealthRequest(TEST_REQUEST_TIMEOUT).waitForGreenStatus(),
        listener
    );
    // the applier is still blocked, so the health request must not have completed yet
    assertFalse(listener.isDone());
    logger.info("--> realising task to restore master");
    applyLatch.countDown();
    // now the state applies and the waiting health request completes
    listener.get();
}
/**
 * Builds a random set of indices with random shard/replica counts and routing states,
 * then asserts that {@link ClusterStateHealth} reports exactly the counts tallied by
 * the {@link RoutingTableGenerator.ShardCounter} (optionally after a wire round-trip).
 */
public void testClusterHealth() throws IOException {
    RoutingTableGenerator routingTableGenerator = new RoutingTableGenerator();
    RoutingTableGenerator.ShardCounter counter = new RoutingTableGenerator.ShardCounter();
    RoutingTable.Builder routingTable = RoutingTable.builder();
    ProjectId projectId = randomUniqueProjectId();
    ProjectMetadata.Builder project = ProjectMetadata.builder(projectId);
    // between 1 and 5 indices, each with random shard/replica counts
    for (int i = randomInt(4); i >= 0; i--) {
        int numberOfShards = randomInt(3) + 1;
        int numberOfReplicas = randomInt(4);
        IndexMetadata indexMetadata = IndexMetadata.builder("test_" + Integer.toString(i))
            .settings(settings(IndexVersion.current()))
            .numberOfShards(numberOfShards)
            .numberOfReplicas(numberOfReplicas)
            .build();
        IndexRoutingTable indexRoutingTable = routingTableGenerator.genIndexRoutingTable(indexMetadata, counter);
        project.put(indexMetadata, true);
        routingTable.add(indexRoutingTable);
    }
    ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
        .putProjectMetadata(project)
        .routingTable(GlobalRoutingTable.builder().put(projectId, routingTable).build())
        .build();
    String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(
        clusterState.metadata().getProject(projectId),
        IndicesOptions.strictExpand(),
        (String[]) null
    );
    ClusterStateHealth clusterStateHealth = new ClusterStateHealth(clusterState, concreteIndices, projectId);
    logger.info("cluster status: {}, expected {}", clusterStateHealth.getStatus(), counter.status());
    // randomly exercise serialization before asserting
    clusterStateHealth = maybeSerialize(clusterStateHealth);
    assertClusterHealth(clusterStateHealth, counter);
}
/** Health stays YELLOW through index creation and turns GREEN only in the final (fully started) state. */
public void testClusterHealthOnIndexCreation() {
    final String indexName = "test-idx";
    final String[] indices = new String[] { indexName };
    var projectId = randomUniqueProjectId();
    final List<ClusterState> clusterStates = simulateIndexCreationStates(indexName, false, projectId);
    for (int i = 0; i < clusterStates.size(); i++) {
        // make sure cluster health is always YELLOW, up until the last state where it should be GREEN
        final ClusterState clusterState = clusterStates.get(i);
        final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices, projectId);
        if (i < clusterStates.size() - 1) {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
        } else {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN));
        }
    }
}
/** Like the creation test above, but the final state contains failed primary allocations, so health ends RED. */
public void testClusterHealthOnIndexCreationWithFailedAllocations() {
    final String indexName = "test-idx";
    final String[] indices = new String[] { indexName };
    var projectId = randomUniqueProjectId();
    final List<ClusterState> clusterStates = simulateIndexCreationStates(indexName, true, projectId);
    for (int i = 0; i < clusterStates.size(); i++) {
        // make sure cluster health is YELLOW up until the final cluster state, which contains primary shard
        // failed allocations that should make the cluster health RED
        final ClusterState clusterState = clusterStates.get(i);
        final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices, projectId);
        if (i < clusterStates.size() - 1) {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
        } else {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED));
        }
    }
}
/** Cluster recovery without prior allocation ids: YELLOW throughout, GREEN in the final state. */
public void testClusterHealthOnClusterRecovery() {
    final String indexName = "test-idx";
    final String[] indices = new String[] { indexName };
    var projectId = randomUniqueProjectId();
    final List<ClusterState> clusterStates = simulateClusterRecoveryStates(indexName, false, false, projectId);
    for (int i = 0; i < clusterStates.size(); i++) {
        // make sure cluster health is YELLOW up until the final cluster state, when it turns GREEN
        final ClusterState clusterState = clusterStates.get(i);
        final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices, projectId);
        if (i < clusterStates.size() - 1) {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
        } else {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN));
        }
    }
}
/** Cluster recovery whose final state contains failed primary allocations: health ends RED. */
public void testClusterHealthOnClusterRecoveryWithFailures() {
    final String indexName = "test-idx";
    final String[] indices = new String[] { indexName };
    var projectId = randomUniqueProjectId();
    final List<ClusterState> clusterStates = simulateClusterRecoveryStates(indexName, false, true, projectId);
    for (int i = 0; i < clusterStates.size(); i++) {
        // make sure cluster health is YELLOW up until the final cluster state, which contains primary shard
        // failed allocations that should make the cluster health RED
        final ClusterState clusterState = clusterStates.get(i);
        final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices, projectId);
        if (i < clusterStates.size() - 1) {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
        } else {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED));
        }
    }
}
/** Recovery with pre-existing allocation ids: RED until primaries start, then YELLOW, GREEN at the end. */
public void testClusterHealthOnClusterRecoveryWithPreviousAllocationIds() {
    final String indexName = "test-idx";
    final String[] indices = new String[] { indexName };
    var projectId = randomUniqueProjectId();
    final List<ClusterState> clusterStates = simulateClusterRecoveryStates(indexName, true, false, projectId);
    for (int i = 0; i < clusterStates.size(); i++) {
        // because there were previous allocation ids, we should be RED until the primaries are started,
        // then move to YELLOW, and the last state should be GREEN when all shards have been started
        final ClusterState clusterState = clusterStates.get(i);
        final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices, projectId);
        if (i < clusterStates.size() - 1) {
            // if the inactive primaries are due solely to recovery (not failed allocation or previously being allocated),
            // then cluster health is YELLOW, otherwise RED
            if (primaryInactiveDueToRecovery(indexName, clusterState, projectId)) {
                assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
            } else {
                assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED));
            }
        } else {
            assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN));
        }
    }
}
/** Recovery with previous allocation ids AND allocation failures: every state is YELLOW or RED per the recovery rule. */
public void testClusterHealthOnClusterRecoveryWithPreviousAllocationIdsAndAllocationFailures() {
    final String indexName = "test-idx";
    final String[] indices = new String[] { indexName };
    var projectId = randomUniqueProjectId();
    for (final ClusterState clusterState : simulateClusterRecoveryStates(indexName, true, true, projectId)) {
        final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices, projectId);
        // if the inactive primaries are due solely to recovery (not failed allocation or previously being allocated)
        // then cluster health is YELLOW, otherwise RED
        if (primaryInactiveDueToRecovery(indexName, clusterState, projectId)) {
            assertThat("clusterState is:\n" + clusterState, health.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
        } else {
            assertThat("clusterState is:\n" + clusterState, health.getStatus(), equalTo(ClusterHealthStatus.RED));
        }
    }
}
/**
 * Randomly round-trips the given health instance through its wire serialization.
 * Half of the time the argument is returned untouched; otherwise it is written to a
 * stream and re-read, exercising the {@code StreamInput} constructor as well.
 */
ClusterStateHealth maybeSerialize(ClusterStateHealth clusterStateHealth) throws IOException {
    if (randomBoolean() == false) {
        return clusterStateHealth;
    }
    final BytesStreamOutput out = new BytesStreamOutput();
    clusterStateHealth.writeTo(out);
    final StreamInput in = out.bytes().streamInput();
    return new ClusterStateHealth(in);
}
/**
 * Builds the initial cluster state for a freshly created index (optionally alongside a few
 * random empty projects) and returns the sequence of states produced by
 * {@link #generateClusterStates}.
 *
 * @param indexName                     name of the index to create
 * @param withPrimaryAllocationFailures whether the final generated state should contain failed primaries
 * @param projectId                     the project that owns the index
 */
private List<ClusterState> simulateIndexCreationStates(
    final String indexName,
    final boolean withPrimaryAllocationFailures,
    ProjectId projectId
) {
    final int numberOfShards = randomIntBetween(1, 5);
    final int numberOfReplicas = randomIntBetween(1, numberOfShards);
    // initial index creation and new routing table info
    final IndexMetadata indexMetadata = IndexMetadata.builder(indexName)
        .settings(settings(IndexVersion.current()).put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()))
        .numberOfShards(numberOfShards)
        .numberOfReplicas(numberOfReplicas)
        .build();
    final var mdBuilder = Metadata.builder().put(ProjectMetadata.builder(projectId).put(indexMetadata, true).build());
    final var rtBuilder = GlobalRoutingTable.builder()
        .put(projectId, RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY).addAsNew(indexMetadata));
    // sprinkle in extra empty projects to make sure health only looks at the target project
    final int nrOfProjects = randomIntBetween(0, 5);
    for (int i = 0; i < nrOfProjects; i++) {
        var id = randomUniqueProjectId();
        mdBuilder.put(ProjectMetadata.builder(id).build());
        rtBuilder.put(id, RoutingTable.EMPTY_ROUTING_TABLE);
    }
    ClusterState clusterState = ClusterState.builder(new ClusterName("test_cluster"))
        .metadata(mdBuilder.build())
        .routingTable(rtBuilder.build())
        .build();
    return generateClusterStates(clusterState, indexName, numberOfReplicas, withPrimaryAllocationFailures, projectId);
}
/**
 * Like {@link #simulateIndexCreationStates} but simulates full-cluster recovery: the index is
 * OPEN and may carry pre-existing in-sync allocation ids (meaning primaries were allocated before),
 * which makes inactive primaries count as RED rather than YELLOW.
 */
private List<ClusterState> simulateClusterRecoveryStates(
    final String indexName,
    final boolean withPreviousAllocationIds,
    final boolean withPrimaryAllocationFailures,
    final ProjectId projectId
) {
    final int numberOfShards = randomIntBetween(1, 5);
    final int numberOfReplicas = randomIntBetween(1, numberOfShards);
    // initial index creation and new routing table info
    IndexMetadata indexMetadata = IndexMetadata.builder(indexName)
        .settings(settings(IndexVersion.current()).put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()))
        .numberOfShards(numberOfShards)
        .numberOfReplicas(numberOfReplicas)
        .state(IndexMetadata.State.OPEN)
        .build();
    if (withPreviousAllocationIds) {
        // mark at least one shard as previously allocated by giving it an in-sync allocation id
        final IndexMetadata.Builder idxMetaWithAllocationIds = IndexMetadata.builder(indexMetadata);
        boolean atLeastOne = false;
        for (int i = 0; i < numberOfShards; i++) {
            if (atLeastOne == false || randomBoolean()) {
                idxMetaWithAllocationIds.putInSyncAllocationIds(i, Sets.newHashSet(UUIDs.randomBase64UUID()));
                atLeastOne = true;
            }
        }
        indexMetadata = idxMetaWithAllocationIds.build();
    }
    final var mdBuilder = Metadata.builder().put(ProjectMetadata.builder(projectId).put(indexMetadata, true).build());
    final var rtBuilder = GlobalRoutingTable.builder()
        .put(projectId, RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY).addAsNew(indexMetadata));
    // sprinkle in extra empty projects to make sure health only looks at the target project
    final int nrOfProjects = randomIntBetween(0, 5);
    for (int i = 0; i < nrOfProjects; i++) {
        var id = randomUniqueProjectId();
        mdBuilder.put(ProjectMetadata.builder(id).build());
        rtBuilder.put(id, RoutingTable.EMPTY_ROUTING_TABLE);
    }
    ClusterState clusterState = ClusterState.builder(new ClusterName("test_cluster"))
        .metadata(mdBuilder.build())
        .routingTable(rtBuilder.build())
        .build();
    return generateClusterStates(clusterState, indexName, numberOfReplicas, withPrimaryAllocationFailures, projectId);
}
/**
 * Produces the ordered sequence of cluster states an index goes through while its shards are
 * allocated: primaries initializing, some primaries started, (optionally) primaries failed,
 * all primaries started, replicas initializing, some replicas started, all replicas started.
 * When {@code withPrimaryAllocationFailures} is set the sequence stops after the failure state.
 * Each intermediate state is appended to the returned list, starting with the original state.
 */
private List<ClusterState> generateClusterStates(
    final ClusterState originalClusterState,
    final String indexName,
    final int numberOfReplicas,
    final boolean withPrimaryAllocationFailures,
    ProjectId projectId
) {
    // generate random node ids
    final Set<String> nodeIds = new HashSet<>();
    final int numNodes = randomIntBetween(numberOfReplicas + 1, 10);
    for (int i = 0; i < numNodes; i++) {
        nodeIds.add(randomAlphaOfLength(8));
    }
    final List<ClusterState> clusterStates = new ArrayList<>();
    clusterStates.add(originalClusterState);
    ClusterState clusterState = originalClusterState;
    // initialize primaries
    RoutingTable routingTable = originalClusterState.routingTable(projectId);
    IndexRoutingTable indexRoutingTable = routingTable.index(indexName);
    IndexRoutingTable.Builder newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex());
    for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) {
        IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId);
        for (int copy = 0; copy < shardRoutingTable.size(); copy++) {
            ShardRouting shardRouting = shardRoutingTable.shard(copy);
            if (shardRouting.primary()) {
                newIndexRoutingTable.addShard(shardRouting.initialize(randomFrom(nodeIds), null, shardRouting.getExpectedShardSize()));
            } else {
                newIndexRoutingTable.addShard(shardRouting);
            }
        }
    }
    routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build();
    clusterState = ClusterState.builder(clusterState)
        .routingTable(GlobalRoutingTable.builder(clusterState.globalRoutingTable()).put(projectId, routingTable).build())
        .build();
    clusterStates.add(clusterState);
    // some primaries started
    indexRoutingTable = routingTable.index(indexName);
    newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex());
    // track the allocation id of every started primary so it can be recorded as in-sync below
    Map<Integer, Set<String>> allocationIds = new HashMap<>();
    for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) {
        IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId);
        for (int copy = 0; copy < shardRoutingTable.size(); copy++) {
            ShardRouting shardRouting = shardRoutingTable.shard(copy);
            if (shardRouting.primary() && randomBoolean()) {
                final ShardRouting newShardRouting = shardRouting.moveToStarted(UNAVAILABLE_EXPECTED_SHARD_SIZE);
                allocationIds.put(newShardRouting.getId(), Set.of(newShardRouting.allocationId().getId()));
                newIndexRoutingTable.addShard(newShardRouting);
            } else {
                newIndexRoutingTable.addShard(shardRouting);
            }
        }
    }
    routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build();
    final IndexMetadata.Builder idxMetaBuilder = IndexMetadata.builder(clusterState.metadata().getProject(projectId).index(indexName));
    allocationIds.forEach(idxMetaBuilder::putInSyncAllocationIds);
    var projectBuilder = ProjectMetadata.builder(clusterState.metadata().getProject(projectId)).put(idxMetaBuilder);
    clusterState = ClusterState.builder(clusterState)
        .routingTable(GlobalRoutingTable.builder(clusterState.globalRoutingTable()).put(projectId, routingTable).build())
        .putProjectMetadata(projectBuilder)
        .build();
    clusterStates.add(clusterState);
    if (withPrimaryAllocationFailures) {
        boolean alreadyFailedPrimary = false;
        // some primaries failed to allocate
        indexRoutingTable = routingTable.index(indexName);
        newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex());
        for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) {
            IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId);
            for (int copy = 0; copy < shardRoutingTable.size(); copy++) {
                ShardRouting shardRouting = shardRoutingTable.shard(copy);
                // fail at least one primary (the first one seen), plus every not-yet-started primary
                if (shardRouting.primary() && (shardRouting.started() == false || alreadyFailedPrimary == false)) {
                    newIndexRoutingTable.addShard(
                        shardRouting.moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "unlucky shard"))
                    );
                    alreadyFailedPrimary = true;
                } else {
                    newIndexRoutingTable.addShard(shardRouting);
                }
            }
        }
        routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build();
        clusterStates.add(
            ClusterState.builder(clusterState)
                .routingTable(GlobalRoutingTable.builder(clusterState.globalRoutingTable()).put(projectId, routingTable).build())
                .build()
        );
        // failure scenario ends here; replicas are never initialized
        return clusterStates;
    }
    // all primaries started
    indexRoutingTable = routingTable.index(indexName);
    newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex());
    allocationIds = new HashMap<>();
    for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) {
        IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId);
        for (int copy = 0; copy < shardRoutingTable.size(); copy++) {
            ShardRouting shardRouting = shardRoutingTable.shard(copy);
            if (shardRouting.primary() && shardRouting.started() == false) {
                final ShardRouting newShardRouting = shardRouting.moveToStarted(UNAVAILABLE_EXPECTED_SHARD_SIZE);
                allocationIds.put(newShardRouting.getId(), Set.of(newShardRouting.allocationId().getId()));
                newIndexRoutingTable.addShard(newShardRouting);
            } else {
                newIndexRoutingTable.addShard(shardRouting);
            }
        }
    }
    routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build();
    final IndexMetadata.Builder idxMetaBuilder2 = IndexMetadata.builder(clusterState.metadata().getProject(projectId).index(indexName));
    allocationIds.forEach(idxMetaBuilder2::putInSyncAllocationIds);
    projectBuilder = ProjectMetadata.builder(clusterState.metadata().getProject(projectId)).put(idxMetaBuilder2);
    clusterState = ClusterState.builder(clusterState)
        .routingTable(GlobalRoutingTable.builder(clusterState.globalRoutingTable()).put(projectId, routingTable).build())
        .putProjectMetadata(projectBuilder)
        .build();
    clusterStates.add(clusterState);
    // initialize replicas
    indexRoutingTable = routingTable.index(indexName);
    newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex());
    for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) {
        IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId);
        final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId();
        Set<String> allocatedNodes = new HashSet<>();
        allocatedNodes.add(primaryNodeId);
        for (int copy = 0; copy < shardRoutingTable.size(); copy++) {
            ShardRouting shardRouting = shardRoutingTable.shard(copy);
            if (shardRouting.primary() == false) {
                // give the replica a different node id than the primary
                String replicaNodeId = randomFrom(Sets.difference(nodeIds, allocatedNodes));
                newIndexRoutingTable.addShard(shardRouting.initialize(replicaNodeId, null, shardRouting.getExpectedShardSize()));
                allocatedNodes.add(replicaNodeId);
            } else {
                newIndexRoutingTable.addShard(shardRouting);
            }
        }
    }
    routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build();
    clusterStates.add(
        ClusterState.builder(clusterState)
            .routingTable(GlobalRoutingTable.builder(clusterState.globalRoutingTable()).put(projectId, routingTable).build())
            .build()
    );
    // some replicas started
    indexRoutingTable = routingTable.index(indexName);
    newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex());
    for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) {
        IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId);
        for (int copy = 0; copy < shardRoutingTable.size(); copy++) {
            ShardRouting shardRouting = shardRoutingTable.shard(copy);
            if (shardRouting.primary() == false && randomBoolean()) {
                newIndexRoutingTable.addShard(shardRouting.moveToStarted(UNAVAILABLE_EXPECTED_SHARD_SIZE));
            } else {
                newIndexRoutingTable.addShard(shardRouting);
            }
        }
    }
    routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build();
    clusterStates.add(
        ClusterState.builder(clusterState)
            .routingTable(GlobalRoutingTable.builder(clusterState.globalRoutingTable()).put(projectId, routingTable).build())
            .build()
    );
    // all replicas started
    boolean replicaStateChanged = false;
    indexRoutingTable = routingTable.index(indexName);
    newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex());
    for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) {
        IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId);
        for (int copy = 0; copy < shardRoutingTable.size(); copy++) {
            ShardRouting shardRouting = shardRoutingTable.shard(copy);
            if (shardRouting.primary() == false && shardRouting.started() == false) {
                newIndexRoutingTable.addShard(shardRouting.moveToStarted(UNAVAILABLE_EXPECTED_SHARD_SIZE));
                replicaStateChanged = true;
            } else {
                newIndexRoutingTable.addShard(shardRouting);
            }
        }
    }
    // all of the replicas may have moved to started in the previous phase already
    if (replicaStateChanged) {
        routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build();
        clusterStates.add(
            ClusterState.builder(clusterState)
                .routingTable(GlobalRoutingTable.builder(clusterState.globalRoutingTable()).put(projectId, routingTable).build())
                .build()
        );
    }
    return clusterStates;
}
/**
 * Returns true if the inactive primaries in the index are only due to cluster recovery
 * (not because of allocation of an existing shard copy, previously assigned allocation ids,
 * failed allocations, or deciders saying NO) — i.e. the cases where health is YELLOW, not RED.
 */
private boolean primaryInactiveDueToRecovery(final String indexName, final ClusterState clusterState, final ProjectId projectId) {
    final IndexRoutingTable indexRoutingTable = clusterState.routingTable(projectId).index(indexName);
    for (int i = 0; i < indexRoutingTable.size(); i++) {
        IndexShardRoutingTable shardRouting = indexRoutingTable.shard(i);
        final ShardRouting primaryShard = shardRouting.primaryShard();
        if (primaryShard.active() == false) {
            // a non-empty in-sync set means this primary was allocated before -> not plain recovery
            if (clusterState.metadata()
                .getProject(projectId)
                .index(indexName)
                .inSyncAllocationIds(shardRouting.shardId().id())
                .isEmpty() == false) {
                return false;
            }
            // recovering from an existing on-disk copy also disqualifies
            if (primaryShard.recoverySource() != null
                && primaryShard.recoverySource().getType() == RecoverySource.Type.EXISTING_STORE) {
                return false;
            }
            // any prior allocation failure disqualifies
            if (primaryShard.unassignedInfo().failedAllocations() > 0) {
                return false;
            }
            // deciders explicitly refusing allocation disqualifies
            if (primaryShard.unassignedInfo().lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Tests the case where indices exist in metadata but their routing tables are missing.
 * This happens during cluster restart where metadata is loaded but routing table is not yet built.
 * All shards should be considered completely unassigned and the cluster should be RED.
 */
public void testActiveShardsPercentDuringClusterRestart() {
    final String indexName = "test-idx";
    ProjectId projectId = randomUniqueProjectId();
    final IndexMetadata indexMetadata = IndexMetadata.builder(indexName)
        .settings(settings(IndexVersion.current()).put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()))
        .numberOfShards(3)
        .numberOfReplicas(1)
        .build();
    // Create cluster state with index metadata but WITHOUT routing table entry
    // This simulates cluster restart where metadata is loaded but routing table is not yet built
    final var mdBuilder = Metadata.builder().put(ProjectMetadata.builder(projectId).put(indexMetadata, true).build());
    final var rtBuilder = GlobalRoutingTable.builder().put(projectId, RoutingTable.EMPTY_ROUTING_TABLE);
    ClusterState clusterState = ClusterState.builder(new ClusterName("test_cluster"))
        .metadata(mdBuilder.build())
        .routingTable(rtBuilder.build())
        .blocks(
            ClusterBlocks.builder()
                .addGlobalBlock(new ClusterBlock(1, "test", true, true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL))
        )
        .build();
    String[] concreteIndices = new String[] { indexName };
    ClusterStateHealth clusterStateHealth = new ClusterStateHealth(clusterState, concreteIndices, projectId);
    // The cluster should be RED because all shards are unassigned
    assertThat(clusterStateHealth.getStatus(), equalTo(ClusterHealthStatus.RED));
    // All shards are unassigned, so activeShardsPercent should be 0.0
    assertThat(
        "activeShardsPercent should be 0.0 when all shards are unassigned",
        clusterStateHealth.getActiveShardsPercent(),
        equalTo(0.0)
    );
    // Verify that totalShardCount is correctly calculated
    int expectedTotalShards = indexMetadata.getTotalNumberOfShards();
    assertThat("All shards should be counted as unassigned", clusterStateHealth.getUnassignedShards(), equalTo(expectedTotalShards));
    // All primary shards should be unassigned
    assertThat(
        "All primary shards should be unassigned",
        clusterStateHealth.getUnassignedPrimaryShards(),
        equalTo(indexMetadata.getNumberOfShards())
    );
    // No active shards
    assertThat(clusterStateHealth.getActiveShards(), equalTo(0));
    assertThat(clusterStateHealth.getActivePrimaryShards(), equalTo(0));
}
private void assertClusterHealth(ClusterStateHealth clusterStateHealth, RoutingTableGenerator.ShardCounter counter) {
assertThat(clusterStateHealth.getStatus(), equalTo(counter.status()));
assertThat(clusterStateHealth.getActiveShards(), equalTo(counter.active));
assertThat(clusterStateHealth.getActivePrimaryShards(), equalTo(counter.primaryActive));
assertThat(clusterStateHealth.getInitializingShards(), equalTo(counter.initializing));
assertThat(clusterStateHealth.getRelocatingShards(), equalTo(counter.relocating));
assertThat(clusterStateHealth.getUnassignedShards(), equalTo(counter.unassigned));
assertThat(clusterStateHealth.getUnassignedPrimaryShards(), equalTo(counter.unassignedPrimary));
assertThat(clusterStateHealth.getActiveShardsPercent(), is(allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(100.0))));
}
}
| ClusterStateHealthTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InterfaceWithOnlyStaticsTest.java | {
"start": 2296,
"end": 2537
} | interface ____ {
public static final int foo = 42;
}
""")
.doTest();
}
@Test
public void negative_extends() {
testHelper
.addSourceLines(
"A.java", //
" | Test |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java | {
"start": 2405,
"end": 7451
} | class ____ extends AbstractSearchableSnapshotsTestCase {
private static final ShardId SHARD_ID = new ShardId(new Index("_index_name", "_index_id"), 0);
public void testRandomReads() throws IOException {
final String fileName = randomAlphaOfLength(5) + randomFileExtension();
final Tuple<String, byte[]> bytes = randomChecksumBytes(randomIntBetween(1, 100_000));
final byte[] fileData = bytes.v2();
final String checksum = bytes.v1();
final FileInfo fileInfo = new FileInfo(
randomAlphaOfLength(10),
new StoreFileMetadata(fileName, fileData.length, checksum, IndexVersion.current().luceneVersion().toString()),
ByteSizeValue.ofBytes(fileData.length)
);
final ByteSizeValue rangeSize;
if (rarely()) {
rangeSize = SharedBlobCacheService.SHARED_CACHE_RANGE_SIZE_SETTING.get(Settings.EMPTY);
} else if (randomBoolean()) {
rangeSize = ByteSizeValue.ofBytes(randomIntBetween(1, 16) * SharedBytes.PAGE_SIZE);
} else {
rangeSize = ByteSizeValue.ofBytes(randomIntBetween(1, 16000) * SharedBytes.PAGE_SIZE);
}
final ByteSizeValue regionSize;
if (rarely()) {
regionSize = SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.get(Settings.EMPTY);
} else {
regionSize = ByteSizeValue.ofBytes(randomIntBetween(1, 16) * SharedBytes.PAGE_SIZE);
}
final ByteSizeValue cacheSize;
if (rarely()) {
cacheSize = regionSize;
} else {
cacheSize = ByteSizeValue.ofBytes(randomLongBetween(1L, 10L) * regionSize.getBytes() + randomIntBetween(0, 100));
}
final Settings settings = Settings.builder()
.put(SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.getKey(), regionSize)
.put(SharedBlobCacheService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(), rangeSize)
.put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), cacheSize)
// don't test mmap on Windows since we don't have code to unmap the shared cache file which trips assertions after tests
.put(SharedBlobCacheService.SHARED_CACHE_MMAP.getKey(), WINDOWS == false && randomBoolean())
.put(SharedBlobCacheService.SHARED_CACHE_COUNT_READS.getKey(), randomBoolean())
.put("path.home", createTempDir())
.build();
final Environment environment = TestEnvironment.newEnvironment(settings);
for (Path path : environment.dataDirs()) {
Files.createDirectories(path);
}
SnapshotId snapshotId = new SnapshotId("_name", "_uuid");
final Path shardDir = randomShardPath(SHARD_ID);
final ShardPath shardPath = new ShardPath(false, shardDir, shardDir, SHARD_ID);
final Path cacheDir = Files.createDirectories(resolveSnapshotCache(shardDir).resolve(snapshotId.getUUID()));
try (
NodeEnvironment nodeEnvironment = new NodeEnvironment(settings, environment);
SharedBlobCacheService<CacheKey> sharedBlobCacheService = new SharedBlobCacheService<>(
nodeEnvironment,
settings,
threadPool,
threadPool.executor(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME),
BlobCacheMetrics.NOOP
);
CacheService cacheService = randomCacheService();
TestSearchableSnapshotDirectory directory = new TestSearchableSnapshotDirectory(
sharedBlobCacheService,
cacheService,
fileInfo,
snapshotId,
fileData,
shardPath,
cacheDir
)
) {
cacheService.start();
directory.loadSnapshot(createRecoveryState(true), () -> false, ActionListener.noop());
// TODO does not test using the recovery range size
final IndexInput indexInput = directory.openInput(fileName, randomIOContext());
assertThat(indexInput, instanceOf(FrozenIndexInput.class));
assertEquals(fileData.length, indexInput.length());
assertEquals(0, indexInput.getFilePointer());
final byte[] result = randomReadAndSlice(indexInput, fileData.length);
assertArrayEquals(fileData, result);
// validate clone copies cache file object
indexInput.seek(randomLongBetween(0, fileData.length - 1));
final IndexInput indexInputClone = indexInput.clone();
if (indexInputClone instanceof FrozenIndexInput clone) {
assertThat(clone.cacheFile(), not(equalTo(((FrozenIndexInput) indexInput).cacheFile())));
assertThat(clone.getFilePointer(), equalTo(indexInput.getFilePointer()));
} else {
assertThat(indexInputClone, isA(ByteArrayIndexInput.class));
}
indexInput.close();
}
}
private | FrozenIndexInputTests |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/serde/RexNodeJsonSerdeTest.java | {
"start": 10196,
"end": 14743
} | class ____ not be stateful"));
}
@Test
public void testSystemFunction() throws Throwable {
final SerdeContext serdeContext = contradictingSerdeContext();
final ThrowingCallable callable =
() ->
testJsonRoundTrip(
serdeContext,
createFunctionCall(
serdeContext,
ContextResolvedFunction.permanent(
FUNCTION_SYS_ID, NON_SER_UDF_IMPL)),
RexNode.class);
// Missing function
assertThatThrownBy(callable)
.satisfies(
anyCauseMatches(
TableException.class,
"Could not lookup system function '" + FUNCTION_NAME + "'."));
// Module provided permanent function
serdeContext
.getFlinkContext()
.getModuleManager()
.loadModule("myModule", FunctionProvidingModule.INSTANCE);
callable.call();
}
@Test
public void testTemporarySystemFunction() throws Throwable {
final SerdeContext serdeContext = contradictingSerdeContext();
final ThrowingCallable callable =
() ->
testJsonRoundTrip(
serdeContext,
createFunctionCall(
serdeContext,
ContextResolvedFunction.temporary(
FUNCTION_SYS_ID, NON_SER_UDF_IMPL)),
RexNode.class);
// Missing function
assertThatThrownBy(callable)
.satisfies(
anyCauseMatches(
TableException.class,
"Could not lookup system function '" + FUNCTION_NAME + "'."));
// Registered temporary system function
registerTemporarySystemFunction(serdeContext);
callable.call();
}
@Test
public void testTemporaryCatalogFunction() throws Throwable {
final SerdeContext serdeContext = contradictingSerdeContext();
final ThrowingCallable callable =
() ->
testJsonRoundTrip(
serdeContext,
createFunctionCall(
serdeContext,
ContextResolvedFunction.temporary(
FUNCTION_CAT_ID, NON_SER_FUNCTION_DEF_IMPL)),
RexNode.class);
// Missing function
assertThatThrownBy(callable)
.satisfies(
anyCauseMatches(
TableException.class,
"The persisted plan does not include all required "
+ "catalog metadata for function '"
+ FUNCTION_CAT_ID.asSummaryString()
+ "'."));
// Registered temporary function
registerTemporaryFunction(serdeContext);
callable.call();
}
@Test
public void testUnsupportedLegacyFunction() {
final SerdeContext serdeContext = contradictingSerdeContext();
assertThatThrownBy(
() ->
testJsonRoundTrip(
createFunctionCall(
serdeContext,
UserDefinedFunctionUtils.createScalarSqlFunction(
FUNCTION_SYS_ID,
FUNCTION_SYS_ID.toString(),
SER_UDF_IMPL,
FACTORY)),
RexNode.class))
.satisfies(
anyCauseMatches(
TableException.class,
"Functions of the deprecated function stack are not supported."));
}
@Nested
@DisplayName("Test CatalogPlanCompilation == IDENTIFIER")
| must |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/ParallelFluxTest.java | {
"start": 2123,
"end": 35735
} | class ____ {
@RegisterExtension
public AutoDisposingExtension afterTest = new AutoDisposingExtension();
@Test
@Tag("slow")
public void sequentialMode() {
Flux<Integer> source = Flux.range(1, 1_000_000)
.hide();
for (int i = 1; i < 33; i++) {
Flux<Integer> result = ParallelFlux.from(source, i)
.map(v -> v + 1)
.sequential();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
result.subscribe(ts);
ts.assertSubscribed()
.assertValueCount(1_000_000)
.assertComplete()
.assertNoError();
}
}
@Test
@Tag("slow")
public void sequentialModeFused() {
Flux<Integer> source = Flux.range(1, 1_000_000);
for (int i = 1; i < 33; i++) {
Flux<Integer> result = ParallelFlux.from(source, i)
.map(v -> v + 1)
.sequential();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
result.subscribe(ts);
ts.assertSubscribed()
.assertValueCount(1_000_000)
.assertComplete()
.assertNoError();
}
}
@Test
@Tag("slow")
public void parallelMode() {
Flux<Integer> source = Flux.range(1, 1_000_000)
.hide();
int ncpu = Math.max(8,
Runtime.getRuntime()
.availableProcessors());
for (int i = 1; i < ncpu + 1; i++) {
Scheduler scheduler = Schedulers.newParallel("test", i);
try {
Flux<Integer> result = ParallelFlux.from(source, i)
.runOn(scheduler)
.map(v -> v + 1)
.sequential();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
result.subscribe(ts);
ts.await(Duration.ofSeconds(10));
ts.assertSubscribed()
.assertValueCount(1_000_000)
.assertComplete()
.assertNoError();
}
finally {
scheduler.dispose();
}
}
}
@Test
@Tag("slow")
public void parallelModeFused() {
Flux<Integer> source = Flux.range(1, 1_000_000);
int ncpu = Math.max(8,
Runtime.getRuntime()
.availableProcessors());
for (int i = 1; i < ncpu + 1; i++) {
Scheduler scheduler = Schedulers.newParallel("test", i);
try {
Flux<Integer> result = ParallelFlux.from(source, i)
.runOn(scheduler)
.map(v -> v + 1)
.sequential();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
result.subscribe(ts);
ts.await(Duration.ofSeconds(10));
ts.assertSubscribed()
.assertValueCount(1_000_000)
.assertComplete()
.assertNoError();
}
finally {
scheduler.dispose();
}
}
}
@Test
public void collectSortedList() {
AssertSubscriber<List<Integer>> ts = AssertSubscriber.create();
Flux.just(10, 9, 8, 7, 6, 5, 4, 3, 2, 1)
.parallel()
.collectSortedList(Comparator.naturalOrder())
.subscribe(ts);
ts.assertValues(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10));
}
@Test
public void sorted() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(0);
Flux.just(10, 9, 8, 7, 6, 5, 4, 3, 2, 1)
.parallel()
.sorted(Comparator.naturalOrder())
.subscribe(ts);
ts.assertNoValues();
ts.request(2);
ts.assertValues(1, 2);
ts.request(5);
ts.assertValues(1, 2, 3, 4, 5, 6, 7);
ts.request(3);
ts.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
}
@Test
public void groupMerge() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 10)
.parallel()
.groups()
.flatMap(v -> v)
.subscribe(ts);
ts.assertContainValues(new HashSet<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)))
.assertNoError()
.assertComplete();
}
@Test
public void from() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
ParallelFlux.from(Flux.range(1, 5), Flux.range(6, 5))
.sequential()
.subscribe(ts);
ts.assertContainValues(new HashSet<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)))
.assertNoError()
.assertComplete();
}
@Test
public void concatMapUnordered() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 5)
.parallel()
.concatMap(v -> Flux.range(v * 10 + 1, 3))
.sequential()
.subscribe(ts);
ts.assertValues(11, 12, 13, 21, 22, 23, 31, 32, 33, 41, 42, 43, 51, 52, 53)
.assertNoError()
.assertComplete();
}
@Test
public void flatMapUnordered() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 5)
.parallel()
.flatMap(v -> Flux.range(v * 10 + 1, 3))
.sequential()
.subscribe(ts);
ts.assertValues(11, 12, 13, 21, 22, 23, 31, 32, 33, 41, 42, 43, 51, 52, 53)
.assertNoError()
.assertComplete();
}
@Test
public void testDoOnEachSignal() throws InterruptedException {
List<Signal<Integer>> signals = Collections.synchronizedList(new ArrayList<>(4));
List<Integer> values = Collections.synchronizedList(new ArrayList<>(2));
ParallelFlux<Integer> flux = Flux.just(1, 2)
.parallel(3)
.doOnEach(signals::add)
.doOnEach(s -> {
if (s.isOnNext()) values.add(s.get());
});
//we use a lambda subscriber and latch to avoid using `sequential`
CountDownLatch latch = new CountDownLatch(2);
flux.subscribe(v -> {
}, e -> latch.countDown(), latch::countDown);
assertThat(latch.await(2, TimeUnit.SECONDS)).isTrue();
assertThat(signals.size()).isEqualTo(5);
assertThat(signals.get(0).get())
.as("first onNext signal isn't first value")
.isEqualTo(1);
assertThat(signals.get(1).get())
.as("second onNext signal isn't last value")
.isEqualTo(2);
assertThat(signals.get(2)
.isOnComplete()).as("onComplete for rail 1 expected").isTrue();
assertThat(signals.get(3)
.isOnComplete()).as("onComplete for rail 2 expected").isTrue();
assertThat(signals.get(4)
.isOnComplete()).as("onComplete for rail 3 expected").isTrue();
assertThat(values.get(0)).as("1st onNext value unexpected").isEqualTo(1);
assertThat(values.get(1)).as("2nd onNext value unexpected").isEqualTo(2);
}
@Test
public void testDoOnEachSignalWithError() throws InterruptedException {
List<Signal<Integer>> signals = Collections.synchronizedList(new ArrayList<>(4));
ParallelFlux<Integer> flux = Flux.<Integer>error(new IllegalArgumentException("boom")).parallel(2)
.runOn(Schedulers.parallel())
.doOnEach(signals::add);
//we use a lambda subscriber and latch to avoid using `sequential`
CountDownLatch latch = new CountDownLatch(2);
flux.subscribe(v -> {
}, e -> latch.countDown(), latch::countDown);
assertThat(latch.await(2, TimeUnit.SECONDS)).isTrue();
assertThat(signals).hasSize(2);
assertThat(signals.get(0)
.isOnError()).as("rail 1 onError expected").isTrue();
assertThat(signals.get(1)
.isOnError()).as("rail 2 onError expected").isTrue();
assertThat(signals.get(0).getThrowable()).as("plain exception rail 1 expected")
.hasMessage("boom");
assertThat(signals.get(1).getThrowable()).as("plain exception rail 2 expected")
.hasMessage("boom");
}
@Test
public void testDoOnEachSignalNullConsumer() {
assertThatExceptionOfType(NullPointerException.class).isThrownBy(() -> {
Flux.just(1)
.parallel()
.doOnEach(null);
});
}
@Test
public void testDoOnEachSignalToSubscriber() {
AssertSubscriber<Integer> peekSubscriber = AssertSubscriber.create();
ParallelFlux<Integer> flux = Flux.just(1, 2)
.parallel(3)
.doOnEach(s -> s.accept(peekSubscriber));
flux.subscribe();
peekSubscriber.assertNotSubscribed();
peekSubscriber.assertValues(1, 2);
Assertions.assertThatExceptionOfType(AssertionError.class)
.isThrownBy(peekSubscriber::assertComplete)
.withMessage("Multiple completions: 3");
}
@Test
public void transformGroups() {
Set<Integer> values = new ConcurrentSkipListSet<>();
Flux<Integer> flux = Flux.range(1, 10)
.parallel(3)
.runOn(Schedulers.parallel())
.doOnNext(values::add)
.transformGroups(p -> p.log("rail" + p.key())
.map(i -> (p.key() + 1) * 100 + i))
.sequential();
StepVerifier.create(flux.sort())
.assertNext(i -> assertThat(i - 100)
.isBetween(1, 10))
.thenConsumeWhile(i -> i / 100 == 1)
.assertNext(i -> assertThat(i - 200)
.isBetween(1, 10))
.thenConsumeWhile(i -> i / 100 == 2)
.assertNext(i -> assertThat(i - 300)
.isBetween(1, 10))
.thenConsumeWhile(i -> i / 100 == 3)
.verifyComplete();
assertThat(values)
.hasSize(10)
.contains(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
}
@Test
public void transformGroupsMaintainsParallelismAndPrefetch() {
ParallelFlux<Integer> parallelFlux = Flux.range(1, 10)
.parallel(3)
.runOn(Schedulers.parallel(), 123);
ParallelFlux<Integer> composed = parallelFlux.transformGroups(rail -> rail.map(i -> i + 2));
assertThat(composed.parallelism())
.as("maintains parallelism")
.isEqualTo(parallelFlux.parallelism())
.isEqualTo(3);
assertThat(composed.getPrefetch())
.as("maintains prefetch")
.isEqualTo(parallelFlux.getPrefetch())
.isEqualTo(123);
}
@Test
public void transformGroupsMaintainsParallelism() {
ParallelFlux<Integer> parallelFlux = Flux.range(1, 10)
.parallel(3)
.map(i -> i + 2);
ParallelFlux<Integer> composed = parallelFlux.transformGroups(rail -> rail.map(i -> i + 2));
assertThat(composed.parallelism())
.as("maintains parallelism")
.isEqualTo(parallelFlux.parallelism())
.isEqualTo(3);
assertThat(parallelFlux.getPrefetch())
.as("parallel source no prefetch")
.isEqualTo(-1);
assertThat(composed.getPrefetch())
.as("reset prefetch to default")
.isNotEqualTo(parallelFlux.getPrefetch())
.isEqualTo(Queues.SMALL_BUFFER_SIZE);
}
@Test
public void fromSourceHasCpuParallelism() {
int cpus = Runtime.getRuntime()
.availableProcessors();
ParallelFlux<Integer> parallelFlux = ParallelFlux.from(Flux.range(1, 10));
assertThat(parallelFlux.parallelism())
.isEqualTo(cpus);
}
@Test
public void fromZeroParallelismRejected() {
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> ParallelFlux.from(Mono.just(1), 0))
.withMessage("parallelism > 0 required but it was 0");
}
@Test
public void fromNegativeParallelismRejected() {
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> ParallelFlux.from(Mono.just(1), -1))
.withMessage("parallelism > 0 required but it was -1");
}
@Test
public void fromZeroPrefetchRejected() {
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> ParallelFlux.from(Mono.just(1), 1, 0, Queues.small()))
.withMessage("prefetch > 0 required but it was 0");
}
@Test
public void fromNegativePrefetchRejected() {
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> ParallelFlux.from(Mono.just(1), 1, -1, Queues.small()))
.withMessage("prefetch > 0 required but it was -1");
}
@Test
public void fromZeroPublishersRejected() {
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> ParallelFlux.<Integer>from())
.withMessage("Zero publishers not supported");
}
@Test
@SuppressWarnings("unchecked")
public void fromZeroLengthArrayPublishersRejected() {
Publisher<Integer>[] array = new Publisher[0];
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> ParallelFlux.from(array))
.withMessage("Zero publishers not supported");
}
@Test
public void fromNullPublisherRejected() {
Assertions.assertThatExceptionOfType(NullPointerException.class)
.isThrownBy(() -> ParallelFlux.from((Publisher<?>) null))
.withMessage("source");
}
@Test
@SuppressWarnings("unchecked")
public void fromNullPublisherArrayRejected() {
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> ParallelFlux.from((Publisher[]) null))
.withMessage("Zero publishers not supported");
}
@Test
public void fromFuseableUsesThreadBarrier() {
final Set<String> between = new HashSet<>();
final ConcurrentHashMap<String, String> processing = new ConcurrentHashMap<>();
Flux<Integer> test = Flux.range(1, 10)
.publishOn(Schedulers.single(), false, 1)
.doOnNext(v -> between.add(Thread.currentThread()
.getName()))
.parallel(2, 1)
.runOn(Schedulers.boundedElastic(), 1)
.map(v -> {
processing.putIfAbsent(Thread.currentThread()
.getName(), "");
return v;
})
.sequential();
StepVerifier.create(test)
.expectSubscription()
.recordWith(() -> Collections.synchronizedList(new ArrayList<>(10)))
.expectNextCount(10)
.consumeRecordedWith(r -> assertThat(r).containsExactlyInAnyOrder(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
.expectComplete()
.verify(Duration.ofSeconds(5));
assertThat(between).hasSize(1);
assertThat(between).first()
.asString()
.startsWith("single-");
assertThat(processing.keySet())
.allSatisfy(k -> assertThat(k).containsIgnoringCase("boundedElastic-"));
}
@Test
public void runOnZeroPrefetchRejected() {
ParallelFlux<Integer> validSoFar = ParallelFlux.from(Mono.just(1));
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> validSoFar.runOn(Schedulers.parallel(), 0))
.withMessage("prefetch > 0 required but it was 0");
}
@Test
public void runOnNegativePrefetchRejected() {
ParallelFlux<Integer> validSoFar = ParallelFlux.from(Mono.just(1));
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> validSoFar.runOn(Schedulers.parallel(), -1))
.withMessage("prefetch > 0 required but it was -1");
}
@Test
public void sequentialZeroPrefetchRejected() {
ParallelFlux<Integer> validSoFar = ParallelFlux.from(Mono.just(1));
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> validSoFar.sequential(0))
.withMessage("prefetch > 0 required but it was 0");
}
@Test
public void sequentialNegativePrefetchRejected() {
ParallelFlux<Integer> validSoFar = ParallelFlux.from(Mono.just(1));
Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> validSoFar.sequential(-1))
.withMessage("prefetch > 0 required but it was -1");
}
@Test
public void subscribeOnNextOnErrorErrorsOnAllRails() {
LongAdder valueAdder = new LongAdder();
LongAdder errorAdder = new LongAdder();
Flux.range(1, 3)
.concatWith(Mono.error(new IllegalStateException("boom")))
.parallel(2)
.subscribe(v -> valueAdder.increment(), e -> errorAdder.increment());
assertThat(valueAdder.intValue()).isEqualTo(3);
assertThat(errorAdder.intValue()).isEqualTo(2);
}
@Test
public void validateTooFewSubscribers() {
validateSubscribers(2);
}
@Test
public void validateTooManySubscribers() {
validateSubscribers(4);
}
@SuppressWarnings("unchecked")
private void validateSubscribers(int size) {
List<Throwable> errors = Collections.synchronizedList(new ArrayList<>(size));
Subscriber<Integer>[] subs = new Subscriber[size];
for (int i = 0; i < subs.length; i++) {
subs[i] = new BaseSubscriber<Integer>() {
@Override
protected void hookOnSubscribe(Subscription subscription) { requestUnbounded(); }
@Override
protected void hookOnNext(Integer value) { }
@Override
protected void hookOnError(Throwable throwable) {
errors.add(throwable);
}
};
}
Flux.range(1, 3)
.parallel(3)
.validate(subs);
assertThat(errors)
.hasSize(size)
.allSatisfy(e -> assertThat(e).hasMessage("parallelism = 3, subscribers = " + size));
}
@Test
public void fromPublishersDefaultPrefetchIsMinusOne() {
assertThat(ParallelFlux.from(Flux.range(1, 5), Flux.range(5, 5))
.getPrefetch()).isEqualTo(-1);
}
@Test
public void fromPublisherDefaultPrefetchIsSmallBufferSize() {
assertThat(ParallelFlux.from(Flux.range(1, 5))
.getPrefetch()).isEqualTo(Queues.SMALL_BUFFER_SIZE);
}
@Test
public void fromPublishersSequentialSubscribe() {
List<Integer> values = Collections.synchronizedList(new ArrayList<>(10));
ParallelFlux.from(Flux.range(1, 3), Flux.range(4, 3))
.runOn(Schedulers.parallel())
.doOnNext(values::add)
.sequential()
.blockLast();
assertThat(values)
.hasSize(6)
.containsExactlyInAnyOrder(1, 2, 3, 4, 5, 6);
}
@Test
public void asChangesParallelism() {
assertThat(ParallelFlux.from(Flux.range(1, 10), 3)
.as(pf -> ParallelFlux.from(pf.sequential(), 5)
.log("secondParallel"))
.parallelism())
.isEqualTo(5);
}
@Test
public void transformChangesPrefetch() {
assertThat(ParallelFlux.from(Flux.range(1, 10), 3, 12, Queues.small())
.transform(pf -> pf.runOn(Schedulers.parallel(), 3)
.log()
.hide())
.getPrefetch())
.isEqualTo(3);
}
@Test
public void testPeekComplete() {
List<Signal> signals = Collections.synchronizedList(new ArrayList<>());
LongAdder subscribeCount = new LongAdder();
LongAdder valueCount = new LongAdder();
LongAdder requestCount = new LongAdder();
LongAdder completeCount = new LongAdder();
LongAdder cancelCount = new LongAdder();
LongAdder errorCount = new LongAdder();
LongAdder terminateCount = new LongAdder();
LongAdder afterTerminateCount = new LongAdder();
ParallelFlux.from(Flux.range(1, 10), 2)
.doOnEach(signals::add)
.doOnSubscribe(s -> subscribeCount.increment())
.doOnNext(v -> valueCount.increment())
.doOnRequest(r -> requestCount.increment())
.doOnComplete(completeCount::increment)
.doOnCancel(cancelCount::increment)
.doOnError(e -> errorCount.increment())
.doOnTerminate(terminateCount::increment)
.doAfterTerminate(afterTerminateCount::increment)
.subscribe(v -> {});
assertThat(signals).as("signals").hasSize(10 + 2); //2x5 onNext, 2x1 onComplete
assertThat(subscribeCount.longValue()).as("subscribe").isEqualTo(2); //1 per rail
assertThat(valueCount.longValue()).as("values").isEqualTo(10);
assertThat(requestCount.longValue()).as("request").isEqualTo(2); //1 per rail
assertThat(completeCount.longValue()).as("complete").isEqualTo(2); //1 per rail
assertThat(cancelCount.longValue()).as("cancel").isEqualTo(0);
assertThat(errorCount.longValue()).as("errors").isEqualTo(0);
assertThat(terminateCount.longValue()).as("terminate").isEqualTo(2); //1 per rail
assertThat(afterTerminateCount.longValue()).as("afterTerminate").isEqualTo(2); //1 per rail
}
@Test
public void testPeekError() {
List<Signal> signals = Collections.synchronizedList(new ArrayList<>());
LongAdder subscribeCount = new LongAdder();
LongAdder valueCount = new LongAdder();
LongAdder requestCount = new LongAdder();
LongAdder completeCount = new LongAdder();
LongAdder cancelCount = new LongAdder();
LongAdder errorCount = new LongAdder();
LongAdder terminateCount = new LongAdder();
LongAdder afterTerminateCount = new LongAdder();
ParallelFlux.from(Flux.range(1, 4).concatWith(Mono.error(new IllegalStateException("boom"))), 2)
.doOnEach(signals::add)
.doOnSubscribe(s -> subscribeCount.increment())
.doOnNext(v -> valueCount.increment())
.doOnRequest(r -> requestCount.increment())
.doOnComplete(completeCount::increment)
.doOnCancel(cancelCount::increment)
.doOnError(e -> errorCount.increment())
.doOnTerminate(terminateCount::increment)
.doAfterTerminate(afterTerminateCount::increment)
.subscribe(v -> {}, e -> {}); //error callback so that afterTerminate isn't swallowed
assertThat(signals).as("signals").hasSize(4 + 2); //2x2 onNext, 2x1 onError
assertThat(subscribeCount.longValue()).as("subscribe").isEqualTo(2); //1 per rail
assertThat(valueCount.longValue()).as("values").isEqualTo(4);
assertThat(requestCount.longValue()).as("request").isEqualTo(2); //1 per rail
assertThat(completeCount.longValue()).as("complete").isEqualTo(0);
assertThat(cancelCount.longValue()).as("cancel").isEqualTo(0);
assertThat(errorCount.longValue()).as("errors").isEqualTo(2);
assertThat(terminateCount.longValue()).as("terminate").isEqualTo(2); //1 per rail
assertThat(afterTerminateCount.longValue()).as("afterTerminate").isEqualTo(2); //1 per rail
}
@Test
public void testPeekCancel() {
List<Signal> signals = Collections.synchronizedList(new ArrayList<>());
LongAdder subscribeCount = new LongAdder();
LongAdder valueCount = new LongAdder();
LongAdder requestCount = new LongAdder();
LongAdder completeCount = new LongAdder();
LongAdder cancelCount = new LongAdder();
LongAdder errorCount = new LongAdder();
LongAdder terminateCount = new LongAdder();
LongAdder afterTerminateCount = new LongAdder();
ParallelFlux.from(Flux.range(1, 10), 2)
.doOnEach(signals::add)
.doOnSubscribe(s -> subscribeCount.increment())
.doOnNext(v -> valueCount.increment())
.doOnRequest(r -> requestCount.increment())
.doOnComplete(completeCount::increment)
.doOnCancel(cancelCount::increment)
.doOnError(e -> errorCount.increment())
.doOnTerminate(terminateCount::increment)
.doAfterTerminate(afterTerminateCount::increment)
.sequential().take(4, false).subscribe();
assertThat(signals).as("signals").hasSize(4); //2x2 onNext (+ 2 non-represented cancels)
assertThat(subscribeCount.longValue()).as("subscribe").isEqualTo(2); //1 per rail
assertThat(valueCount.longValue()).as("values").isEqualTo(4);
assertThat(requestCount.longValue()).as("request").isEqualTo(2); //1 per rail
assertThat(completeCount.longValue()).as("complete").isEqualTo(0);
assertThat(cancelCount.longValue()).as("cancel").isEqualTo(2);
assertThat(errorCount.longValue()).as("errors").isEqualTo(0);
//cancel don't trigger onTerminate/onAfterTerminate:
assertThat(terminateCount.longValue()).as("terminate").isEqualTo(0);
assertThat(afterTerminateCount.longValue()).as("afterTerminate").isEqualTo(0);
}
@Test
public void testConcatMapPrefetch() {
ParallelFlux<Integer> pf = ParallelFlux.from(Flux.range(1, 4), 2)
.concatMap(i -> Flux.just(i, 100 * i), 4);
assertThat(pf.getPrefetch()).isEqualTo(4);
StepVerifier.create(pf)
.expectNext(1, 100, 2, 200, 3, 300, 4, 400)
.verifyComplete();
}
@Test
public void testConcatMapDelayError() {
ParallelFlux<Integer> pf = ParallelFlux.from(Flux.range(1, 4), 2)
.concatMapDelayError(i -> {
if (i == 1)
return Mono.<Integer>error(new IllegalStateException("boom")).hide();
return Flux.just(i, 100 * i);
});
StepVerifier.create(pf)
.expectNext(2, 200, 3, 300, 4, 400)
.verifyErrorMessage("boom");
}
@Test
public void testConcatMapDelayErrorPrefetch() {
ParallelFlux<Integer> pf = ParallelFlux.from(Flux.range(1, 4), 2)
.concatMapDelayError(i -> {
if (i == 1)
return Mono.<Integer>error(new IllegalStateException("boom")).hide();
return Flux.just(i, 100 * i);
}, 4);
assertThat(pf.getPrefetch()).isEqualTo(4);
StepVerifier.create(pf)
.expectNext(2, 200, 3, 300, 4, 400)
.verifyErrorMessage("boom");
}
@Test
public void testConcatMapDelayErrorPrefetchDelayUntilEnd() {
ParallelFlux<Integer> pf = ParallelFlux.from(Flux.range(1, 4), 2)
.concatMapDelayError(i -> {
if (i == 1)
return Mono.error(new IllegalStateException("boom"));
return Flux.just(i, 100 * i);
}, false, 4);
assertThat(pf.getPrefetch()).isEqualTo(4);
StepVerifier.create(pf)
.verifyErrorMessage("boom");
}
@Test
public void testFlatMapDelayError() {
ParallelFlux<Integer> pf = ParallelFlux.from(Flux.range(1, 4), 2)
.flatMap(i -> {
if (i == 1)
return Mono.<Integer>error(new IllegalStateException("boom")).hide();
return Flux.just(i, 100 * i);
}, true);
StepVerifier.create(pf)
.expectNext(2, 200, 3, 300, 4, 400)
.verifyErrorMessage("boom");
}
@Test
public void testFlatMapDelayErrorMaxConcurrency() {
ParallelFlux<Integer> pf = ParallelFlux.from(Flux.range(1, 4), 2)
.flatMap(i -> {
if (i == 1)
return Mono.<Integer>error(new IllegalStateException("boom")).hide();
return Flux.just(i, 100 * i);
}, true, 2);
StepVerifier.create(pf)
.expectNext(2, 200, 3, 300, 4, 400)
.verifyErrorMessage("boom");
}
@Test
public void testPublisherSubscribeUsesSequential() {
LongAdder valueCount = new LongAdder();
ParallelFlux<Integer> pf = ParallelFlux.from(Flux.range(1, 4), 2);
pf.subscribe(new BaseSubscriber<Integer>() {
@Override
protected void hookOnSubscribe(Subscription subscription) {
requestUnbounded();
}
@Override
protected void hookOnNext(Integer value) {
valueCount.increment();
}
});
assertThat(valueCount.intValue()).isEqualTo(4);
}
@Test
public void collectSortedListBothEmpty() {
List<Integer> result = ParallelFlux.sortedMerger(Collections.emptyList(),
Collections.emptyList(),
Integer::compareTo);
assertThat(result)
.isEmpty();
}
@Test
public void collectSortedListRightLarger() {
List<Integer> left = Arrays.asList(1, 3);
List<Integer> right = Arrays.asList(2, 4, 5, 6);
List<Integer> result = ParallelFlux.sortedMerger(left, right, Integer::compareTo);
assertThat(result)
.containsExactly(1, 2, 3, 4, 5, 6);
}
@Test
public void collectSortedListLeftLarger() {
List<Integer> left = Arrays.asList(2, 4, 5, 6);
List<Integer> right = Arrays.asList(1, 3);
List<Integer> result = ParallelFlux.sortedMerger(left, right, Integer::compareTo);
assertThat(result)
.containsExactly(1, 2, 3, 4, 5, 6);
}
@Test
public void collectSortedListLeftEmpty() {
List<Integer> left = Collections.emptyList();
List<Integer> right = Arrays.asList(2, 4, 5, 6);
List<Integer> result = ParallelFlux.sortedMerger(left, right, Integer::compareTo);
assertThat(result)
.containsExactly(2, 4, 5, 6);
}
@Test
public void collectSortedListRightEmpty() {
List<Integer> left = Arrays.asList(2, 4, 5, 6);
List<Integer> right = Collections.emptyList();
List<Integer> result = ParallelFlux.sortedMerger(left, right, Integer::compareTo);
assertThat(result)
.containsExactly(2, 4, 5, 6);
}
@Test
public void testParallelism() throws Exception
{
Flux<Integer> flux = Flux.just(1, 2, 3);
Set<String> threadNames = Collections.synchronizedSet(new TreeSet<>());
AtomicInteger count = new AtomicInteger();
CountDownLatch latch = new CountDownLatch(3);
flux
// Uncomment line below for failure
.cache(1)
.parallel(3)
.runOn(afterTest.autoDispose(Schedulers.newBoundedElastic(4, 100, "TEST")))
.subscribe(i ->
{
threadNames.add(Thread.currentThread()
.getName());
count.incrementAndGet();
latch.countDown();
tryToSleep(1000);
});
latch.await();
assertThat(count.get()).as("Multithreaded count").isEqualTo(3);
assertThat(threadNames).as("Multithreaded threads").hasSize(3);
}
@Test
public void parallelSubscribeAndDispose() throws InterruptedException {
AtomicInteger nextCount = new AtomicInteger();
CountDownLatch cancelLatch = new CountDownLatch(1);
TestPublisher<Integer> source = TestPublisher.create();
Disposable d = source
.flux()
.parallel(3)
.doOnCancel(cancelLatch::countDown)
.subscribe(i -> nextCount.incrementAndGet());
source.next(1, 2, 3);
d.dispose();
source.emit(4, 5, 6);
boolean finished = cancelLatch.await(300, TimeUnit.MILLISECONDS);
assertThat(finished).as("cancelled latch").isTrue();
assertThat(d.isDisposed()).as("disposed").isTrue();
assertThat(nextCount.get()).as("received count").isEqualTo(3);
}
@Test
public void hooks() throws Exception {
String key = UUID.randomUUID().toString();
try {
Hooks.onLastOperator(key, p -> new CorePublisher<Object>() {
@Override
public void subscribe(CoreSubscriber<? super Object> subscriber) {
((CorePublisher<?>) p).subscribe(subscriber);
}
@Override
public void subscribe(Subscriber<? super Object> s) {
throw new IllegalStateException("Should not be called");
}
});
List<Integer> results = new CopyOnWriteArrayList<>();
CountDownLatch latch = new CountDownLatch(1);
Flux.just(1, 2, 3)
.parallel()
.doOnNext(results::add)
.doOnComplete(latch::countDown)
.subscribe();
latch.await(1, TimeUnit.SECONDS);
assertThat(results).containsOnly(1, 2, 3);
}
finally {
Hooks.resetOnLastOperator(key);
}
}
@Test
public void subscribeWithCoreSubscriber() throws Exception {
List<Integer> results = new CopyOnWriteArrayList<>();
CountDownLatch latch = new CountDownLatch(1);
Flux.just(1, 2, 3).parallel().subscribe(new CoreSubscriber<Integer>() {
@Override
public void onSubscribe(Subscription s) {
s.request(Long.MAX_VALUE);
}
@Override
public void onNext(Integer integer) {
results.add(integer);
}
@Override
public void onError(Throwable t) {
t.printStackTrace();
}
@Override
public void onComplete() {
latch.countDown();
}
});
latch.await(1, TimeUnit.SECONDS);
assertThat(results).containsOnly(1, 2, 3);
}
// https://github.com/reactor/reactor-core/issues/1656
@Test
public void doOnEachContext() {
List<String> results = new CopyOnWriteArrayList<>();
Flux.just(1, 2, 3)
.parallel(3)
.doOnEach(s -> {
String valueFromContext = s.getContextView()
.getOrDefault("test", null);
results.add(s + " " + valueFromContext);
})
.reduce(Integer::sum)
.contextWrite(Context.of("test", "Hello!"))
.block();
assertThat(results).containsExactlyInAnyOrder(
"onNext(1) Hello!",
"onNext(2) Hello!",
"onNext(3) Hello!",
"onComplete() Hello!",
"onComplete() Hello!",
"onComplete() Hello!"
);
}
private void tryToSleep(long value)
{
try
{
Thread.sleep(value);
}
catch(InterruptedException e)
{
e.printStackTrace();
}
}
}
| ParallelFluxTest |
java | dropwizard__dropwizard | dropwizard-migrations/src/main/java/io/dropwizard/migrations/DbTestCommand.java | {
"start": 367,
"end": 1481
} | class ____<T extends Configuration> extends AbstractLiquibaseCommand<T> {
public DbTestCommand(DatabaseConfiguration<T> strategy, Class<T> configurationClass, String migrationsFileName) {
super("test", "Apply and rollback pending change sets.", strategy, configurationClass, migrationsFileName);
}
@Override
public void configure(Subparser subparser) {
super.configure(subparser);
subparser.addArgument("-i", "--include")
.action(Arguments.append())
.dest("contexts")
.help("include change sets from the given context");
}
@Override
public void run(Namespace namespace, Liquibase liquibase) throws Exception {
liquibase.updateTestingRollback(getContext(namespace));
}
private String getContext(Namespace namespace) {
final List<Object> contexts = namespace.getList("contexts");
if (contexts == null) {
return "";
}
return contexts.stream()
.map(Object::toString)
.collect(Collectors.joining(","));
}
}
| DbTestCommand |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java | {
"start": 1250,
"end": 6232
} | class ____ extends Configured implements Tool {
static final int NUM_MAPS_DEFAULT = 1;
static final int SIMPLE_ENTITY_WRITER = 1;
static final int JOB_HISTORY_FILE_REPLAY_MAPPER = 2;
static int mapperType = SIMPLE_ENTITY_WRITER;
static final int TIMELINE_SERVICE_VERSION_1 = 1;
static final int TIMELINE_SERVICE_VERSION_2 = 2;
static int timeline_service_version = TIMELINE_SERVICE_VERSION_1;
protected static int printUsage() {
System.err.println(
"Usage: [-m <maps>] number of mappers (default: " + NUM_MAPS_DEFAULT +
")\n" +
" [-v] timeline service version (default: " +
TIMELINE_SERVICE_VERSION_1 + ")\n" +
" 1. version 1.x\n" +
" 2. version 2.x\n" +
" [-mtype <mapper type in integer>] (default: " +
SIMPLE_ENTITY_WRITER + ")\n" +
" 1. simple entity write mapper\n" +
" 2. jobhistory files replay mapper\n" +
" [-s <(KBs)test>] number of KB per put (mtype=1, default: " +
SimpleEntityWriterConstants.KBS_SENT_DEFAULT + " KB)\n" +
" [-t] package sending iterations per mapper (mtype=1, default: " +
SimpleEntityWriterConstants.TEST_TIMES_DEFAULT + ")\n" +
" [-d <path>] hdfs root path of job history files (mtype=2)\n" +
" [-r <replay mode>] (mtype=2)\n" +
" 1. write all entities for a job in one put (default)\n" +
" 2. write one entity at a time\n");
GenericOptionsParser.printGenericCommandUsage(System.err);
return -1;
}
/**
* Configure a job given argv.
*/
public static boolean parseArgs(String[] args, Job job) throws IOException {
// set the common defaults
Configuration conf = job.getConfiguration();
conf.setInt(MRJobConfig.NUM_MAPS, NUM_MAPS_DEFAULT);
for (int i = 0; i < args.length; i++) {
if (args.length == i + 1) {
System.out.println("ERROR: Required parameter missing from " + args[i]);
return printUsage() == 0;
}
try {
if ("-v".equals(args[i])) {
timeline_service_version = Integer.parseInt(args[++i]);
} else if ("-m".equals(args[i])) {
if (Integer.parseInt(args[++i]) > 0) {
job.getConfiguration()
.setInt(MRJobConfig.NUM_MAPS, Integer.parseInt(args[i]));
}
} else if ("-mtype".equals(args[i])) {
mapperType = Integer.parseInt(args[++i]);
} else if ("-s".equals(args[i])) {
if (Integer.parseInt(args[++i]) > 0) {
conf.setInt(SimpleEntityWriterConstants.KBS_SENT,
Integer.parseInt(args[i]));
}
} else if ("-t".equals(args[i])) {
if (Integer.parseInt(args[++i]) > 0) {
conf.setInt(SimpleEntityWriterConstants.TEST_TIMES,
Integer.parseInt(args[i]));
}
} else if ("-d".equals(args[i])) {
conf.set(JobHistoryFileReplayHelper.PROCESSING_PATH, args[++i]);
} else if ("-r".equals(args[i])) {
conf.setInt(JobHistoryFileReplayHelper.REPLAY_MODE,
Integer.parseInt(args[++i]));
} else {
System.out.println("Unexpected argument: " + args[i]);
return printUsage() == 0;
}
} catch (NumberFormatException except) {
System.out.println("ERROR: Integer expected instead of " + args[i]);
return printUsage() == 0;
} catch (Exception e) {
throw (IOException)new IOException().initCause(e);
}
}
// handle mapper-specific settings
switch (mapperType) {
case JOB_HISTORY_FILE_REPLAY_MAPPER:
String processingPath =
conf.get(JobHistoryFileReplayHelper.PROCESSING_PATH);
if (processingPath == null || processingPath.isEmpty()) {
System.out.println("processing path is missing while mtype = 2");
return printUsage() == 0;
}
switch (timeline_service_version) {
case TIMELINE_SERVICE_VERSION_2:
job.setMapperClass(JobHistoryFileReplayMapperV2.class);
break;
case TIMELINE_SERVICE_VERSION_1:
default:
job.setMapperClass(JobHistoryFileReplayMapperV1.class);
break;
}
break;
case SIMPLE_ENTITY_WRITER:
default:
// use the current timestamp as the "run id" of the test: this will
// be used as simulating the cluster timestamp for apps
conf.setLong(
SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID,
System.currentTimeMillis());
switch (timeline_service_version) {
case TIMELINE_SERVICE_VERSION_2:
job.setMapperClass(SimpleEntityWriterV2.class);
break;
case TIMELINE_SERVICE_VERSION_1:
default:
job.setMapperClass(SimpleEntityWriterV1.class);
break;
}
break;
}
return true;
}
/**
* TimelineServer Performance counters
*/
| TimelineServicePerformance |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java | {
"start": 1773,
"end": 7402
} | class ____ {
private final int checksumSize = 16;
private final int blockSize = checksumSize*2;
private final int numBlocks = 4;
private final int filesize = blockSize*numBlocks;
private final int numDataNodes = 4;
private static final Logger LOG = LoggerFactory.getLogger(
"org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage");
// Waits for all of the blocks to have expected replication
private void waitForBlockReplication(String filename,
ClientProtocol namenode,
int expected, long maxWaitSec)
throws IOException {
long start = Time.monotonicNow();
//wait for all the blocks to be replicated;
LOG.info("Checking for block replication for " + filename);
LocatedBlocks blocks = namenode.getBlockLocations(filename, 0, Long.MAX_VALUE);
assertEquals(numBlocks, blocks.locatedBlockCount());
for (int i = 0; i < numBlocks; ++i) {
LOG.info("Checking for block:" + (i+1));
while (true) { // Loop to check for block i (usually when 0 is done all will be done
blocks = namenode.getBlockLocations(filename, 0, Long.MAX_VALUE);
assertEquals(numBlocks, blocks.locatedBlockCount());
LocatedBlock block = blocks.get(i);
int actual = block.getLocations().length;
if ( actual == expected ) {
LOG.info("Got enough replicas for " + (i+1) + "th block " + block.getBlock() +
", got " + actual + ".");
break;
}
LOG.info("Not enough replicas for " + (i+1) + "th block " + block.getBlock() +
" yet. Expecting " + expected + ", got " +
actual + ".");
if (maxWaitSec > 0 &&
(Time.monotonicNow() - start) > (maxWaitSec * 1000)) {
throw new IOException("Timedout while waiting for all blocks to " +
" be replicated for " + filename);
}
try {
Thread.sleep(500);
} catch (InterruptedException ignored) {}
}
}
}
/* This test makes sure that NameNode retries all the available blocks
* for under replicated blocks. This test uses simulated storage and one
* of its features to inject blocks,
*
* It creates a file with several blocks and replication of 4.
* The cluster is then shut down - NN retains its state but the DNs are
* all simulated and hence loose their blocks.
* The blocks are then injected in one of the DNs. The expected behaviour is
* that the NN will arrange for themissing replica will be copied from a valid source.
*/
@Test
public void testInjection() throws IOException {
MiniDFSCluster cluster = null;
String testFile = "/replication-test-file";
Path testPath = new Path(testFile);
byte buffer[] = new byte[1024];
for (int i=0; i<buffer.length; i++) {
buffer[i] = '1';
}
try {
Configuration conf = new HdfsConfiguration();
conf.set(DFSConfigKeys.DFS_REPLICATION_KEY, Integer.toString(numDataNodes));
conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, checksumSize);
SimulatedFSDataset.setFactory(conf);
//first time format
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDataNodes).build();
cluster.waitActive();
String bpid = cluster.getNamesystem().getBlockPoolId();
DFSClient dfsClient = new DFSClient(new InetSocketAddress("localhost",
cluster.getNameNodePort()),
conf);
DFSTestUtil.createFile(cluster.getFileSystem(), testPath, filesize,
filesize, blockSize, (short) numDataNodes, 0L);
waitForBlockReplication(testFile, dfsClient.getNamenode(), numDataNodes, 20);
List<Map<DatanodeStorage, BlockListAsLongs>> blocksList = cluster.getAllBlockReports(bpid);
cluster.shutdown();
cluster = null;
/* Start the MiniDFSCluster with more datanodes since once a writeBlock
* to a datanode node fails, same block can not be written to it
* immediately. In our case some replication attempts will fail.
*/
LOG.info("Restarting minicluster");
conf = new HdfsConfiguration();
SimulatedFSDataset.setFactory(conf);
conf.set(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, "0.0f");
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(numDataNodes * 2)
.format(false)
.build();
cluster.waitActive();
Set<Block> uniqueBlocks = new HashSet<Block>();
for(Map<DatanodeStorage, BlockListAsLongs> map : blocksList) {
for(BlockListAsLongs blockList : map.values()) {
for(Block b : blockList) {
uniqueBlocks.add(new Block(b));
}
}
}
// Insert all the blocks in the first data node
LOG.info("Inserting " + uniqueBlocks.size() + " blocks");
cluster.injectBlocks(0, uniqueBlocks, null);
dfsClient = new DFSClient(new InetSocketAddress("localhost",
cluster.getNameNodePort()),
conf);
waitForBlockReplication(testFile, dfsClient.getNamenode(), numDataNodes, -1);
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
}
| TestInjectionForSimulatedStorage |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/CustomHttpSecurityPolicy.java | {
"start": 451,
"end": 1353
} | class ____ implements HttpSecurityPolicy {
@Inject
ResourceInfo resourceInfo;
@Override
public Uni<CheckResult> checkPermission(RoutingContext request, Uni<SecurityIdentity> identity,
AuthorizationRequestContext requestContext) {
if ("CustomPolicyResource".equals(resourceInfo.getResourceClass().getSimpleName())
&& "isUserAdmin".equals(resourceInfo.getResourceMethod().getName())) {
return identity.onItem().ifNotNull().transform(i -> {
if (i.hasRole("user")) {
return new CheckResult(true, QuarkusSecurityIdentity.builder(i).addRole("admin").build());
}
return CheckResult.PERMIT;
});
}
return Uni.createFrom().item(CheckResult.PERMIT);
}
@Override
public String name() {
return "custom";
}
}
| CustomHttpSecurityPolicy |
java | quarkusio__quarkus | integration-tests/oidc-token-propagation-reactive/src/test/java/io/quarkus/it/keycloak/TestSecurityLazyAuthTest.java | {
"start": 1852,
"end": 2030
} | interface ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.METHOD })
@TestSecurity(user = "user1", roles = "tester")
public @ | TestAsUser1Viewer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java | {
"start": 65964,
"end": 76400
} | class ____ gracefully handle
* interactions that occur when it has been stopped.
*/
@Test
public void testShutDown() {
localDtr = createNewDelegationTokenRenewer(conf, counter);
RMContext mockContext = mock(RMContext.class);
when(mockContext.getSystemCredentialsForApps()).thenReturn(
new ConcurrentHashMap<ApplicationId, SystemCredentialsForAppsProto>());
when(mockContext.getDispatcher()).thenReturn(dispatcher);
ClientRMService mockClientRMService = mock(ClientRMService.class);
when(mockContext.getClientRMService()).thenReturn(mockClientRMService);
InetSocketAddress sockAddr =
InetSocketAddress.createUnresolved("localhost", 1234);
when(mockClientRMService.getBindAddress()).thenReturn(sockAddr);
localDtr.setRMContext(mockContext);
when(mockContext.getDelegationTokenRenewer()).thenReturn(localDtr);
localDtr.init(conf);
localDtr.start();
delegationTokenRenewer.stop();
delegationTokenRenewer.applicationFinished(
BuilderUtils.newApplicationId(0, 1));
}
@Test
@Timeout(value = 10)
public void testTokenSequenceNoAfterNewTokenAndRenewal() throws Exception {
conf.setBoolean(YarnConfiguration.RM_PROXY_USER_PRIVILEGES_ENABLED, true);
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
"kerberos");
UserGroupInformation.setConfiguration(conf);
final Credentials credsx = new Credentials();
DelegationTokenIdentifier dtId1 = new DelegationTokenIdentifier(
new Text("user1"), new Text("renewer"), new Text("user1"));
final Token<DelegationTokenIdentifier> expectedToken =
new Token<DelegationTokenIdentifier>(dtId1.getBytes(),
"password2".getBytes(), dtId1.getKind(), new Text("service2"));
// fire up the renewer
localDtr = new DelegationTokenRenewer() {
@Override
protected Token<?>[] obtainSystemTokensForUser(String user,
final Credentials credentials) throws IOException {
credentials.addToken(expectedToken.getService(), expectedToken);
return new Token<?>[] {expectedToken};
}
};
RMContext mockContext = mock(RMContext.class);
when(mockContext.getSystemCredentialsForApps()).thenReturn(
new ConcurrentHashMap<ApplicationId, SystemCredentialsForAppsProto>());
when(mockContext.getDispatcher()).thenReturn(dispatcher);
ClientRMService mockClientRMService = mock(ClientRMService.class);
when(mockContext.getClientRMService()).thenReturn(mockClientRMService);
InetSocketAddress sockAddr =
InetSocketAddress.createUnresolved("localhost", 1234);
when(mockClientRMService.getBindAddress()).thenReturn(sockAddr);
localDtr.setRMContext(mockContext);
when(mockContext.getDelegationTokenRenewer()).thenReturn(localDtr);
localDtr.init(conf);
localDtr.start();
final ApplicationId appId1 = ApplicationId.newInstance(1234, 1);
Collection<ApplicationId> appIds = new ArrayList<ApplicationId>(1);
appIds.add(appId1);
localDtr.addApplicationSync(appId1, credsx, false, "user1");
// Ensure incrTokenSequenceNo has been called for new token request
verify(mockContext, times(1)).incrTokenSequenceNo();
DelegationTokenToRenew dttr = localDtr.new DelegationTokenToRenew(appIds,
expectedToken, conf, 1000, false, "user1");
localDtr.requestNewHdfsDelegationTokenIfNeeded(dttr);
// Ensure incrTokenSequenceNo has been called for token renewal as well.
verify(mockContext, times(2)).incrTokenSequenceNo();
}
/**
* Test case to ensure token renewer threads are timed out by inducing
* artificial delay.
*
* Because of time out, retries would be attempted till it reaches max retry
* attempt and finally asserted using used threads count.
*
* @throws Exception
*/
@Test
@Timeout(value = 30)
public void testTokenThreadTimeout() throws Exception {
Configuration yarnConf = new YarnConfiguration();
yarnConf.set("override_token_expire_time", "30000");
yarnConf.setBoolean(YarnConfiguration.RM_PROXY_USER_PRIVILEGES_ENABLED,
true);
yarnConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
"kerberos");
yarnConf.setClass(YarnConfiguration.RM_STORE, MemoryRMStateStore.class,
RMStateStore.class);
yarnConf.setTimeDuration(YarnConfiguration.RM_DT_RENEWER_THREAD_TIMEOUT, 2,
TimeUnit.SECONDS);
yarnConf.setTimeDuration(
YarnConfiguration.RM_DT_RENEWER_THREAD_RETRY_INTERVAL, 0,
TimeUnit.SECONDS);
yarnConf.setInt(YarnConfiguration.RM_DT_RENEWER_THREAD_RETRY_MAX_ATTEMPTS,
3);
UserGroupInformation.setConfiguration(yarnConf);
Text userText = new Text("user1");
DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(userText,
new Text("renewer1"), userText);
final Token<DelegationTokenIdentifier> originalToken =
new Token<>(dtId.getBytes(), "password1".getBytes(), dtId.getKind(),
new Text("service1"));
Credentials credentials = new Credentials();
credentials.addToken(userText, originalToken);
AtomicBoolean renewDelay = new AtomicBoolean(false);
// -1 is because of thread allocated to pool tracker runnable tasks
AtomicInteger threadCounter = new AtomicInteger(-1);
renewDelay.set(true);
DelegationTokenRenewer renewer = createNewDelegationTokenRenewerForTimeout(
yarnConf, threadCounter, renewDelay);
rm = new TestSecurityMockRM(yarnConf) {
@Override
protected DelegationTokenRenewer createDelegationTokenRenewer() {
return renewer;
}
};
rm.start();
MockRMAppSubmitter.submit(rm,
MockRMAppSubmissionData.Builder.createWithMemory(200, rm)
.withAppName("name")
.withUser("user")
.withAcls(new HashMap<ApplicationAccessType, String>())
.withUnmanagedAM(false)
.withQueue("default")
.withMaxAppAttempts(1)
.withCredentials(credentials)
.build());
int attempts = yarnConf.getInt(
YarnConfiguration.RM_DT_RENEWER_THREAD_RETRY_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_DT_RENEWER_THREAD_RETRY_MAX_ATTEMPTS);
GenericTestUtils.waitFor(() -> threadCounter.get() >= attempts, 100, 20000);
// Ensure no. of threads has been used in renewer service thread pool is
// higher than the configured max retry attempts
assertTrue(threadCounter.get() >= attempts);
rm.close();
}
/**
* Test case to ensure token renewer threads are running as usual and finally
* asserted only 1 thread has been used.
*
* @throws Exception
*/
@Test
@Timeout(value = 30)
public void testTokenThreadTimeoutWithoutDelay() throws Exception {
Configuration yarnConf = new YarnConfiguration();
yarnConf.setBoolean(YarnConfiguration.RM_PROXY_USER_PRIVILEGES_ENABLED,
true);
yarnConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
"kerberos");
yarnConf.set(YarnConfiguration.RM_STORE,
MemoryRMStateStore.class.getName());
yarnConf.setTimeDuration(YarnConfiguration.RM_DT_RENEWER_THREAD_TIMEOUT, 3,
TimeUnit.SECONDS);
yarnConf.setTimeDuration(
YarnConfiguration.RM_DT_RENEWER_THREAD_RETRY_INTERVAL, 3,
TimeUnit.SECONDS);
yarnConf.setInt(YarnConfiguration.RM_DT_RENEWER_THREAD_RETRY_MAX_ATTEMPTS,
3);
UserGroupInformation.setConfiguration(yarnConf);
Text userText = new Text("user1");
DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(userText,
new Text("renewer1"), userText);
final Token<DelegationTokenIdentifier> originalToken =
new Token<>(dtId.getBytes(), "password1".getBytes(), dtId.getKind(),
new Text("service1"));
Credentials credentials = new Credentials();
credentials.addToken(userText, originalToken);
AtomicBoolean renewDelay = new AtomicBoolean(false);
// -1 is because of thread allocated to pool tracker runnable tasks
AtomicInteger threadCounter = new AtomicInteger(-1);
DelegationTokenRenewer renwer = createNewDelegationTokenRenewerForTimeout(
yarnConf, threadCounter, renewDelay);
rm = new TestSecurityMockRM(yarnConf) {
@Override
protected DelegationTokenRenewer createDelegationTokenRenewer() {
return renwer;
}
};
rm.start();
MockRMAppSubmitter.submit(rm,
MockRMAppSubmissionData.Builder.createWithMemory(200, rm)
.withAppName("name")
.withUser("user")
.withAcls(new HashMap<ApplicationAccessType, String>())
.withUnmanagedAM(false)
.withQueue("default")
.withMaxAppAttempts(1)
.withCredentials(credentials)
.build());
GenericTestUtils.waitFor(() -> threadCounter.get() == 1, 2000, 40000);
// Ensure only one thread has been used in renewer service thread pool.
assertEquals(threadCounter.get(), 1);
rm.close();
}
private DelegationTokenRenewer createNewDelegationTokenRenewerForTimeout(
Configuration config, final AtomicInteger renewerCounter,
final AtomicBoolean renewDelay) {
DelegationTokenRenewer renew = new DelegationTokenRenewer() {
@Override
protected ThreadPoolExecutor createNewThreadPoolService(
Configuration configuration) {
ThreadPoolExecutor pool = new ThreadPoolExecutor(5, 5, 3L,
TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>()) {
@Override
public Future<?> submit(Runnable r) {
renewerCounter.incrementAndGet();
return super.submit(r);
}
};
return pool;
}
@Override
protected void renewToken(final DelegationTokenToRenew dttr)
throws IOException {
try {
if (renewDelay.get()) {
// Delay for 2 times than the configured timeout
Thread.sleep(config.getTimeDuration(
YarnConfiguration.RM_DT_RENEWER_THREAD_TIMEOUT,
YarnConfiguration.DEFAULT_RM_DT_RENEWER_THREAD_TIMEOUT,
TimeUnit.MILLISECONDS) * 2);
}
super.renewToken(dttr);
} catch (InterruptedException e) {
LOG.info("Sleep Interrupted", e);
}
}
};
renew.setDelegationTokenRenewerPoolTracker(true);
return renew;
}
}
| can |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/ext/ExternalTypeIdWithCreator3045Test.java | {
"start": 763,
"end": 1892
} | class ____ extends TypeIdResolverBase {
private static final long serialVersionUID = 1L;
public ChildBaseByParentTypeResolver() {
// System.out.println("Create ChildBaseByParentTypeResolver");
}
private JavaType superType;
@Override
public void init(JavaType baseType) {
superType = baseType;
}
@Override
public Id getMechanism() {
return Id.NAME;
}
@Override
public JavaType typeFromId(DatabindContext context, String id) {
switch (id) {
case "track":
return context.constructSpecializedType(superType, MyData.class);
}
throw new IllegalArgumentException("No type with id '"+id+"'");
}
@Override
public String idFromValue(DatabindContext ctxt, Object value) {
return null;
}
@Override
public String idFromValueAndType(DatabindContext ctxt, Object value, Class<?> suggestedType) {
return null;
}
}
static | ChildBaseByParentTypeResolver |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/FederationNamenodeServiceState.java | {
"start": 989,
"end": 1055
} | enum ____ used to evaluate
* NN priority for RPC calls.
*/
public | is |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/spi/MetadataBuilderInitializer.java | {
"start": 752,
"end": 885
} | interface ____ {
void contribute(MetadataBuilder metadataBuilder, StandardServiceRegistry serviceRegistry);
}
| MetadataBuilderInitializer |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java | {
"start": 7967,
"end": 11726
} | class ____ {
public static final ParseField COUNT = new ParseField("count");
public static final ParseField MIN_LENGTH = new ParseField("min_length");
public static final ParseField MIN_LENGTH_AS_STRING = new ParseField("min_length_as_string");
public static final ParseField MAX_LENGTH = new ParseField("max_length");
public static final ParseField MAX_LENGTH_AS_STRING = new ParseField("max_as_string");
public static final ParseField AVG_LENGTH = new ParseField("avg_length");
public static final ParseField AVG_LENGTH_AS_STRING = new ParseField("avg_length_as_string");
public static final ParseField ENTROPY = new ParseField("entropy");
public static final ParseField ENTROPY_AS_STRING = new ParseField("entropy_string");
public static final ParseField DISTRIBUTION = new ParseField("distribution");
public static final ParseField DISTRIBUTION_AS_STRING = new ParseField("distribution_string");
}
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.COUNT.getPreferredName(), count);
if (count > 0) {
builder.field(Fields.MIN_LENGTH.getPreferredName(), minLength);
builder.field(Fields.MAX_LENGTH.getPreferredName(), maxLength);
builder.field(Fields.AVG_LENGTH.getPreferredName(), getAvgLength());
builder.field(Fields.ENTROPY.getPreferredName(), getEntropy());
if (showDistribution) {
builder.field(Fields.DISTRIBUTION.getPreferredName(), getDistribution());
}
if (format != DocValueFormat.RAW) {
builder.field(Fields.MIN_LENGTH_AS_STRING.getPreferredName(), format.format(getMinLength()));
builder.field(Fields.MAX_LENGTH_AS_STRING.getPreferredName(), format.format(getMaxLength()));
builder.field(Fields.AVG_LENGTH_AS_STRING.getPreferredName(), format.format(getAvgLength()));
builder.field(Fields.ENTROPY_AS_STRING.getPreferredName(), format.format(getEntropy()));
if (showDistribution) {
builder.startObject(Fields.DISTRIBUTION_AS_STRING.getPreferredName());
for (Map.Entry<String, Double> e : getDistribution().entrySet()) {
builder.field(e.getKey(), format.format(e.getValue()).toString());
}
builder.endObject();
}
}
} else {
builder.nullField(Fields.MIN_LENGTH.getPreferredName());
builder.nullField(Fields.MAX_LENGTH.getPreferredName());
builder.nullField(Fields.AVG_LENGTH.getPreferredName());
builder.field(Fields.ENTROPY.getPreferredName(), 0.0);
if (showDistribution) {
builder.nullField(Fields.DISTRIBUTION.getPreferredName());
}
}
return builder;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), count, minLength, maxLength, totalLength, charOccurrences, showDistribution);
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null || getClass() != obj.getClass()) return false;
if (super.equals(obj) == false) return false;
InternalStringStats other = (InternalStringStats) obj;
return count == other.count
&& minLength == other.minLength
&& maxLength == other.maxLength
&& totalLength == other.totalLength
&& Objects.equals(charOccurrences, other.charOccurrences)
&& showDistribution == other.showDistribution;
}
}
| Fields |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java | {
"start": 97060,
"end": 97408
} | class ____ {
private @Nullable String prop;
PotentiallyConstructorBoundProperties(String notAProperty) {
}
@Nullable String getProp() {
return this.prop;
}
void setProp(@Nullable String prop) {
this.prop = prop;
}
}
@EnableConfigurationProperties(ResourceArrayProperties.class)
static | PotentiallyConstructorBoundProperties |
java | square__retrofit | retrofit/java-test/src/test/java/retrofit2/RetrofitTest.java | {
"start": 3437,
"end": 3780
} | interface ____ {
@GET("/")
<T> Call<T> typeVariable();
@GET("/")
<T extends ResponseBody> Call<T> typeVariableUpperBound();
@GET("/")
<T> Call<List<Map<String, Set<T[]>>>> crazy();
@GET("/")
Call<?> wildcard();
@GET("/")
Call<? extends ResponseBody> wildcardUpperBound();
}
| UnresolvableResponseType |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/cache/config/EnableCachingTests.java | {
"start": 7715,
"end": 7870
} | class ____ {
@Bean
public CacheManager cm1() {
return new NoOpCacheManager();
}
}
@Configuration
@EnableCaching
static | SingleCacheManagerConfig |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/AbstractS3ACommitter.java | {
"start": 5629,
"end": 6121
} | class ____ the
* list of .pendingset files to load and then commit; that can be done
* incrementally and in parallel.
* As a side effect of this change, unless/until changed,
* the commit/abort/revert of all files uploaded by a single task will be
* serialized. This may slow down these operations if there are many files
* created by a few tasks, <i>and</i> the HTTP connection pool in the S3A
* committer was large enough for more all the parallel POST requests.
*/
public abstract | with |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-client/runtime/src/main/java/io/quarkus/restclient/runtime/PathFeatureHandler.java | {
"start": 1142,
"end": 2080
} | class
____ = methodInfo.getDeclaringClass().getAnnotation(Path.class);
if (annotation != null) {
stringBuilder.insert(0, slashify(annotation.value()));
}
// Now make sure there is a leading path, and no duplicates
return MULTIPLE_SLASH_PATTERN.matcher('/' + stringBuilder.toString()).replaceAll("/");
}
String slashify(String path) {
// avoid doubles later. Empty for now
if (path == null || path.isEmpty() || "/".equals(path)) {
return "";
}
// remove doubles
path = MULTIPLE_SLASH_PATTERN.matcher(path).replaceAll("/");
// Label value consistency: result should not end with a slash
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
if (path.isEmpty() || path.startsWith("/")) {
return path;
}
return '/' + path;
}
}
| annotation |
java | google__guava | android/guava/src/com/google/common/primitives/UnsignedBytes.java | {
"start": 11269,
"end": 16013
} | enum ____ implements Comparator<byte[]> {
INSTANCE;
static final boolean BIG_ENDIAN = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN);
/*
* The following static final fields exist for performance reasons.
*
* In UnsignedBytesBenchmark, accessing the following objects via static final fields is the
* fastest (more than twice as fast as the Java implementation, vs ~1.5x with non-final static
* fields, on x86_32) under the Hotspot server compiler. The reason is obviously that the
* non-final fields need to be reloaded inside the loop.
*
* And, no, defining (final or not) local variables out of the loop still isn't as good
* because the null check on the theUnsafe object remains inside the loop and
* BYTE_ARRAY_BASE_OFFSET doesn't get constant-folded.
*
* The compiler can treat static final fields as compile-time constants and can constant-fold
* them while (final or not) local variables are run time values.
*/
static final Unsafe theUnsafe = getUnsafe();
/** The offset to the first element in a byte array. */
static final int BYTE_ARRAY_BASE_OFFSET = theUnsafe.arrayBaseOffset(byte[].class);
static {
// fall back to the safer pure java implementation unless we're in
// a 64-bit JVM with an 8-byte aligned field offset.
if (!(Objects.equals(System.getProperty("sun.arch.data.model"), "64")
&& (BYTE_ARRAY_BASE_OFFSET % 8) == 0
// sanity check - this should never fail
&& theUnsafe.arrayIndexScale(byte[].class) == 1)) {
throw new Error(); // force fallback to PureJavaComparator
}
}
/**
* Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package. Replace with a simple
* call to Unsafe.getUnsafe when integrating into a jdk.
*
* @return a sun.misc.Unsafe
*/
private static Unsafe getUnsafe() {
try {
return Unsafe.getUnsafe();
} catch (SecurityException e) {
// that's okay; try reflection instead
}
try {
return doPrivileged(
(PrivilegedExceptionAction<Unsafe>)
() -> {
Class<Unsafe> k = Unsafe.class;
for (Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x)) {
return k.cast(x);
}
}
throw new NoSuchFieldError("the Unsafe");
});
} catch (PrivilegedActionException e) {
throw new RuntimeException("Could not initialize intrinsics", e.getCause());
}
}
@Override
public int compare(byte[] left, byte[] right) {
int stride = 8;
int minLength = Math.min(left.length, right.length);
int strideLimit = minLength & ~(stride - 1);
int i;
/*
* Compare 8 bytes at a time. Benchmarking on x86 shows a stride of 8 bytes is no slower
* than 4 bytes even on 32-bit. On the other hand, it is substantially faster on 64-bit.
*/
for (i = 0; i < strideLimit; i += stride) {
long lw = theUnsafe.getLong(left, BYTE_ARRAY_BASE_OFFSET + (long) i);
long rw = theUnsafe.getLong(right, BYTE_ARRAY_BASE_OFFSET + (long) i);
if (lw != rw) {
if (BIG_ENDIAN) {
return Long.compareUnsigned(lw, rw);
}
/*
* We want to compare only the first index where left[index] != right[index]. This
* corresponds to the least significant nonzero byte in lw ^ rw, since lw and rw are
* little-endian. Long.numberOfTrailingZeros(diff) tells us the least significant
* nonzero bit, and zeroing out the first three bits of L.nTZ gives us the shift to get
* that least significant nonzero byte.
*/
int n = Long.numberOfTrailingZeros(lw ^ rw) & ~0x7;
return ((int) ((lw >>> n) & UNSIGNED_MASK)) - ((int) ((rw >>> n) & UNSIGNED_MASK));
}
}
// The epilogue to cover the last (minLength % stride) elements.
for (; i < minLength; i++) {
int result = UnsignedBytes.compare(left[i], right[i]);
if (result != 0) {
return result;
}
}
return left.length - right.length;
}
@Override
public String toString() {
return "UnsignedBytes.lexicographicalComparator() (sun.misc.Unsafe version)";
}
}
| UnsafeComparator |
java | quarkusio__quarkus | extensions/reactive-datasource/deployment/src/main/java/io/quarkus/reactive/datasource/deployment/AggregatedDataSourceBuildTimeConfigBuildItem.java | {
"start": 311,
"end": 1356
} | class ____ extends MultiBuildItem {
private final String name;
private final DataSourceBuildTimeConfig dataSourceConfig;
private final DataSourceReactiveBuildTimeConfig reactiveConfig;
private final String dbKind;
AggregatedDataSourceBuildTimeConfigBuildItem(String name, DataSourceBuildTimeConfig dataSourceConfig,
DataSourceReactiveBuildTimeConfig reactiveConfig,
String dbKind) {
this.name = name;
this.dataSourceConfig = dataSourceConfig;
this.reactiveConfig = reactiveConfig;
this.dbKind = dbKind;
}
public String getName() {
return name;
}
public DataSourceBuildTimeConfig getDataSourceConfig() {
return dataSourceConfig;
}
public DataSourceReactiveBuildTimeConfig getReactiveConfig() {
return reactiveConfig;
}
public boolean isDefault() {
return DataSourceUtil.isDefault(name);
}
public String getDbKind() {
return dbKind;
}
}
| AggregatedDataSourceBuildTimeConfigBuildItem |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/validation/beanvalidation/MethodValidationProxyTests.java | {
"start": 8134,
"end": 8436
} | interface ____<T> {
@NotNull
Object myValidMethod(@NotNull(groups = MyGroup.class) String arg1, @Max(10) int arg2);
@MyValid
@Async
void myValidAsyncMethod(@NotNull(groups = OtherGroup.class) String arg1, @Max(10) int arg2);
T myGenericMethod(@NotNull T value);
}
static | MyValidInterface |
java | apache__camel | test-infra/camel-test-infra-infinispan/src/main/java/org/apache/camel/test/infra/infinispan/common/InfinispanProperties.java | {
"start": 873,
"end": 1672
} | class ____ {
public static final String INFINISPAN_CONTAINER = "infinispan.container";
public static final String SERVICE_ADDRESS = "infinispan.service.address";
public static final String SERVICE_HOST = "infinispan.service.host";
public static final String SERVICE_PORT = "infinispan.service.port";
public static final String SERVICE_USERNAME = "infinispan.service.username";
public static final String SERVICE_PASSWORD = "infinispan.service.password";
public static final int DEFAULT_SERVICE_PORT = 11222;
public static final String INFINISPAN_CONTAINER_NETWORK_MODE_HOST = "infinispan.service.network.mode.host";
public static final boolean INFINISPAN_CONTAINER_NETWORK_MODE_HOST_DEFAULT = false;
private InfinispanProperties() {
}
}
| InfinispanProperties |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/engine/cache/SafeKeyGenerator.java | {
"start": 2047,
"end": 2457
} | class ____ implements FactoryPools.Poolable {
@Synthetic final MessageDigest messageDigest;
private final StateVerifier stateVerifier = StateVerifier.newInstance();
PoolableDigestContainer(MessageDigest messageDigest) {
this.messageDigest = messageDigest;
}
@NonNull
@Override
public StateVerifier getVerifier() {
return stateVerifier;
}
}
}
| PoolableDigestContainer |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SqlComponentBuilderFactory.java | {
"start": 1360,
"end": 1779
} | interface ____ {
/**
* SQL (camel-sql)
* Perform SQL queries using Spring JDBC.
*
* Category: database
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-sql
*
* @return the dsl builder
*/
static SqlComponentBuilder sql() {
return new SqlComponentBuilderImpl();
}
/**
* Builder for the SQL component.
*/
| SqlComponentBuilderFactory |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java | {
"start": 13814,
"end": 14181
} | class ____ implements Predicate<DotName> {
public static IgnoreAllowListedPredicate INSTANCE = new IgnoreAllowListedPredicate();
@Override
public boolean test(DotName dotName) {
return DefaultIgnoreTypePredicate.ALLOWED_FROM_IGNORED_PACKAGES.contains(dotName.toString());
}
}
public static | IgnoreAllowListedPredicate |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/scripting/groovy/GroovyScriptFactory.java | {
"start": 3540,
"end": 4341
} | interface ____ can create a custom MetaClass
* to supply missing methods and otherwise change the behavior of the object.
* @param scriptSourceLocator a locator that points to the source of the script.
* Interpreted by the post-processor that actually creates the script.
* @param groovyObjectCustomizer a customizer that can set a custom metaclass
* or make other changes to the GroovyObject created by this factory
* (may be {@code null})
* @see GroovyObjectCustomizer#customize
*/
public GroovyScriptFactory(String scriptSourceLocator, @Nullable GroovyObjectCustomizer groovyObjectCustomizer) {
this(scriptSourceLocator);
this.groovyObjectCustomizer = groovyObjectCustomizer;
}
/**
* Create a new GroovyScriptFactory for the given script source,
* specifying a strategy | that |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BundleChangelogsTask.java | {
"start": 1786,
"end": 10342
} | class ____ extends DefaultTask {
private static final Logger LOGGER = Logging.getLogger(BundleChangelogsTask.class);
private final ConfigurableFileCollection changelogs;
private final RegularFileProperty bundleFile;
private final DirectoryProperty changelogDirectory;
private final DirectoryProperty changelogBundlesDirectory;
private final GitWrapper gitWrapper;
@Nullable
private String branch;
@Nullable
private String bcRef;
private boolean finalize;
@Option(option = "branch", description = "Branch (or other ref) to use for generating the changelog bundle.")
public void setBranch(String branch) {
this.branch = branch;
}
@Option(
option = "bc-ref",
description = "A source ref, typically the sha of a BC, that should be used to source PRs for changelog entries. "
+ "The actual content of the changelogs will come from the 'branch' ref. "
+ "You should generally always use bc-ref."
)
public void setBcRef(String ref) {
this.bcRef = ref;
}
@Option(option = "finalize", description = "Specify that the bundle is finalized, i.e. that the version has been released.")
public void setFinalize(boolean finalize) {
this.finalize = finalize;
}
private static final ObjectMapper yamlMapper = new ObjectMapper(
new YAMLFactory().enable(YAMLGenerator.Feature.MINIMIZE_QUOTES)
.disable(YAMLGenerator.Feature.SPLIT_LINES)
.enable(YAMLGenerator.Feature.INDENT_ARRAYS_WITH_INDICATOR)
.disable(YAMLGenerator.Feature.WRITE_DOC_START_MARKER)
.enable(YAMLGenerator.Feature.LITERAL_BLOCK_STYLE)
).setSerializationInclusion(JsonInclude.Include.NON_NULL);
@Inject
public BundleChangelogsTask(ObjectFactory objectFactory, ExecOperations execOperations) {
changelogs = objectFactory.fileCollection();
bundleFile = objectFactory.fileProperty();
changelogDirectory = objectFactory.directoryProperty();
changelogBundlesDirectory = objectFactory.directoryProperty();
gitWrapper = new GitWrapper(execOperations);
}
/*
Given a branch, and possibly a build candidate commit sha
Check out the changelog yaml files from the branch/BC sha
Then, bundle them all up into one file and write it to disk, along with a timestamp and whether the release is considered released
When using a branch without a BC sha:
- Check out the changelog yaml files from the HEAD of the branch
When using a BC sha:
- Check out the changelog yaml files from the BC commit
- Update those files with any updates from the HEAD of the branch (in case the changelogs get modified later)
- Check for any changelog yaml files that were added AFTER the BC,
but whose PR was merged before the BC (in case someone adds a forgotten changelog after the fact)
*/
@TaskAction
public void executeTask() throws IOException {
if (branch == null) {
throw new IllegalArgumentException("'branch' not specified.");
}
final String upstreamRemote = gitWrapper.getUpstream();
Set<String> entriesFromBc = Set.of();
var didCheckoutChangelogs = false;
try {
var usingBcRef = bcRef != null && bcRef.isEmpty() == false;
if (usingBcRef) {
// Check out all the changelogs that existed at the time of the BC
checkoutChangelogs(gitWrapper, upstreamRemote, bcRef);
entriesFromBc = changelogDirectory.getAsFileTree().getFiles().stream().map(File::getName).collect(Collectors.toSet());
// Then add/update changelogs from the HEAD of the branch
// We do an "add" here, rather than checking out the entire directory, in case changelogs have been removed for some reason
addChangelogsFromRef(gitWrapper, upstreamRemote, branch);
} else {
checkoutChangelogs(gitWrapper, upstreamRemote, branch);
}
didCheckoutChangelogs = true;
Properties props = new Properties();
props.load(
new StringReader(
gitWrapper.runCommand("git", "show", upstreamRemote + "/" + branch + ":build-tools-internal/version.properties")
)
);
String version = props.getProperty("elasticsearch");
LOGGER.info("Finding changelog files for " + version + "...");
Set<String> finalEntriesFromBc = entriesFromBc;
List<ChangelogEntry> entries = changelogDirectory.getAsFileTree().getFiles().stream().filter(f -> {
// When not using a bc ref, we just take everything from the branch/sha passed in
if (usingBcRef == false) {
return true;
}
// If the changelog was present in the BC sha, always use it
if (finalEntriesFromBc.contains(f.getName())) {
return true;
}
// Otherwise, let's check to see if a reference to the PR exists in the commit log for the sha
// This specifically covers the case of a PR being merged into the BC with a missing changelog file, and the file added
// later.
var prNumber = f.getName().replace(".yaml", "");
var output = gitWrapper.runCommand("git", "log", bcRef, "--grep", "(#" + prNumber + ")");
return output.trim().isEmpty() == false;
}).map(ChangelogEntry::parse).sorted(Comparator.comparing(ChangelogEntry::getPr)).collect(toList());
ChangelogBundle bundle = new ChangelogBundle(version, finalize, Instant.now().toString(), entries);
yamlMapper.writeValue(new File("docs/release-notes/changelog-bundles/" + version + ".yml"), bundle);
} finally {
if (didCheckoutChangelogs) {
gitWrapper.runCommand("git", "restore", "-s@", "-SW", "--", changelogDirectory.get().toString());
}
}
}
private void checkoutChangelogs(GitWrapper gitWrapper, String upstream, String ref) {
gitWrapper.updateRemote(upstream);
// If the changelog directory contains modified/new files, we should error out instead of wiping them out silently
var output = gitWrapper.runCommand("git", "status", "--porcelain", changelogDirectory.get().toString()).trim();
if (output.isEmpty() == false) {
throw new IllegalStateException(
"Changelog directory contains changes that will be wiped out by this task:\n" + changelogDirectory.get() + "\n" + output
);
}
gitWrapper.runCommand("rm", "-rf", changelogDirectory.get().toString());
var refSpec = upstream + "/" + ref;
if (ref.contains("upstream/")) {
refSpec = ref.replace("upstream/", upstream + "/");
} else if (ref.matches("^[0-9a-f]+$")) {
refSpec = ref;
}
gitWrapper.runCommand("git", "checkout", refSpec, "--", changelogDirectory.get().toString());
}
private void addChangelogsFromRef(GitWrapper gitWrapper, String upstream, String ref) {
var refSpec = upstream + "/" + ref;
if (ref.contains("upstream/")) {
refSpec = ref.replace("upstream/", upstream + "/");
} else if (ref.matches("^[0-9a-f]+$")) {
refSpec = ref;
}
gitWrapper.runCommand("git", "checkout", refSpec, "--", changelogDirectory.get() + "/*.yaml");
}
@InputDirectory
public DirectoryProperty getChangelogDirectory() {
return changelogDirectory;
}
public void setChangelogDirectory(Directory dir) {
this.changelogDirectory.set(dir);
}
@InputDirectory
public DirectoryProperty getChangelogBundlesDirectory() {
return changelogBundlesDirectory;
}
public void setChangelogBundlesDirectory(Directory dir) {
this.changelogBundlesDirectory.set(dir);
}
@InputFiles
public FileCollection getChangelogs() {
return changelogs;
}
public void setChangelogs(FileCollection files) {
this.changelogs.setFrom(files);
}
@OutputFile
public RegularFileProperty getBundleFile() {
return bundleFile;
}
public void setBundleFile(RegularFile file) {
this.bundleFile.set(file);
}
}
| BundleChangelogsTask |
java | apache__camel | core/camel-xml-jaxp/src/generated/java/org/apache/camel/converter/jaxp/CamelXmlJaxpBulkConverterLoader.java | {
"start": 898,
"end": 46810
} | class ____ implements TypeConverterLoader, BulkTypeConverters, CamelContextAware {
private CamelContext camelContext;
public CamelXmlJaxpBulkConverterLoader() {
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public int size() {
return 101;
}
@Override
public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException {
registry.addBulkTypeConverters(this);
doRegistration(registry);
}
@Override
public <T> T convertTo(Class<?> from, Class<T> to, Exchange exchange, Object value) throws TypeConversionException {
try {
Object obj = doConvertTo(from, to, exchange, value);
return (T) obj;
} catch (TypeConversionException e) {
throw e;
} catch (Exception e) {
throw new TypeConversionException(value, to, e);
}
}
private Object doConvertTo(Class<?> from, Class<?> to, Exchange exchange, Object value) throws Exception {
if (to == byte[].class) {
if (value instanceof org.w3c.dom.NodeList) {
return getDomConverter().toByteArray((org.w3c.dom.NodeList) value, exchange);
}
if (value instanceof javax.xml.transform.Source) {
return getXmlConverter().toByteArray((javax.xml.transform.Source) value, exchange);
}
} else if (to == java.io.InputStream.class) {
if (value instanceof org.w3c.dom.NodeList) {
return getDomConverter().toInputStream((org.w3c.dom.NodeList) value, exchange);
}
if (value instanceof javax.xml.stream.XMLStreamReader) {
return getStaxConverter().createInputStream((javax.xml.stream.XMLStreamReader) value, exchange);
}
if (value instanceof javax.xml.transform.dom.DOMSource) {
return getXmlConverter().toInputStream((javax.xml.transform.dom.DOMSource) value, exchange);
}
if (value instanceof org.w3c.dom.Document) {
return getXmlConverter().toInputStream((org.w3c.dom.Document) value, exchange);
}
if (value instanceof javax.xml.transform.stream.StreamSource) {
return getXmlConverter().toInputStream((javax.xml.transform.stream.StreamSource) value);
}
} else if (to == java.io.Reader.class) {
if (value instanceof javax.xml.stream.XMLStreamReader) {
return getStaxConverter().createReader((javax.xml.stream.XMLStreamReader) value, exchange);
}
if (value instanceof javax.xml.transform.stream.StreamSource) {
return getXmlConverter().toReader((javax.xml.transform.stream.StreamSource) value);
}
if (value instanceof javax.xml.transform.Source) {
return getXmlConverter().toReaderFromSource((javax.xml.transform.Source) value, exchange);
}
} else if (to == java.io.Serializable.class) {
if (value instanceof org.apache.camel.StreamCache) {
return org.apache.camel.util.xml.StreamSourceConverter.convertToSerializable((org.apache.camel.StreamCache) value, exchange);
}
} else if (to == java.lang.Boolean.class || to == boolean.class) {
if (value instanceof org.w3c.dom.NodeList) {
return getXmlConverter().toBoolean((org.w3c.dom.NodeList) value);
}
} else if (to == java.lang.Integer.class || to == int.class) {
if (value instanceof org.w3c.dom.NodeList) {
return org.apache.camel.converter.jaxp.DomConverter.toInteger((org.w3c.dom.NodeList) value);
}
} else if (to == java.lang.Long.class || to == long.class) {
if (value instanceof org.w3c.dom.NodeList) {
return org.apache.camel.converter.jaxp.DomConverter.toLong((org.w3c.dom.NodeList) value);
}
} else if (to == java.lang.String.class) {
if (value instanceof org.w3c.dom.NodeList) {
return getDomConverter().toString((org.w3c.dom.NodeList) value, exchange);
}
if (value instanceof org.w3c.dom.Node) {
return getDomConverter().toString((org.w3c.dom.Node) value, exchange);
}
if (value instanceof javax.xml.transform.Source) {
return getXmlConverter().toString((javax.xml.transform.Source) value, exchange);
}
} else if (to == java.util.List.class) {
if (value instanceof org.w3c.dom.NodeList) {
return org.apache.camel.converter.jaxp.DomConverter.toList((org.w3c.dom.NodeList) value);
}
} else if (to == javax.xml.namespace.QName.class) {
if (value instanceof java.lang.String) {
return getXmlConverter().toQName((java.lang.String) value);
}
} else if (to == javax.xml.stream.XMLEventReader.class) {
if (value instanceof java.io.InputStream) {
return getStaxConverter().createXMLEventReader((java.io.InputStream) value, exchange);
}
if (value instanceof java.io.File) {
return getStaxConverter().createXMLEventReader((java.io.File) value, exchange);
}
if (value instanceof java.io.Reader) {
return getStaxConverter().createXMLEventReader((java.io.Reader) value);
}
if (value instanceof javax.xml.stream.XMLStreamReader) {
return getStaxConverter().createXMLEventReader((javax.xml.stream.XMLStreamReader) value);
}
if (value instanceof javax.xml.transform.Source) {
return getStaxConverter().createXMLEventReader((javax.xml.transform.Source) value);
}
} else if (to == javax.xml.stream.XMLEventWriter.class) {
if (value instanceof java.io.OutputStream) {
return getStaxConverter().createXMLEventWriter((java.io.OutputStream) value, exchange);
}
if (value instanceof java.io.Writer) {
return getStaxConverter().createXMLEventWriter((java.io.Writer) value);
}
if (value instanceof javax.xml.transform.Result) {
return getStaxConverter().createXMLEventWriter((javax.xml.transform.Result) value);
}
} else if (to == javax.xml.stream.XMLStreamReader.class) {
if (value instanceof java.io.InputStream) {
return getStaxConverter().createXMLStreamReader((java.io.InputStream) value, exchange);
}
if (value instanceof java.io.File) {
return getStaxConverter().createXMLStreamReader((java.io.File) value, exchange);
}
if (value instanceof java.io.Reader) {
return getStaxConverter().createXMLStreamReader((java.io.Reader) value);
}
if (value instanceof javax.xml.transform.Source) {
return getStaxConverter().createXMLStreamReader((javax.xml.transform.Source) value);
}
if (value instanceof java.lang.String) {
return getStaxConverter().createXMLStreamReader((java.lang.String) value);
}
} else if (to == javax.xml.stream.XMLStreamWriter.class) {
if (value instanceof java.io.OutputStream) {
return getStaxConverter().createXMLStreamWriter((java.io.OutputStream) value, exchange);
}
if (value instanceof java.io.Writer) {
return getStaxConverter().createXMLStreamWriter((java.io.Writer) value);
}
if (value instanceof javax.xml.transform.Result) {
return getStaxConverter().createXMLStreamWriter((javax.xml.transform.Result) value);
}
} else if (to == javax.xml.transform.Source.class) {
if (value instanceof java.lang.String) {
return getXmlConverter().toSource((java.lang.String) value);
}
if (value instanceof byte[]) {
return getXmlConverter().toSource((byte[]) value);
}
if (value instanceof org.w3c.dom.Document) {
return getXmlConverter().toSource((org.w3c.dom.Document) value);
}
if (value instanceof org.apache.camel.StreamCache) {
return getXmlConverter().toSource((org.apache.camel.StreamCache) value, exchange);
}
} else if (to == javax.xml.transform.dom.DOMSource.class) {
if (value instanceof org.w3c.dom.Document) {
return getXmlConverter().toDOMSource((org.w3c.dom.Document) value);
}
if (value instanceof org.w3c.dom.Node) {
return getXmlConverter().toDOMSource((org.w3c.dom.Node) value);
}
if (value instanceof java.lang.String) {
return getXmlConverter().toDOMSource((java.lang.String) value);
}
if (value instanceof byte[]) {
return getXmlConverter().toDOMSource((byte[]) value);
}
if (value instanceof org.apache.camel.StreamCache) {
return getXmlConverter().toDOMSource((org.apache.camel.StreamCache) value, exchange);
}
if (value instanceof java.io.InputStream) {
return getXmlConverter().toDOMSource((java.io.InputStream) value, exchange);
}
if (value instanceof java.io.File) {
return getXmlConverter().toDOMSource((java.io.File) value, exchange);
}
if (value instanceof java.nio.file.Path) {
return getXmlConverter().toDOMSource((java.nio.file.Path) value, exchange);
}
if (value instanceof javax.xml.transform.stream.StreamSource) {
return getXmlConverter().toDOMSourceFromStream((javax.xml.transform.stream.StreamSource) value, exchange);
}
if (value instanceof javax.xml.transform.sax.SAXSource) {
return getXmlConverter().toDOMSourceFromSAX((javax.xml.transform.sax.SAXSource) value);
}
if (value instanceof javax.xml.transform.stax.StAXSource) {
return getXmlConverter().toDOMSourceFromStAX((javax.xml.transform.stax.StAXSource) value);
}
if (value instanceof javax.xml.transform.Source) {
return getXmlConverter().toDOMSource((javax.xml.transform.Source) value, exchange);
}
} else if (to == javax.xml.transform.sax.SAXSource.class) {
if (value instanceof java.lang.String) {
return getXmlConverter().toSAXSource((java.lang.String) value, exchange);
}
if (value instanceof java.io.InputStream) {
return getXmlConverter().toSAXSource((java.io.InputStream) value, exchange);
}
if (value instanceof byte[]) {
return getXmlConverter().toSAXSource((byte[]) value, exchange);
}
if (value instanceof java.io.File) {
return getXmlConverter().toSAXSource((java.io.File) value, exchange);
}
if (value instanceof java.nio.file.Path) {
return getXmlConverter().toSAXSource((java.nio.file.Path) value, exchange);
}
if (value instanceof javax.xml.transform.stream.StreamSource) {
return getXmlConverter().toSAXSourceFromStream((javax.xml.transform.stream.StreamSource) value, exchange);
}
if (value instanceof javax.xml.transform.dom.DOMSource) {
return getXmlConverter().toSAXSourceFromDOM((javax.xml.transform.dom.DOMSource) value, exchange);
}
if (value instanceof javax.xml.transform.stax.StAXSource) {
return getXmlConverter().toSAXSourceFromStAX((javax.xml.transform.stax.StAXSource) value, exchange);
}
if (value instanceof javax.xml.transform.Source) {
return getXmlConverter().toSAXSource((javax.xml.transform.Source) value, exchange);
}
} else if (to == javax.xml.transform.stax.StAXSource.class) {
if (value instanceof java.lang.String) {
return getXmlConverter().toStAXSource((java.lang.String) value, exchange);
}
if (value instanceof byte[]) {
return getXmlConverter().toStAXSource((byte[]) value, exchange);
}
if (value instanceof java.io.InputStream) {
return getXmlConverter().toStAXSource((java.io.InputStream) value, exchange);
}
if (value instanceof java.io.File) {
return getXmlConverter().toStAXSource((java.io.File) value, exchange);
}
if (value instanceof java.nio.file.Path) {
return getXmlConverter().toStAXSource((java.nio.file.Path) value, exchange);
}
} else if (to == javax.xml.transform.stream.StreamSource.class) {
if (value instanceof java.lang.String) {
return getXmlConverter().toStreamSource((java.lang.String) value);
}
if (value instanceof java.io.InputStream) {
return getXmlConverter().toStreamSource((java.io.InputStream) value);
}
if (value instanceof java.io.Reader) {
return getXmlConverter().toStreamSource((java.io.Reader) value);
}
if (value instanceof java.io.File) {
return getXmlConverter().toStreamSource((java.io.File) value);
}
if (value instanceof java.nio.file.Path) {
return getXmlConverter().toStreamSource((java.nio.file.Path) value);
}
if (value instanceof byte[]) {
return getXmlConverter().toStreamSource((byte[]) value, exchange);
}
if (value instanceof java.nio.ByteBuffer) {
return getXmlConverter().toStreamSource((java.nio.ByteBuffer) value, exchange);
}
if (value instanceof javax.xml.transform.sax.SAXSource) {
return getXmlConverter().toStreamSourceFromSAX((javax.xml.transform.sax.SAXSource) value, exchange);
}
if (value instanceof javax.xml.transform.dom.DOMSource) {
return getXmlConverter().toStreamSourceFromDOM((javax.xml.transform.dom.DOMSource) value, exchange);
}
if (value instanceof javax.xml.transform.stax.StAXSource) {
return getXmlConverter().toStreamSourceFromStAX((javax.xml.transform.stax.StAXSource) value, exchange);
}
if (value instanceof javax.xml.transform.Source) {
return getXmlConverter().toStreamSource((javax.xml.transform.Source) value, exchange);
}
} else if (to == org.apache.camel.StreamCache.class) {
if (value instanceof org.apache.camel.util.xml.BytesSource) {
return org.apache.camel.util.xml.StreamSourceConverter.convertToStreamCache((org.apache.camel.util.xml.BytesSource) value);
}
if (value instanceof javax.xml.transform.stream.StreamSource) {
return org.apache.camel.util.xml.StreamSourceConverter.convertToStreamCache((javax.xml.transform.stream.StreamSource) value, exchange);
}
if (value instanceof javax.xml.transform.sax.SAXSource) {
return org.apache.camel.util.xml.StreamSourceConverter.convertToStreamCache((javax.xml.transform.sax.SAXSource) value, exchange);
}
} else if (to == org.apache.camel.util.xml.BytesSource.class) {
if (value instanceof byte[]) {
return getXmlConverter().toBytesSource((byte[]) value);
}
} else if (to == org.apache.camel.util.xml.StringSource.class) {
if (value instanceof java.lang.String) {
return getXmlConverter().toStringSource((java.lang.String) value);
}
} else if (to == org.w3c.dom.Document.class) {
if (value instanceof org.w3c.dom.Node) {
return getXmlConverter().toDOMDocument((org.w3c.dom.Node) value);
}
if (value instanceof byte[]) {
return getXmlConverter().toDOMDocument((byte[]) value, exchange);
}
if (value instanceof org.apache.camel.StreamCache) {
return getXmlConverter().toDOMDocument((org.apache.camel.StreamCache) value, exchange);
}
if (value instanceof java.io.InputStream) {
return getXmlConverter().toDOMDocument((java.io.InputStream) value, exchange);
}
if (value instanceof java.io.Reader) {
return getXmlConverter().toDOMDocument((java.io.Reader) value, exchange);
}
if (value instanceof org.xml.sax.InputSource) {
return getXmlConverter().toDOMDocument((org.xml.sax.InputSource) value, exchange);
}
if (value instanceof java.lang.String) {
return getXmlConverter().toDOMDocument((java.lang.String) value, exchange);
}
if (value instanceof java.io.File) {
return getXmlConverter().toDOMDocument((java.io.File) value, exchange);
}
if (value instanceof java.nio.file.Path) {
return getXmlConverter().toDOMDocument((java.nio.file.Path) value, exchange);
}
if (value instanceof javax.xml.transform.Source) {
return getXmlConverter().toDOMDocument((javax.xml.transform.Source) value);
}
if (value instanceof org.w3c.dom.NodeList) {
Object obj = getXmlConverter().toDOMDocumentFromSingleNodeList((org.w3c.dom.NodeList) value);
if (obj == null) {
return Void.class;
} else {
return obj;
}
}
} else if (to == org.w3c.dom.Element.class) {
if (value instanceof javax.xml.transform.Source) {
return getXmlConverter().toDOMElement((javax.xml.transform.Source) value);
}
if (value instanceof org.w3c.dom.Node) {
return getXmlConverter().toDOMElement((org.w3c.dom.Node) value);
}
} else if (to == org.w3c.dom.Node.class) {
if (value instanceof javax.xml.transform.sax.SAXSource) {
return getXmlConverter().toDOMNodeFromSAX((javax.xml.transform.sax.SAXSource) value);
}
if (value instanceof javax.xml.transform.stax.StAXSource) {
return getXmlConverter().toDOMNodeFromStAX((javax.xml.transform.stax.StAXSource) value);
}
if (value instanceof org.w3c.dom.NodeList) {
Object obj = getXmlConverter().toDOMNodeFromSingleNodeList((org.w3c.dom.NodeList) value);
if (obj == null) {
return Void.class;
} else {
return obj;
}
}
if (value instanceof javax.xml.transform.Source) {
Object obj = getXmlConverter().toDOMNode((javax.xml.transform.Source) value);
if (obj == null) {
return Void.class;
} else {
return obj;
}
}
} else if (to == org.xml.sax.InputSource.class) {
if (value instanceof java.io.InputStream) {
return getXmlConverter().toInputSource((java.io.InputStream) value, exchange);
}
if (value instanceof java.io.File) {
return getXmlConverter().toInputSource((java.io.File) value, exchange);
}
if (value instanceof java.nio.file.Path) {
return getXmlConverter().toInputSource((java.nio.file.Path) value, exchange);
}
}
return null;
}
private void doRegistration(TypeConverterRegistry registry) {
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, byte[].class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, byte[].class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, java.io.InputStream.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.stream.XMLStreamReader.class, java.io.InputStream.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.dom.DOMSource.class, java.io.InputStream.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.Document.class, java.io.InputStream.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stream.StreamSource.class, java.io.InputStream.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.stream.XMLStreamReader.class, java.io.Reader.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stream.StreamSource.class, java.io.Reader.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, java.io.Reader.class), this);
registry.addConverter(new TypeConvertible<>(org.apache.camel.StreamCache.class, java.io.Serializable.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, java.lang.Boolean.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, java.lang.Integer.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, java.lang.Long.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, java.lang.String.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.Node.class, java.lang.String.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, java.lang.String.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, java.util.List.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, javax.xml.namespace.QName.class), this);
registry.addConverter(new TypeConvertible<>(java.io.InputStream.class, javax.xml.stream.XMLEventReader.class), this);
registry.addConverter(new TypeConvertible<>(java.io.File.class, javax.xml.stream.XMLEventReader.class), this);
registry.addConverter(new TypeConvertible<>(java.io.Reader.class, javax.xml.stream.XMLEventReader.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.stream.XMLStreamReader.class, javax.xml.stream.XMLEventReader.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, javax.xml.stream.XMLEventReader.class), this);
registry.addConverter(new TypeConvertible<>(java.io.OutputStream.class, javax.xml.stream.XMLEventWriter.class), this);
registry.addConverter(new TypeConvertible<>(java.io.Writer.class, javax.xml.stream.XMLEventWriter.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Result.class, javax.xml.stream.XMLEventWriter.class), this);
registry.addConverter(new TypeConvertible<>(java.io.InputStream.class, javax.xml.stream.XMLStreamReader.class), this);
registry.addConverter(new TypeConvertible<>(java.io.File.class, javax.xml.stream.XMLStreamReader.class), this);
registry.addConverter(new TypeConvertible<>(java.io.Reader.class, javax.xml.stream.XMLStreamReader.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, javax.xml.stream.XMLStreamReader.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, javax.xml.stream.XMLStreamReader.class), this);
registry.addConverter(new TypeConvertible<>(java.io.OutputStream.class, javax.xml.stream.XMLStreamWriter.class), this);
registry.addConverter(new TypeConvertible<>(java.io.Writer.class, javax.xml.stream.XMLStreamWriter.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Result.class, javax.xml.stream.XMLStreamWriter.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, javax.xml.transform.Source.class), this);
registry.addConverter(new TypeConvertible<>(byte[].class, javax.xml.transform.Source.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.Document.class, javax.xml.transform.Source.class), this);
registry.addConverter(new TypeConvertible<>(org.apache.camel.StreamCache.class, javax.xml.transform.Source.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.Document.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.Node.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(byte[].class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(org.apache.camel.StreamCache.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.InputStream.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.File.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(java.nio.file.Path.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stream.StreamSource.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.sax.SAXSource.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stax.StAXSource.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, javax.xml.transform.dom.DOMSource.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.InputStream.class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(byte[].class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.File.class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(java.nio.file.Path.class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stream.StreamSource.class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.dom.DOMSource.class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stax.StAXSource.class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, javax.xml.transform.sax.SAXSource.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, javax.xml.transform.stax.StAXSource.class), this);
registry.addConverter(new TypeConvertible<>(byte[].class, javax.xml.transform.stax.StAXSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.InputStream.class, javax.xml.transform.stax.StAXSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.File.class, javax.xml.transform.stax.StAXSource.class), this);
registry.addConverter(new TypeConvertible<>(java.nio.file.Path.class, javax.xml.transform.stax.StAXSource.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.InputStream.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.Reader.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.File.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(java.nio.file.Path.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(byte[].class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(java.nio.ByteBuffer.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.sax.SAXSource.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.dom.DOMSource.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stax.StAXSource.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, javax.xml.transform.stream.StreamSource.class), this);
registry.addConverter(new TypeConvertible<>(org.apache.camel.util.xml.BytesSource.class, org.apache.camel.StreamCache.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stream.StreamSource.class, org.apache.camel.StreamCache.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.sax.SAXSource.class, org.apache.camel.StreamCache.class), this);
registry.addConverter(new TypeConvertible<>(byte[].class, org.apache.camel.util.xml.BytesSource.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, org.apache.camel.util.xml.StringSource.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.Node.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(byte[].class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(org.apache.camel.StreamCache.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(java.io.InputStream.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(java.io.Reader.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(org.xml.sax.InputSource.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(java.lang.String.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(java.io.File.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(java.nio.file.Path.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, org.w3c.dom.Document.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, org.w3c.dom.Element.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.Node.class, org.w3c.dom.Element.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.sax.SAXSource.class, org.w3c.dom.Node.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.stax.StAXSource.class, org.w3c.dom.Node.class), this);
registry.addConverter(new TypeConvertible<>(org.w3c.dom.NodeList.class, org.w3c.dom.Node.class), this);
registry.addConverter(new TypeConvertible<>(javax.xml.transform.Source.class, org.w3c.dom.Node.class), this);
registry.addConverter(new TypeConvertible<>(java.io.InputStream.class, org.xml.sax.InputSource.class), this);
registry.addConverter(new TypeConvertible<>(java.io.File.class, org.xml.sax.InputSource.class), this);
registry.addConverter(new TypeConvertible<>(java.nio.file.Path.class, org.xml.sax.InputSource.class), this);
}
public TypeConverter lookup(Class<?> to, Class<?> from) {
if (to == byte[].class) {
if (from == org.w3c.dom.NodeList.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
} else if (to == java.io.InputStream.class) {
if (from == org.w3c.dom.NodeList.class) {
return this;
}
if (from == javax.xml.stream.XMLStreamReader.class) {
return this;
}
if (from == javax.xml.transform.dom.DOMSource.class) {
return this;
}
if (from == org.w3c.dom.Document.class) {
return this;
}
if (from == javax.xml.transform.stream.StreamSource.class) {
return this;
}
} else if (to == java.io.Reader.class) {
if (from == javax.xml.stream.XMLStreamReader.class) {
return this;
}
if (from == javax.xml.transform.stream.StreamSource.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
} else if (to == java.io.Serializable.class) {
if (from == org.apache.camel.StreamCache.class) {
return this;
}
} else if (to == java.lang.Boolean.class || to == boolean.class) {
if (from == org.w3c.dom.NodeList.class) {
return this;
}
} else if (to == java.lang.Integer.class || to == int.class) {
if (from == org.w3c.dom.NodeList.class) {
return this;
}
} else if (to == java.lang.Long.class || to == long.class) {
if (from == org.w3c.dom.NodeList.class) {
return this;
}
} else if (to == java.lang.String.class) {
if (from == org.w3c.dom.NodeList.class) {
return this;
}
if (from == org.w3c.dom.Node.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
} else if (to == java.util.List.class) {
if (from == org.w3c.dom.NodeList.class) {
return this;
}
} else if (to == javax.xml.namespace.QName.class) {
if (from == java.lang.String.class) {
return this;
}
} else if (to == javax.xml.stream.XMLEventReader.class) {
if (from == java.io.InputStream.class) {
return this;
}
if (from == java.io.File.class) {
return this;
}
if (from == java.io.Reader.class) {
return this;
}
if (from == javax.xml.stream.XMLStreamReader.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
} else if (to == javax.xml.stream.XMLEventWriter.class) {
if (from == java.io.OutputStream.class) {
return this;
}
if (from == java.io.Writer.class) {
return this;
}
if (from == javax.xml.transform.Result.class) {
return this;
}
} else if (to == javax.xml.stream.XMLStreamReader.class) {
if (from == java.io.InputStream.class) {
return this;
}
if (from == java.io.File.class) {
return this;
}
if (from == java.io.Reader.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
if (from == java.lang.String.class) {
return this;
}
} else if (to == javax.xml.stream.XMLStreamWriter.class) {
if (from == java.io.OutputStream.class) {
return this;
}
if (from == java.io.Writer.class) {
return this;
}
if (from == javax.xml.transform.Result.class) {
return this;
}
} else if (to == javax.xml.transform.Source.class) {
if (from == java.lang.String.class) {
return this;
}
if (from == byte[].class) {
return this;
}
if (from == org.w3c.dom.Document.class) {
return this;
}
if (from == org.apache.camel.StreamCache.class) {
return this;
}
} else if (to == javax.xml.transform.dom.DOMSource.class) {
if (from == org.w3c.dom.Document.class) {
return this;
}
if (from == org.w3c.dom.Node.class) {
return this;
}
if (from == java.lang.String.class) {
return this;
}
if (from == byte[].class) {
return this;
}
if (from == org.apache.camel.StreamCache.class) {
return this;
}
if (from == java.io.InputStream.class) {
return this;
}
if (from == java.io.File.class) {
return this;
}
if (from == java.nio.file.Path.class) {
return this;
}
if (from == javax.xml.transform.stream.StreamSource.class) {
return this;
}
if (from == javax.xml.transform.sax.SAXSource.class) {
return this;
}
if (from == javax.xml.transform.stax.StAXSource.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
} else if (to == javax.xml.transform.sax.SAXSource.class) {
if (from == java.lang.String.class) {
return this;
}
if (from == java.io.InputStream.class) {
return this;
}
if (from == byte[].class) {
return this;
}
if (from == java.io.File.class) {
return this;
}
if (from == java.nio.file.Path.class) {
return this;
}
if (from == javax.xml.transform.stream.StreamSource.class) {
return this;
}
if (from == javax.xml.transform.dom.DOMSource.class) {
return this;
}
if (from == javax.xml.transform.stax.StAXSource.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
} else if (to == javax.xml.transform.stax.StAXSource.class) {
if (from == java.lang.String.class) {
return this;
}
if (from == byte[].class) {
return this;
}
if (from == java.io.InputStream.class) {
return this;
}
if (from == java.io.File.class) {
return this;
}
if (from == java.nio.file.Path.class) {
return this;
}
} else if (to == javax.xml.transform.stream.StreamSource.class) {
if (from == java.lang.String.class) {
return this;
}
if (from == java.io.InputStream.class) {
return this;
}
if (from == java.io.Reader.class) {
return this;
}
if (from == java.io.File.class) {
return this;
}
if (from == java.nio.file.Path.class) {
return this;
}
if (from == byte[].class) {
return this;
}
if (from == java.nio.ByteBuffer.class) {
return this;
}
if (from == javax.xml.transform.sax.SAXSource.class) {
return this;
}
if (from == javax.xml.transform.dom.DOMSource.class) {
return this;
}
if (from == javax.xml.transform.stax.StAXSource.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
} else if (to == org.apache.camel.StreamCache.class) {
if (from == org.apache.camel.util.xml.BytesSource.class) {
return this;
}
if (from == javax.xml.transform.stream.StreamSource.class) {
return this;
}
if (from == javax.xml.transform.sax.SAXSource.class) {
return this;
}
} else if (to == org.apache.camel.util.xml.BytesSource.class) {
if (from == byte[].class) {
return this;
}
} else if (to == org.apache.camel.util.xml.StringSource.class) {
if (from == java.lang.String.class) {
return this;
}
} else if (to == org.w3c.dom.Document.class) {
if (from == org.w3c.dom.Node.class) {
return this;
}
if (from == byte[].class) {
return this;
}
if (from == org.apache.camel.StreamCache.class) {
return this;
}
if (from == java.io.InputStream.class) {
return this;
}
if (from == java.io.Reader.class) {
return this;
}
if (from == org.xml.sax.InputSource.class) {
return this;
}
if (from == java.lang.String.class) {
return this;
}
if (from == java.io.File.class) {
return this;
}
if (from == java.nio.file.Path.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
if (from == org.w3c.dom.NodeList.class) {
return this;
}
} else if (to == org.w3c.dom.Element.class) {
if (from == javax.xml.transform.Source.class) {
return this;
}
if (from == org.w3c.dom.Node.class) {
return this;
}
} else if (to == org.w3c.dom.Node.class) {
if (from == javax.xml.transform.sax.SAXSource.class) {
return this;
}
if (from == javax.xml.transform.stax.StAXSource.class) {
return this;
}
if (from == org.w3c.dom.NodeList.class) {
return this;
}
if (from == javax.xml.transform.Source.class) {
return this;
}
} else if (to == org.xml.sax.InputSource.class) {
if (from == java.io.InputStream.class) {
return this;
}
if (from == java.io.File.class) {
return this;
}
if (from == java.nio.file.Path.class) {
return this;
}
}
return null;
}
private volatile org.apache.camel.converter.jaxp.DomConverter domConverter;
private org.apache.camel.converter.jaxp.DomConverter getDomConverter() {
if (domConverter == null) {
domConverter = new org.apache.camel.converter.jaxp.DomConverter();
CamelContextAware.trySetCamelContext(domConverter, camelContext);
}
return domConverter;
}
private volatile org.apache.camel.converter.jaxp.StaxConverter staxConverter;
private org.apache.camel.converter.jaxp.StaxConverter getStaxConverter() {
if (staxConverter == null) {
staxConverter = new org.apache.camel.converter.jaxp.StaxConverter();
CamelContextAware.trySetCamelContext(staxConverter, camelContext);
}
return staxConverter;
}
private volatile org.apache.camel.converter.jaxp.XmlConverter xmlConverter;
private org.apache.camel.converter.jaxp.XmlConverter getXmlConverter() {
if (xmlConverter == null) {
xmlConverter = new org.apache.camel.converter.jaxp.XmlConverter();
CamelContextAware.trySetCamelContext(xmlConverter, camelContext);
}
return xmlConverter;
}
}
| CamelXmlJaxpBulkConverterLoader |
java | quarkusio__quarkus | extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/service/internalcache/QuarkusInternalCacheFactoryInitiator.java | {
"start": 1723,
"end": 2138
} | class ____ implements InternalCacheFactory {
@Override
public <K, V> InternalCache<K, V> createInternalCache(int intendedApproximateSize) {
final Cache<K, V> caffeineCache = Caffeine.newBuilder()
.maximumSize(intendedApproximateSize)
.build();
return new QuarkusInternalCache<>(caffeineCache);
}
}
}
| QuarkusInternalCacheFactory |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_isEqualTo_with_enums_Test.java | {
"start": 7814,
"end": 8006
} | class ____ {
String name;
String jobTitle;
public Employee(String name, String jobTitle) {
this.name = name;
this.jobTitle = jobTitle;
}
}
public static | Employee |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/TestLogicalType.java | {
"start": 1494,
"end": 13929
} | class ____ {
@Test
void decimalFromSchema() {
Schema schema = Schema.createFixed("aFixed", null, null, 4);
schema.addProp("logicalType", "decimal");
schema.addProp("precision", 9);
schema.addProp("scale", 2);
LogicalType logicalType = LogicalTypes.fromSchemaIgnoreInvalid(schema);
assertTrue(logicalType instanceof LogicalTypes.Decimal, "Should be a Decimal");
LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) logicalType;
assertEquals(9, decimal.getPrecision(), "Should have correct precision");
assertEquals(2, decimal.getScale(), "Should have correct scale");
}
@Test
void invalidLogicalTypeIgnored() {
final Schema schema = Schema.createFixed("aFixed", null, null, 2);
schema.addProp("logicalType", "decimal");
schema.addProp("precision", 9);
schema.addProp("scale", 2);
assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Should ignore invalid logical type");
}
@Test
void decimalWithNonByteArrayTypes() {
final LogicalType decimal = LogicalTypes.decimal(5, 2);
// test simple types
Schema[] nonBytes = new Schema[] { Schema.createRecord("Record", null, null, false),
Schema.createArray(Schema.create(Schema.Type.BYTES)), Schema.createMap(Schema.create(Schema.Type.BYTES)),
Schema.createEnum("Enum", null, null, Arrays.asList("a", "b")),
Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.BYTES), Schema.createFixed("fixed", null, null, 4))),
Schema.create(Schema.Type.BOOLEAN), Schema.create(Schema.Type.INT), Schema.create(Schema.Type.LONG),
Schema.create(Schema.Type.FLOAT), Schema.create(Schema.Type.DOUBLE), Schema.create(Schema.Type.NULL),
Schema.create(Schema.Type.STRING) };
for (final Schema schema : nonBytes) {
assertThrows("Should reject type: " + schema.getType(), IllegalArgumentException.class,
"Logical type decimal must be backed by fixed or bytes", () -> {
decimal.addToSchema(schema);
return null;
});
}
}
@Test
void unknownFromJsonNode() {
Schema schema = Schema.create(Schema.Type.STRING);
schema.addProp("logicalType", "unknown");
schema.addProp("someProperty", 34);
LogicalType logicalType = LogicalTypes.fromSchemaIgnoreInvalid(schema);
assertNull(logicalType, "Should not return a LogicalType instance");
}
@Test
void decimalBytesHasNoPrecisionLimit() {
Schema schema = Schema.create(Schema.Type.BYTES);
// precision is not limited for bytes
LogicalTypes.decimal(Integer.MAX_VALUE).addToSchema(schema);
assertEquals(Integer.MAX_VALUE,
((LogicalTypes.Decimal) LogicalTypes.fromSchemaIgnoreInvalid(schema)).getPrecision(),
"Precision should be an Integer.MAX_VALUE");
}
@Test
void decimalFixedPrecisionLimit() {
// 4 bytes can hold up to 9 digits of precision
final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
assertThrows("Should reject precision", IllegalArgumentException.class, "fixed(4) cannot store 10 digits (max 9)",
() -> {
LogicalTypes.decimal(10).addToSchema(schema);
return null;
});
assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
// 129 bytes can hold up to 310 digits of precision
final Schema schema129 = Schema.createFixed("aDecimal", null, null, 129);
assertThrows("Should reject precision", IllegalArgumentException.class,
"fixed(129) cannot store 311 digits (max 310)", () -> {
LogicalTypes.decimal(311).addToSchema(schema129);
return null;
});
assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema129), "Invalid logical type should not be set on schema");
}
@Test
void decimalFailsWithZeroPrecision() {
final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
assertThrows("Should reject precision", IllegalArgumentException.class,
"Invalid decimal precision: 0 (must be positive)", () -> {
LogicalTypes.decimal(0).addToSchema(schema);
return null;
});
assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
}
@Test
void decimalFailsWithNegativePrecision() {
final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
assertThrows("Should reject precision", IllegalArgumentException.class,
"Invalid decimal precision: -9 (must be positive)", () -> {
LogicalTypes.decimal(-9).addToSchema(schema);
return null;
});
assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
}
@Test
void decimalScaleBoundedByPrecision() {
final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
assertThrows("Should reject precision", IllegalArgumentException.class,
"Invalid decimal scale: 10 (greater than precision: 9)", () -> {
LogicalTypes.decimal(9, 10).addToSchema(schema);
return null;
});
assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
}
@Test
void decimalFailsWithNegativeScale() {
final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
assertThrows("Should reject precision", IllegalArgumentException.class,
"Invalid decimal scale: -2 (must be positive)", () -> {
LogicalTypes.decimal(9, -2).addToSchema(schema);
return null;
});
assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
}
@Test
void schemaRejectsSecondLogicalType() {
final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
LogicalTypes.decimal(9).addToSchema(schema);
assertThrows("Should reject second logical type", AvroRuntimeException.class, "Can't overwrite property: scale",
() -> {
LogicalTypes.decimal(9, 2).addToSchema(schema);
return null;
});
assertEquals(LogicalTypes.decimal(9), LogicalTypes.fromSchemaIgnoreInvalid(schema),
"First logical type should still be set on schema");
}
@Test
void decimalDefaultScale() {
Schema schema = Schema.createFixed("aDecimal", null, null, 4);
// 4 bytes can hold up to 9 digits of precision
LogicalTypes.decimal(9).addToSchema(schema);
assertEquals(0, ((LogicalTypes.Decimal) LogicalTypes.fromSchemaIgnoreInvalid(schema)).getScale(),
"Scale should be a 0");
}
@Test
void fixedDecimalToFromJson() {
Schema schema = Schema.createFixed("aDecimal", null, null, 4);
LogicalTypes.decimal(9, 2).addToSchema(schema);
Schema parsed = new Schema.Parser().parse(schema.toString(true));
assertEquals(schema, parsed, "Constructed and parsed schemas should match");
}
@Test
void bytesDecimalToFromJson() {
Schema schema = Schema.create(Schema.Type.BYTES);
LogicalTypes.decimal(9, 2).addToSchema(schema);
Schema parsed = new Schema.Parser().parse(schema.toString(true));
assertEquals(schema, parsed, "Constructed and parsed schemas should match");
}
@Test
void uuidExtendsString() {
Schema uuidSchema = LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING));
assertEquals(LogicalTypes.uuid(), uuidSchema.getLogicalType());
assertThrows("UUID requires a string", IllegalArgumentException.class,
"Uuid can only be used with an underlying string or fixed type",
() -> LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.INT)));
}
@Test
void durationExtendsFixed12() {
Schema durationSchema = LogicalTypes.duration().addToSchema(Schema.createFixed("f", null, null, 12));
assertEquals(LogicalTypes.duration(), LogicalTypes.fromSchema(durationSchema));
assertThrows("Duration requires a fixed(12)", IllegalArgumentException.class,
"Duration can only be used with an underlying fixed type of size 12.",
() -> LogicalTypes.duration().addToSchema(Schema.create(Schema.Type.INT)));
assertThrows("Duration requires a fixed(12)", IllegalArgumentException.class,
"Duration can only be used with an underlying fixed type of size 12.",
() -> LogicalTypes.duration().addToSchema(Schema.createFixed("wrong", null, null, 42)));
}
@Test
void logicalTypeEquals() {
LogicalTypes.Decimal decimal90 = LogicalTypes.decimal(9);
LogicalTypes.Decimal decimal80 = LogicalTypes.decimal(8);
LogicalTypes.Decimal decimal92 = LogicalTypes.decimal(9, 2);
assertEqualsTrue("Same decimal", LogicalTypes.decimal(9, 0), decimal90);
assertEqualsTrue("Same decimal", LogicalTypes.decimal(8, 0), decimal80);
assertEqualsTrue("Same decimal", LogicalTypes.decimal(9, 2), decimal92);
assertEqualsFalse("Different logical type", LogicalTypes.uuid(), decimal90);
assertEqualsFalse("Different precision", decimal90, decimal80);
assertEqualsFalse("Different scale", decimal90, decimal92);
}
@Test
void logicalTypeInSchemaEquals() {
Schema schema1 = Schema.createFixed("aDecimal", null, null, 4);
Schema schema2 = Schema.createFixed("aDecimal", null, null, 4);
Schema schema3 = Schema.createFixed("aDecimal", null, null, 4);
assertNotSame(schema1, schema2);
assertNotSame(schema1, schema3);
assertEqualsTrue("No logical types", schema1, schema2);
assertEqualsTrue("No logical types", schema1, schema3);
LogicalTypes.decimal(9).addToSchema(schema1);
assertEqualsFalse("Two has no logical type", schema1, schema2);
LogicalTypes.decimal(9).addToSchema(schema2);
assertEqualsTrue("Same logical types", schema1, schema2);
LogicalTypes.decimal(9, 2).addToSchema(schema3);
assertEqualsFalse("Different logical type", schema1, schema3);
}
@Test
void registerLogicalTypeThrowsIfTypeNameNotProvided() {
assertThrows("Should error if type name was not provided", UnsupportedOperationException.class,
"LogicalTypeFactory TypeName has not been provided", () -> {
LogicalTypes.register(schema -> LogicalTypes.date());
return null;
});
}
@Test
void registerLogicalTypeWithName() {
final LogicalTypes.LogicalTypeFactory factory = new LogicalTypes.LogicalTypeFactory() {
@Override
public LogicalType fromSchema(Schema schema) {
return LogicalTypes.date();
}
@Override
public String getTypeName() {
return "typename";
}
};
LogicalTypes.register("registered", factory);
assertThat(LogicalTypes.getCustomRegisteredTypes(), IsMapContaining.hasEntry("registered", factory));
}
@Test
void registerLogicalTypeWithFactoryName() {
final LogicalTypes.LogicalTypeFactory factory = new LogicalTypes.LogicalTypeFactory() {
@Override
public LogicalType fromSchema(Schema schema) {
return LogicalTypes.date();
}
@Override
public String getTypeName() {
return "factory";
}
};
LogicalTypes.register(factory);
assertThat(LogicalTypes.getCustomRegisteredTypes(), IsMapContaining.hasEntry("factory", factory));
}
@Test
void registerLogicalTypeWithFactoryNameNotProvided() {
final LogicalTypes.LogicalTypeFactory factory = schema -> LogicalTypes.date();
LogicalTypes.register("logicalTypeName", factory);
assertThat(LogicalTypes.getCustomRegisteredTypes(), IsMapContaining.hasEntry("logicalTypeName", factory));
}
@Test
public void testRegisterLogicalTypeFactoryByServiceLoader() {
assertThat(LogicalTypes.getCustomRegisteredTypes(),
IsMapContaining.hasEntry(equalTo("custom"), instanceOf(LogicalTypes.LogicalTypeFactory.class)));
}
public static void assertEqualsTrue(String message, Object o1, Object o2) {
assertEquals(o1, o2, "Should be equal (forward): " + message);
assertEquals(o2, o1, "Should be equal (reverse): " + message);
}
public static void assertEqualsFalse(String message, Object o1, Object o2) {
assertNotEquals(o1, o2, "Should be equal (forward): " + message);
assertNotEquals(o2, o1, "Should be equal (reverse): " + message);
}
/**
* A convenience method to avoid a large number of @Test(expected=...) tests
*
* @param message A String message to describe this assertion
* @param expected An Exception | TestLogicalType |
java | spring-projects__spring-framework | spring-context-support/src/main/java/org/springframework/mail/MailException.java | {
"start": 773,
"end": 884
} | class ____ all mail exceptions.
*
* @author Dmitriy Kopylenko
*/
@SuppressWarnings("serial")
public abstract | for |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/project/InvalidProjectVersionException.java | {
"start": 997,
"end": 2128
} | class ____ extends ProjectBuildingException {
private final String locationInPom;
private final String offendingVersion;
public InvalidProjectVersionException(
String projectId,
String locationInPom,
String offendingVersion,
File pomFile,
InvalidVersionSpecificationException cause) {
super(projectId, formatMessage(projectId, locationInPom, offendingVersion, cause), pomFile, cause);
this.locationInPom = locationInPom;
this.offendingVersion = offendingVersion;
}
private static String formatMessage(
String projectId,
String locationInPom,
String offendingVersion,
InvalidVersionSpecificationException cause) {
return "Invalid version: " + offendingVersion + " found for: " + locationInPom + " in project: " + projectId
+ ". Reason: " + cause.getMessage();
}
public String getOffendingVersion() {
return offendingVersion;
}
public String getLocationInPom() {
return locationInPom;
}
}
| InvalidProjectVersionException |
java | spring-projects__spring-boot | module/spring-boot-http-client/src/main/java/org/springframework/boot/http/client/autoconfigure/ClientHttpRequestFactoryBuilderCustomizer.java | {
"start": 981,
"end": 1232
} | interface ____<B extends ClientHttpRequestFactoryBuilder<?>> {
/**
* Customize the given builder.
* @param builder the builder to customize
* @return the customized builder
*/
B customize(B builder);
}
| ClientHttpRequestFactoryBuilderCustomizer |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/internal/ClassJsonAdapterTest.java | {
"start": 12810,
"end": 13545
} | class ____ extends SimpleTimeZone {
int a;
public ExtendsPlatformClassWithPrivateField() {
super(0, "FOO");
}
}
@Test
public void platformSuperclassPrivateFieldIsExcluded() throws Exception {
ExtendsPlatformClassWithPrivateField value = new ExtendsPlatformClassWithPrivateField();
value.a = 4;
String toJson = toJson(ExtendsPlatformClassWithPrivateField.class, value);
assertThat(toJson).isEqualTo("{\"a\":4}");
ExtendsPlatformClassWithPrivateField fromJson =
fromJson(ExtendsPlatformClassWithPrivateField.class, "{\"a\":4,\"ID\":\"BAR\"}");
assertThat(fromJson.a).isEqualTo(4);
assertThat(fromJson.getID()).isEqualTo("FOO");
}
static | ExtendsPlatformClassWithPrivateField |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlReset.java | {
"start": 1458,
"end": 2372
} | class ____ extends SqlCall {
public static final SqlSpecialOperator OPERATOR =
new SqlSpecialOperator("RESET", SqlKind.OTHER);
@Nullable private final SqlNode key;
public SqlReset(SqlParserPos pos, @Nullable SqlNode key) {
super(pos);
this.key = key;
}
@Override
@Nonnull
public SqlOperator getOperator() {
return OPERATOR;
}
@Override
@Nonnull
public List<SqlNode> getOperandList() {
return ImmutableNullableList.of(key);
}
public @Nullable SqlNode getKey() {
return key;
}
public @Nullable String getKeyString() {
return SqlParseUtils.extractString(key);
}
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("RESET");
if (key != null) {
key.unparse(writer, leftPrec, rightPrec);
}
}
}
| SqlReset |
java | elastic__elasticsearch | x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/SourceDestValidations.java | {
"start": 1139,
"end": 3090
} | class ____ {
private SourceDestValidations() {}
private static final SourceDestValidation REMOTE_SOURCE_VALIDATION =
new SourceDestValidator.RemoteSourceEnabledAndRemoteLicenseValidation("transform");
private static final List<SourceDestValidation> PREVIEW_VALIDATIONS = Arrays.asList(
SOURCE_MISSING_VALIDATION,
REMOTE_SOURCE_VALIDATION,
DESTINATION_PIPELINE_MISSING_VALIDATION
);
private static final List<SourceDestValidation> ALL_VALIDATIONS = Arrays.asList(
SOURCE_MISSING_VALIDATION,
REMOTE_SOURCE_VALIDATION,
DESTINATION_IN_SOURCE_VALIDATION,
DESTINATION_SINGLE_INDEX_VALIDATION,
DESTINATION_PIPELINE_MISSING_VALIDATION
);
private static final List<SourceDestValidation> NON_DEFERABLE_VALIDATIONS = Collections.singletonList(
DESTINATION_SINGLE_INDEX_VALIDATION
);
public static List<SourceDestValidation> getValidations(boolean isDeferValidation, List<SourceDestValidation> additionalValidations) {
return getValidations(isDeferValidation, ALL_VALIDATIONS, additionalValidations);
}
public static List<SourceDestValidation> getValidationsForPreview(List<SourceDestValidation> additionalValidations) {
return getValidations(false, PREVIEW_VALIDATIONS, additionalValidations);
}
private static List<SourceDestValidation> getValidations(
boolean isDeferValidation,
List<SourceDestValidation> primaryValidations,
List<SourceDestValidation> additionalValidations
) {
if (isDeferValidation) {
return SourceDestValidations.NON_DEFERABLE_VALIDATIONS;
}
if (additionalValidations.isEmpty()) {
return primaryValidations;
}
List<SourceDestValidation> validations = new ArrayList<>(primaryValidations);
validations.addAll(additionalValidations);
return validations;
}
}
| SourceDestValidations |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/dangling/DanglingIndexInfo.java | {
"start": 879,
"end": 2130
} | class ____ implements Writeable {
private final String nodeId;
private final String indexName;
private final String indexUUID;
private final long creationDateMillis;
public DanglingIndexInfo(String nodeId, String indexName, String indexUUID, long creationDateMillis) {
this.nodeId = nodeId;
this.indexName = indexName;
this.indexUUID = indexUUID;
this.creationDateMillis = creationDateMillis;
}
public DanglingIndexInfo(StreamInput in) throws IOException {
this.nodeId = in.readString();
this.indexName = in.readString();
this.indexUUID = in.readString();
this.creationDateMillis = in.readLong();
}
public String getIndexName() {
return indexName;
}
public String getIndexUUID() {
return indexUUID;
}
public String getNodeId() {
return this.nodeId;
}
public long getCreationDateMillis() {
return creationDateMillis;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(this.nodeId);
out.writeString(this.indexName);
out.writeString(this.indexUUID);
out.writeLong(this.creationDateMillis);
}
}
| DanglingIndexInfo |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/stat/spi/StatisticsImplementor.java | {
"start": 548,
"end": 7773
} | interface ____ extends Statistics, Service {
/**
* Callback about a session being opened.
*/
void openSession();
/**
* Callback about a session being closed.
*/
void closeSession();
/**
* Callback about a flush occurring
*/
void flush();
/**
* Callback about a connection being obtained from {@link org.hibernate.engine.jdbc.connections.spi.ConnectionProvider}
*/
void connect();
/**
* Callback about a statement being prepared.
*/
void prepareStatement();
/**
* Callback about a statement being closed.
*/
void closeStatement();
/**
* Callback about a transaction completing.
*
* @param success Was the transaction successful?
*/
void endTransaction(boolean success);
/**
* Callback about an entity being loaded. This might indicate a proxy or a fully initialized entity, but in either
* case it means without a separate SQL query being needed.
*
* @param entityName The name of the entity loaded.
*/
void loadEntity(String entityName);
/**
* Callback about an entity being fetched. Unlike {@link #loadEntity} this indicates a separate query being
* performed.
*
* @param entityName The name of the entity fetched.
*/
void fetchEntity(String entityName);
/**
* Callback about an entity being updated.
*
* @param entityName The name of the entity updated.
*/
void updateEntity(String entityName);
/**
* Callback about an entity being upserted.
*
* @param entityName The name of the entity upserted.
*/
void upsertEntity(String entityName);
/**
* Callback about an entity being inserted
*
* @param entityName The name of the entity inserted
*/
void insertEntity(String entityName);
/**
* Callback about an entity being deleted.
*
* @param entityName The name of the entity deleted.
*/
void deleteEntity(String entityName);
/**
* Callback about an optimistic lock failure on an entity
*
* @param entityName The name of the entity.
*/
void optimisticFailure(String entityName);
/**
* Callback about a collection loading. This might indicate a lazy collection or an initialized collection being
* created, but in either case it means without a separate SQL query being needed.
*
* @param role The collection role.
*/
void loadCollection(String role);
/**
* Callback to indicate a collection being fetched. Unlike {@link #loadCollection}, this indicates a separate
* query was needed.
*
* @param role The collection role.
*/
void fetchCollection(String role);
/**
* Callback indicating a collection was updated.
*
* @param role The collection role.
*/
void updateCollection(String role);
/**
* Callback indicating a collection recreation (full deletion + full (re-)insertion).
*
* @param role The collection role.
*/
void recreateCollection(String role);
/**
* Callback indicating a collection removal.
*
* @param role The collection role.
*/
void removeCollection(String role);
/**
* Callback indicating a put into second level cache.
*
* @apiNote {@code entityName} should be the root entity name
*/
void entityCachePut(NavigableRole entityName, String regionName);
/**
* Callback indicating a get from second level cache resulted in a hit.
*
* @apiNote {@code entityName} should be the root entity name
*/
void entityCacheHit(NavigableRole entityName, String regionName);
/**
* Callback indicating a get from second level cache resulted in a miss.
*
* @apiNote {@code entityName} should be the root entity name
*/
void entityCacheMiss(NavigableRole entityName, String regionName);
/**
* Callback indicating a removal from second level cache.
*
* @apiNote {@code entityName} should be the root entity name
*/
void entityCacheRemove(NavigableRole rootEntityRole, String name);
/**
* Callback indicating a put into second level cache.
*
* @param collectionRole The collection's "path"
* @param regionName The name of the cache region
*/
void collectionCachePut(NavigableRole collectionRole, String regionName);
/**
* Callback indicating a get from second level cache resulted in a hit.
*
* @param collectionRole The collection's "path"
* @param regionName The name of the cache region
*/
void collectionCacheHit(NavigableRole collectionRole, String regionName);
/**
* Callback indicating a get from second level cache resulted in a miss.
*
* @param collectionRole The collection's "path"
* @param regionName The name of the cache region
*/
void collectionCacheMiss(NavigableRole collectionRole, String regionName);
/**
* Callback indicating a put into natural id cache.
*/
void naturalIdCachePut(NavigableRole rootEntityName, String regionName);
/**
* Callback indicating a get from natural id cache resulted in a hit.
*/
void naturalIdCacheHit(NavigableRole rootEntityName, String regionName);
/**
* Callback indicating a get from natural id cache resulted in a miss.
*/
void naturalIdCacheMiss(NavigableRole rootEntityName, String regionName);
/**
* Callback indicating execution of a natural id query
*/
void naturalIdQueryExecuted(String rootEntityName, long executionTime);
/**
* Callback indicating a put into the query cache.
*
* @param hql The query
* @param regionName The cache region
*/
void queryCachePut(String hql, String regionName);
/**
* Callback indicating a get from the query cache resulted in a hit.
*
* @param hql The query
* @param regionName The name of the cache region
*/
void queryCacheHit(String hql, String regionName);
/**
* Callback indicating a get from the query cache resulted in a miss.
*
* @param hql The query
* @param regionName The name of the cache region
*/
void queryCacheMiss(String hql, String regionName);
/**
* Callback indicating execution of a SQL or HQL query
*
* @param query The query
* @param rows Number of rows returned
* @param time execution time
*/
void queryExecuted(String query, int rows, long time);
/**
* Callback indicating a hit to the timestamp cache
*/
void updateTimestampsCacheHit();
/**
* Callback indicating a miss to the timestamp cache
*/
void updateTimestampsCacheMiss();
/**
* Callback indicating a put to the timestamp cache
*/
void updateTimestampsCachePut();
/**
* Callback indicating a get from the query plan cache resulted in a hit.
*
* @param query The query
*/
default void queryPlanCacheHit(String query) {
//For backward compatibility
}
/**
* Callback indicating a get from the query plan cache resulted in a miss.
*
* @param query The query
*/
default void queryPlanCacheMiss(String query) {
//For backward compatibility
}
/**
* Callback indicating compilation of a sql/hql query
*
* @param hql The query
* @param microseconds execution time
*/
default void queryCompiled(String hql, long microseconds) {
//For backward compatibility
}
/**
* Register the execution of a slow SQL query.
*/
default void slowQuery(String sql, long executionTime) {
//For backward compatibility
}
@Override
default Map<String, Long> getSlowQueries() {
//For backward compatibility
return emptyMap();
}
}
| StatisticsImplementor |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/visitor/OracleParameterizedOutputVisitorTest_01.java | {
"start": 318,
"end": 2703
} | class ____ extends TestCase {
public void test_for_parameterize() throws Exception {
String sql = "SELECT dd.synonym_name table_name,'EAIREP' as schema_name, aa.num_rows,bb.comments description,to_char(cc.created,'YYYY-MM-DD HH24:MI:SS') createTime,to_char(cc.last_ddl_time,'YYYY-MM-DD HH24:MI:SS') last_ddl_time, 'SYNONYM' table_type ,lower(aa.owner || '.' || aa.table_name) ref_info\n" +
" from dba_synonyms dd,\n" +
" dba_objects cc,\n" +
" dba_tables aa\n" +
" left join dba_tab_comments bb\n" +
" on aa.owner = bb.owner\n" +
" and aa.table_name = bb.table_name\n" +
" where aa.owner=cc.owner\n" +
" and aa.table_name=cc.object_name\n" +
" and cc.subobject_name is null\n" +
" and cc.object_type='TABLE'\n" +
" and aa.owner = dd.table_owner\n" +
" and aa.table_name = dd.table_name and dd.owner = :1 ";
List<Object> parameters = new ArrayList<Object>();
parameters.add("EAIREP");
List<SQLStatement> stmts = SQLUtils.parseStatements(sql, JdbcConstants.ORACLE);
String tempResult = SQLUtils.toSQLString(stmts, com.alibaba.druid.util.JdbcConstants.ORACLE, parameters);
assertEquals("SELECT dd.synonym_name AS table_name, 'EAIREP' AS schema_name, aa.num_rows, bb.comments AS description\n" +
"\t, to_char(cc.created, 'YYYY-MM-DD HH24:MI:SS') AS createTime\n" +
"\t, to_char(cc.last_ddl_time, 'YYYY-MM-DD HH24:MI:SS') AS last_ddl_time, 'SYNONYM' AS table_type\n" +
"\t, lower(aa.owner || '.' || aa.table_name) AS ref_info\n" +
"FROM dba_synonyms dd, dba_objects cc, dba_tables aa\n" +
"\tLEFT JOIN dba_tab_comments bb ON aa.owner = bb.owner\n" +
"\tAND aa.table_name = bb.table_name \n" +
"WHERE aa.owner = cc.owner\n" +
"\tAND aa.table_name = cc.object_name\n" +
"\tAND cc.subobject_name IS NULL\n" +
"\tAND cc.object_type = 'TABLE'\n" +
"\tAND aa.owner = dd.table_owner\n" +
"\tAND aa.table_name = dd.table_name\n" +
"\tAND dd.owner = 'EAIREP'", tempResult);
}
}
| OracleParameterizedOutputVisitorTest_01 |
java | apache__camel | components/camel-http/src/test/java/org/apache/camel/component/http/HttpCharsetTest.java | {
"start": 1219,
"end": 3895
} | class ____ extends BaseHttpTest {
// default content encoding of the local test server
private final String charset = "ISO-8859-1";
private HttpServer localServer;
@Override
public void setupResources() throws Exception {
localServer = ServerBootstrap.bootstrap()
.setCanonicalHostName("localhost").setHttpProcessor(getBasicHttpProcessor())
.setConnectionReuseStrategy(getConnectionReuseStrategy()).setResponseFactory(getHttpResponseFactory())
.setSslContext(getSSLContext())
.register("/", new BasicValidationHandler(POST.name(), null, getBody(), getExpectedContent())).create();
localServer.start();
}
@Override
public void cleanupResources() throws Exception {
if (localServer != null) {
localServer.stop();
}
}
@Test
public void sendCharsetInExchangeProperty() {
Exchange exchange = template.request(
"http://localhost:" + localServer.getLocalPort() + "/", exchange1 -> {
exchange1.getIn().setHeader(Exchange.CONTENT_TYPE, "text/plain;charset=iso8859-1");
exchange1.getIn().setBody(getBody());
});
assertExchange(exchange);
}
@Test
public void sendByteArrayCharsetInExchangeProperty() {
Exchange exchange = template.request(
"http://localhost:" + localServer.getLocalPort() + "/", exchange1 -> {
exchange1.getIn().setHeader(Exchange.CONTENT_TYPE, "text/plain;charset=iso8859-1");
exchange1.getIn().setBody(getBody().getBytes(charset));
});
assertExchange(exchange);
}
@Test
public void sendInputStreamCharsetInExchangeProperty() {
Exchange exchange = template.request(
"http://localhost:" + localServer.getLocalPort() + "/", exchange1 -> {
exchange1.getIn().setHeader(Exchange.CONTENT_TYPE, "text/plain;charset=iso8859-1");
exchange1.getIn().setBody(new ByteArrayInputStream(getBody().getBytes(charset)));
});
assertExchange(exchange);
}
protected String getBody() {
char latinSmallLetterAWithDiaeresis = 0x00E4;
char latinSmallLetterOWithDiaeresis = 0x00F6;
char latinSmallLetterUWithDiaeresis = 0x00FC;
char latinSmallLetterSharpS = 0x00DF;
return "hl=de&q=camel+"
+ latinSmallLetterAWithDiaeresis
+ latinSmallLetterOWithDiaeresis
+ latinSmallLetterUWithDiaeresis
+ latinSmallLetterSharpS;
}
}
| HttpCharsetTest |
java | apache__camel | components/camel-ldap/src/test/java/org/apache/directory/server/core/integ5/ServerAnnotationProcessor.java | {
"start": 9719,
"end": 12045
} | class ____ be loaded
*/
public static ReplicationConsumer createConsumer() throws ClassNotFoundException {
Object instance = AnnotationUtils.getInstance(CreateConsumer.class);
ReplicationConsumer consumer = null;
if (instance != null) {
CreateConsumer createConsumer = (CreateConsumer) instance;
consumer = createConsumer(createConsumer);
}
return consumer;
}
/**
* creates an LdapServer and starts before returning the instance
*
* @param createLdapServer the annotation containing the custom configuration
* @param directoryService the directory service
* @return a running LdapServer instance
*/
private static LdapServer createLdapServer(CreateLdapServer createLdapServer, DirectoryService directoryService) {
LdapServer ldapServer = instantiateLdapServer(createLdapServer, directoryService);
if (ldapServer == null) {
return null;
}
// Launch the server
try {
ldapServer.start();
} catch (Exception e) {
LOG.warn("Failed to start the LDAP server: {}", e.getMessage(), e);
}
return ldapServer;
}
/**
* Create a new instance of LdapServer
*
* @param description A description for the created LdapServer
* @param directoryService The associated DirectoryService
* @return An LdapServer instance
*/
public static LdapServer createLdapServer(Description description, DirectoryService directoryService) {
CreateLdapServer createLdapServer = description.getAnnotation(CreateLdapServer.class);
// Ok, we have found a CreateLdapServer annotation. Process it now.
return createLdapServer(createLdapServer, directoryService);
}
@SuppressWarnings("unchecked")
private static Annotation getAnnotation(Class annotationClass) throws Exception {
// Get the caller by inspecting the stackTrace
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
// In Java5 the 0th stacktrace element is: java.lang.Thread.dumpThreads(Native Method)
int index = stackTrace[0].getMethodName().equals("dumpThreads") ? 4 : 3;
// Get the enclosing | cannot |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/Level.java | {
"start": 5204,
"end": 11859
} | enum ____ the standard Levels.
*/
public StandardLevel getStandardLevel() {
return standardLevel;
}
/**
* Compares this level against the levels passed as arguments and returns true if this level is in between the given
* levels.
*
* @param minLevel The minimum level to test.
* @param maxLevel The maximum level to test.
* @return True true if this level is in between the given levels
* @since 2.4
*/
public boolean isInRange(final Level minLevel, final Level maxLevel) {
return this.intLevel >= minLevel.intLevel && this.intLevel <= maxLevel.intLevel;
}
/**
* Compares this level against the level passed as an argument and returns true if this level is the same or is less
* specific.
* <p>
* Concretely, {@link #ALL} is less specific than {@link #TRACE}, which is less specific than {@link #DEBUG}, which
* is less specific than {@link #INFO}, which is less specific than {@link #WARN}, which is less specific than
* {@link #ERROR}, which is less specific than {@link #FATAL}, and finally {@link #OFF}, which is the most specific
* standard level.
* </p>
*
* @param level
* The level to test.
* @return True if this level Level is less specific or the same as the given Level.
*/
public boolean isLessSpecificThan(final Level level) {
return this.intLevel >= level.intLevel;
}
/**
* Compares this level against the level passed as an argument and returns true if this level is the same or is more
* specific.
* <p>
* Concretely, {@link #FATAL} is more specific than {@link #ERROR}, which is more specific than {@link #WARN},
* etc., until {@link #TRACE}, and finally {@link #ALL}, which is the least specific standard level.
* The most specific level is {@link #OFF}.
* </p>
*
* @param level The level to test.
* @return True if this level Level is more specific or the same as the given Level.
*/
public boolean isMoreSpecificThan(final Level level) {
return this.intLevel <= level.intLevel;
}
@Override
@SuppressWarnings("CloneDoesntCallSuperClone")
// CHECKSTYLE:OFF
public Level clone() throws CloneNotSupportedException {
throw new CloneNotSupportedException();
}
// CHECKSTYLE:ON
@Override
public int compareTo(final Level other) {
return intLevel < other.intLevel ? -1 : (intLevel > other.intLevel ? 1 : 0);
}
@Override
public boolean equals(final Object other) {
return other instanceof Level && other == this;
}
public Class<Level> getDeclaringClass() {
return Level.class;
}
@Override
public int hashCode() {
return this.name.hashCode();
}
/**
* Gets the symbolic name of this Level. Equivalent to calling {@link #toString()}.
*
* @return the name of this Level.
*/
public String name() {
return this.name;
}
@Override
public String toString() {
return this.name;
}
/**
* Retrieves an existing Level or creates on if it didn't previously exist.
*
* @param name The name of the level.
* @param intValue The integer value for the Level. If the level was previously created this value is ignored.
* @return The Level.
* @throws java.lang.IllegalArgumentException if the name is null or intValue is less than zero.
*/
public static Level forName(final String name, final int intValue) {
if (Strings.isEmpty(name)) {
throw new IllegalArgumentException("Illegal null or empty Level name.");
}
final String normalizedName = toRootUpperCase(name.trim());
final Level level = LEVELS.get(normalizedName);
if (level != null) {
return level;
}
try {
// use original capitalization
return new Level(name, intValue);
} catch (final IllegalStateException ex) {
// The level was added by something else so just return that one.
return LEVELS.get(normalizedName);
}
}
/**
* Return the Level associated with the name or null if the Level cannot be found.
*
* @param name The name of the Level.
* @return The Level or null.
* @throws java.lang.IllegalArgumentException if the name is null.
*/
public static Level getLevel(final String name) {
if (Strings.isEmpty(name)) {
throw new IllegalArgumentException("Illegal null or empty Level name.");
}
return LEVELS.get(toRootUpperCase(name.trim()));
}
/**
* Converts the string passed as argument to a level. If the conversion fails, then this method returns
* {@link #DEBUG}.
*
* @param level The name of the desired Level.
* @return The Level associated with the String.
*/
public static Level toLevel(final String level) {
return toLevel(level, Level.DEBUG);
}
/**
* Converts the string passed as argument to a level. If the conversion fails, then this method returns the value of
* <code>defaultLevel</code>.
*
* @param name The name of the desired Level.
* @param defaultLevel The Level to use if the String is invalid.
* @return The Level associated with the String.
*/
public static Level toLevel(final String name, final Level defaultLevel) {
if (name == null) {
return defaultLevel;
}
final Level level = LEVELS.get(toRootUpperCase(name.trim()));
return level == null ? defaultLevel : level;
}
/**
* Return an array of all the Levels that have been registered.
*
* @return An array of Levels.
*/
public static Level[] values() {
return Level.LEVELS.values().toArray(EMPTY_ARRAY);
}
/**
* Return the Level associated with the name.
*
* @param name The name of the Level to return.
* @return The Level.
* @throws java.lang.NullPointerException if the Level name is {@code null}.
* @throws java.lang.IllegalArgumentException if the Level name is not registered.
*/
public static Level valueOf(final String name) {
Objects.requireNonNull(name, "No level name given.");
final String levelName = toRootUpperCase(name.trim());
final Level level = LEVELS.get(levelName);
if (level != null) {
return level;
}
throw new IllegalArgumentException("Unknown level constant [" + levelName + "].");
}
/**
* Returns the | of |
java | apache__camel | components/camel-docker/src/main/java/org/apache/camel/component/docker/DockerConstants.java | {
"start": 3233,
"end": 16440
} | class ____ of the DockerCmdExecFactory implementation to use",
javaType = "String")
public static final String DOCKER_CMD_EXEC_FACTORY = "CamelDockerCmdExecFactory";
/**
* List Images *
*/
@Metadata(description = "With label filter", javaType = "String")
public static final String DOCKER_FILTER = "CamelDockerFilter";
@Metadata(description = "With show all flag", javaType = "Boolean")
public static final String DOCKER_SHOW_ALL = "CamelDockerShowAll";
/**
* Common *
*/
@Metadata(description = "The id of the container", javaType = "String")
public static final String DOCKER_CONTAINER_ID = "CamelDockerContainerId";
@Metadata(description = "The Image ID", javaType = "String")
public static final String DOCKER_IMAGE_ID = "CamelDockerImageId";
/**
* Auth *
*/
@Metadata(description = "The email address associated with the user", javaType = "String")
public static final String DOCKER_EMAIL = "CamelDockerEmail";
@Metadata(description = "The password to authenticate with", javaType = "String")
public static final String DOCKER_PASSWORD = "CamelDockerPassword";
@Metadata(description = "The server address for docker registry", javaType = "String")
public static final String DOCKER_SERVER_ADDRESS = "CamelDockerServerAddress";
@Metadata(description = "The user name to authenticate with", javaType = "String")
public static final String DOCKER_USERNAME = "CamelDockerUsername";
/**
* Pull *
*/
@Metadata(description = "The registry", javaType = "String")
public static final String DOCKER_REGISTRY = "CamelDockerRegistry";
@Metadata(description = "The repository", javaType = "String")
public static final String DOCKER_REPOSITORY = "CamelDockerRepository";
@Metadata(description = "The tag", javaType = "String")
public static final String DOCKER_TAG = "CamelDockerTag";
/**
* Push *
*/
@Metadata(description = "The image name", javaType = "String")
public static final String DOCKER_NAME = "CamelDockerName";
/**
* Search *
*/
@Metadata(description = "The term to search", javaType = "String")
public static final String DOCKER_TERM = "CamelDockerTerm";
/**
* Remove *
*/
@Metadata(description = "With force flag", javaType = "Boolean")
public static final String DOCKER_FORCE = "CamelDockerForce";
@Metadata(description = "With no prune flag", javaType = "Boolean")
public static final String DOCKER_NO_PRUNE = "CamelDockerNoPrune";
/**
* Events *
*/
@Metadata(description = "The initial range", javaType = "Long")
public static final String DOCKER_INITIAL_RANGE = "CamelDockerInitialRange";
/**
* List Container *
*/
@Metadata(description = "With before", javaType = "String")
public static final String DOCKER_BEFORE = "CamelDockerBefore";
@Metadata(description = "With limit", javaType = "Integer")
public static final String DOCKER_LIMIT = "CamelDockerLimit";
@Metadata(description = "With show size flag", javaType = "Boolean")
public static final String DOCKER_SHOW_SIZE = "CamelDockerShowSize";
@Metadata(description = "With since", javaType = "String")
public static final String DOCKER_SINCE = "CamelDockerSince";
/**
* Remove Container *
*/
@Metadata(description = "With remove volumes flag", javaType = "Boolean")
public static final String DOCKER_REMOVE_VOLUMES = "CamelDockerRemoveVolumes";
/**
* Attach Container *
*/
@Metadata(description = "With follow stream flag", javaType = "Boolean")
public static final String DOCKER_FOLLOW_STREAM = "CamelDockerFollowStream";
@Metadata(description = "With logs flag", javaType = "Boolean")
public static final String DOCKER_LOGS = "CamelDockerLogs";
@Metadata(description = "With stdErr flag", javaType = "Boolean")
public static final String DOCKER_STD_ERR = "CamelDockerStdErr";
@Metadata(description = "With stdOut flag", javaType = "Boolean")
public static final String DOCKER_STD_OUT = "CamelDockerStdOut";
@Metadata(description = "With timestamps flag", javaType = "Boolean")
public static final String DOCKER_TIMESTAMPS = "CamelDockerTimestamps";
/**
* Logs *
*/
@Metadata(description = "With Tail", javaType = "Integer")
public static final String DOCKER_TAIL = "CamelDockerTail";
@Metadata(description = "With tail all flag", javaType = "Boolean")
public static final String DOCKER_TAIL_ALL = "CamelDockerTailAll";
/**
* Copy *
*/
@Metadata(description = "The host path", javaType = "String")
public static final String DOCKER_HOST_PATH = "CamelDockerHostPath";
@Metadata(description = "The resource", javaType = "String")
public static final String DOCKER_RESOURCE = "CamelDockerResource";
/**
* Diff Container *
*/
@Metadata(description = "With container id for diff container request", javaType = "String")
public static final String DOCKER_CONTAINER_ID_DIFF = "CamelDockerContainerIdDiff";
/**
* Stop Container *
*/
@Metadata(description = "With timeout", javaType = "Integer")
public static final String DOCKER_TIMEOUT = "CamelDockerTimeout";
/**
* Kill Container *
*/
@Metadata(description = "With signal", javaType = "String")
public static final String DOCKER_SIGNAL = "CamelDockerSignal";
/**
* Top Container *
*/
@Metadata(description = "With ps args", javaType = "String")
public static final String DOCKER_PS_ARGS = "CamelDockerPsArgs";
/**
* Build Image *
*/
@Metadata(description = "With no cache flag", javaType = "Boolean")
public static final String DOCKER_NO_CACHE = "CamelDockerNoCache";
@Metadata(description = "With quiet flag", javaType = "Boolean")
public static final String DOCKER_QUIET = "CamelDockerQuiet";
@Metadata(description = "With remove flag", javaType = "Boolean")
public static final String DOCKER_REMOVE = "CamelDockerRemove";
public static final String DOCKER_TAR_INPUT_STREAM = "CamelDockerTarInputStream";
/**
* Commit Container *
*/
@Metadata(description = "With attach StdErr flag", javaType = "Boolean")
public static final String DOCKER_ATTACH_STD_ERR = "CamelDockerAttachStdErr";
@Metadata(description = "With attach StdIn flag", javaType = "Boolean")
public static final String DOCKER_ATTACH_STD_IN = "CamelDockerAttachStdIn";
@Metadata(description = "With attach StdOut flag", javaType = "Boolean")
public static final String DOCKER_ATTACH_STD_OUT = "CamelDockerAttachStdOut";
@Metadata(description = "The author", javaType = "String")
public static final String DOCKER_AUTHOR = "CamelDockerAuthor";
@Metadata(description = "With cmd", javaType = "String or String[]")
public static final String DOCKER_CMD = "CamelDockerCmd";
public static final String DOCKER_COMMENT = "CamelDockerComment";
@Metadata(description = "With disable network flag", javaType = "Boolean")
public static final String DOCKER_DISABLE_NETWORK = "CamelDockerDisableNetwork";
@Metadata(description = "With env", javaType = "String or String[]")
public static final String DOCKER_ENV = "CamelDockerEnv";
@Metadata(description = "The exposed ports", javaType = "ExposedPorts or ExposedPorts[]")
public static final String DOCKER_EXPOSED_PORTS = "CamelDockerExposedPorts";
@Metadata(description = "The hostname", javaType = "String")
public static final String DOCKER_HOSTNAME = "CamelDockerHostname";
@Metadata(description = "The message", javaType = "String")
public static final String DOCKER_MESSAGE = "CamelDockerMessage";
@Metadata(description = "With memory", javaType = "Integer")
public static final String DOCKER_MEMORY = "CamelDockerMemory";
@Metadata(description = "With memory swap", javaType = "Long or Integer")
public static final String DOCKER_MEMORY_SWAP = "CamelDockerMemorySwap";
@Metadata(description = "With open StdIn flag", javaType = "Boolean")
public static final String DOCKER_OPEN_STD_IN = "CamelDockerOpenStdIn";
@Metadata(description = "With pause flag", javaType = "Boolean")
public static final String DOCKER_PAUSE = "CamelDockerPause";
@Metadata(description = "With port specs", javaType = "String or String[]")
public static final String DOCKER_PORT_SPECS = "CamelDockerPortSpecs";
@Metadata(description = "With StdIn in once flag", javaType = "Boolean")
public static final String DOCKER_STD_IN_ONCE = "CamelDockerStdInOnce";
@Metadata(description = "With TTY flag", javaType = "Boolean")
public static final String DOCKER_TTY = "CamelDockerTty";
@Metadata(description = "With user", javaType = "String")
public static final String DOCKER_USER = "CamelDockerUser";
@Metadata(description = "With volumes", javaType = "Volume or Volume[]")
public static final String DOCKER_VOLUMES = "CamelDockerVolumes";
@Metadata(description = "With working directory", javaType = "String")
public static final String DOCKER_WORKING_DIR = "CamelDockerWorkingDir";
/**
* Create Container *
*/
@Metadata(description = "With CPU shares", javaType = "Integer")
public static final String DOCKER_CPU_SHARES = "CamelDockerCpuShares";
@Metadata(description = "With dns", javaType = "String or String[]")
public static final String DOCKER_DNS = "CamelDockerDns";
@Metadata(description = "With entrypoint", javaType = "String or String[]")
public static final String DOCKER_ENTRYPOINT = "CamelDockerEntryPoint";
@Metadata(description = "With host config", javaType = "com.github.dockerjava.api.model.HostConfig")
public static final String DOCKER_HOST_CONFIG = "CamelDockerHostConfig";
@Metadata(description = "The docker image", javaType = "String")
public static final String DOCKER_IMAGE = "CamelDockerImage";
@Metadata(description = "With memory limit", javaType = "Long")
public static final String DOCKER_MEMORY_LIMIT = "CamelDockerMemoryLimit";
@Metadata(description = "With StdIn in open flag", javaType = "Boolean")
public static final String DOCKER_STD_IN_OPEN = "CamelDockerStdInOpen";
@Metadata(description = "With volumes from", javaType = "VolumesFrom or VolumesFrom[]")
public static final String DOCKER_VOLUMES_FROM = "CamelDockerVolumesFrom";
@Metadata(description = "With domain name", javaType = "String")
public static final String DOCKER_DOMAIN_NAME = "CamelDockerDomainName";
@Metadata(description = "With binds", javaType = "Bind or Bind[]")
public static final String DOCKER_BINDS = "CamelDockerBinds";
/**
* Start Container *
*/
@Metadata(description = "With cap add", javaType = "Capability or Capability[]")
public static final String DOCKER_CAP_ADD = "CamelDockerCapAdd";
@Metadata(description = "With cap drop", javaType = "Capability or Capability[]")
public static final String DOCKER_CAP_DROP = "CamelDockerCapDrop";
public static final String DOCKER_DEVICES = "CamelDockeDevices";
public static final String DOCKER_DNS_SEARCH = "CamelDockerDnsSearch";
public static final String DOCKER_LINKS = "CamelDockerLinks";
public static final String DOCKER_LXC_CONF = "CamelDockerLxcConf";
public static final String DOCKER_NETWORK_MODE = "CamelNetworkMode";
public static final String DOCKER_PORT_BINDINGS = "CamelDockerPortBinding";
public static final String DOCKER_PORTS = "CamelDockerPorts";
public static final String DOCKER_PRIVILEGED = "CamelDockerDnsPrivileged";
public static final String DOCKER_PUBLISH_ALL_PORTS = "CamelDockerPublishAllPorts";
public static final String DOCKER_RESTART_POLICY = "CamelDockerRestartPolicy";
/**
* Create Network * Attach to Network * Remove Network *
*/
@Metadata(description = "The network name", javaType = "String")
public static final String DOCKER_NETWORK = "CamelDockerNetwork";
/**
* Exec *
*/
@Metadata(description = "With detach flag", javaType = "Boolean")
public static final String DOCKER_DETACH = "CamelDockerDetach";
@Metadata(description = "The Exec ID", javaType = "String")
public static final String DOCKER_EXEC_ID = "CamelDockerExecId";
static {
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_CERT_PATH, String.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_CLIENT_PROFILE, String.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_EMAIL, String.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_HOST, String.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_PASSWORD, String.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_PORT, Integer.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_SECURE, Boolean.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_SERVER_ADDRESS, String.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_USERNAME, String.class);
DOCKER_DEFAULT_PARAMETERS.put(DOCKER_CMD_EXEC_FACTORY, String.class);
}
private DockerConstants() {
// Helper class
}
}
| name |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/core/Flowable.java | {
"start": 5296,
"end": 7594
} | class ____ due
* to the large amounts of <a href="https://github.com/reactive-streams/reactive-streams-jvm#specification">Reactive Streams</a>
* rules to be followed to the letter. See <a href="https://github.com/ReactiveX/RxJava/wiki/Writing-operators-for-2.0">the wiki</a> for
* some guidance if such custom implementations are necessary.
* <p>
* The recommended way of creating custom {@code Flowable}s is by using the {@link #create(FlowableOnSubscribe, BackpressureStrategy)} factory method:
* <pre><code>
* Flowable<String> source = Flowable.create(new FlowableOnSubscribe<String>() {
* @Override
* public void subscribe(FlowableEmitter<String> emitter) throws Exception {
*
* // signal an item
* emitter.onNext("Hello");
*
* // could be some blocking operation
* Thread.sleep(1000);
*
* // the consumer might have cancelled the flow
* if (emitter.isCancelled()) {
* return;
* }
*
* emitter.onNext("World");
*
* Thread.sleep(1000);
*
* // the end-of-sequence has to be signaled, otherwise the
* // consumers may never finish
* emitter.onComplete();
* }
* }, BackpressureStrategy.BUFFER);
*
* System.out.println("Subscribe!");
*
* source.subscribe(System.out::println);
*
* System.out.println("Done!");
* </code></pre>
* <p>
* RxJava reactive sources, such as {@code Flowable}, are generally synchronous and sequential in nature. In the ReactiveX design, the location (thread)
* where operators run is <i>orthogonal</i> to when the operators can work with data. This means that asynchrony and parallelism
* has to be explicitly expressed via operators such as {@link #subscribeOn(Scheduler)}, {@link #observeOn(Scheduler)} and {@link #parallel()}. In general,
* operators featuring a {@link Scheduler} parameter are introducing this type of asynchrony into the flow.
* <p>
* For more information see the <a href="http://reactivex.io/documentation/Publisher.html">ReactiveX documentation</a>.
*
* @param <T>
* the type of the items emitted by the {@code Flowable}
* @see Observable
* @see ParallelFlowable
* @see io.reactivex.rxjava3.subscribers.DisposableSubscriber
*/
public abstract | directly |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/GoogleStorageComponentBuilderFactory.java | {
"start": 4028,
"end": 17061
} | class ____ use when creating the new buckets.
*
* The option is a:
* <code>com.google.cloud.storage.StorageClass</code> type.
*
* Default: STANDARD
* Group: common
*
* @param storageClass the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder storageClass(com.google.cloud.storage.StorageClass storageClass) {
doSetProperty("storageClass", storageClass);
return this;
}
/**
* The storage client.
*
* The option is a:
* <code>com.google.cloud.storage.Storage</code> type.
*
* Group: common
*
* @param storageClient the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder storageClient(com.google.cloud.storage.Storage storageClient) {
doSetProperty("storageClient", storageClient);
return this;
}
/**
* The Cloud Storage location to use when creating the new buckets.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: US-EAST1
* Group: common
*
* @param storageLocation the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder storageLocation(java.lang.String storageLocation) {
doSetProperty("storageLocation", storageLocation);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Delete objects from the bucket after they have been retrieved. The
* delete is only performed if the Exchange is committed. If a rollback
* occurs, the object is not deleted. If this option is false, then the
* same objects will be retrieve over and over again on the polls.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param deleteAfterRead the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder deleteAfterRead(boolean deleteAfterRead) {
doSetProperty("deleteAfterRead", deleteAfterRead);
return this;
}
/**
* Define the destination bucket where an object must be moved when
* moveAfterRead is set to true.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param destinationBucket the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder destinationBucket(java.lang.String destinationBucket) {
doSetProperty("destinationBucket", destinationBucket);
return this;
}
/**
* The folder or filename to use when downloading the blob. By default,
* this specifies the folder name, and the name of the file is the blob
* name. For example, setting this to mydownload will be the same as
* setting mydownload/${file:name}. You can use dynamic expressions for
* fine-grained control. For example, you can specify
* ${date:now:yyyyMMdd}/${file:name} to store the blob in sub folders
* based on today's day. Only ${file:name} and ${file:name.noext} is
* supported as dynamic tokens for the blob name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param downloadFileName the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder downloadFileName(java.lang.String downloadFileName) {
doSetProperty("downloadFileName", downloadFileName);
return this;
}
/**
* A regular expression to include only blobs with name matching it.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param filter the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder filter(java.lang.String filter) {
doSetProperty("filter", filter);
return this;
}
/**
* If it is true, the Object exchange will be consumed and put into the
* body. If false the Object stream will be put raw into the body and
* the headers will be set with the object metadata.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param includeBody the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder includeBody(boolean includeBody) {
doSetProperty("includeBody", includeBody);
return this;
}
/**
* If it is true, the folders/directories will be consumed. If it is
* false, they will be ignored, and Exchanges will not be created for
* those.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param includeFolders the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder includeFolders(boolean includeFolders) {
doSetProperty("includeFolders", includeFolders);
return this;
}
/**
* Move objects from the origin bucket to a different bucket after they
* have been retrieved. To accomplish the operation the
* destinationBucket option must be set. The copy bucket operation is
* only performed if the Exchange is committed. If a rollback occurs,
* the object is not moved.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param moveAfterRead the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder moveAfterRead(boolean moveAfterRead) {
doSetProperty("moveAfterRead", moveAfterRead);
return this;
}
/**
* The prefix which is used in the BlobListOptions to only consume
* objects we are interested in.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param prefix the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder prefix(java.lang.String prefix) {
doSetProperty("prefix", prefix);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* The Object name inside the bucket.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param objectName the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder objectName(java.lang.String objectName) {
doSetProperty("objectName", objectName);
return this;
}
/**
* Set the operation for the producer.
*
* The option is a:
* <code>org.apache.camel.component.google.storage.GoogleCloudStorageOperations</code> type.
*
* Group: producer
*
* @param operation the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder operation(org.apache.camel.component.google.storage.GoogleCloudStorageOperations operation) {
doSetProperty("operation", operation);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* Used for enabling or disabling all consumer based health checks from
* this component.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: health
*
* @param healthCheckConsumerEnabled the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder healthCheckConsumerEnabled(boolean healthCheckConsumerEnabled) {
doSetProperty("healthCheckConsumerEnabled", healthCheckConsumerEnabled);
return this;
}
/**
* Used for enabling or disabling all producer based health checks from
* this component. Notice: Camel has by default disabled all producer
* based health-checks. You can turn on producer checks globally by
* setting camel.health.producersEnabled=true.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: health
*
* @param healthCheckProducerEnabled the value to set
* @return the dsl builder
*/
default GoogleStorageComponentBuilder healthCheckProducerEnabled(boolean healthCheckProducerEnabled) {
doSetProperty("healthCheckProducerEnabled", healthCheckProducerEnabled);
return this;
}
}
| to |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/sequence/SQLServer16SequenceSupport.java | {
"start": 182,
"end": 484
} | class ____ extends SQLServerSequenceSupport{
public static final SequenceSupport INSTANCE = new SQLServer16SequenceSupport();
@Override
public String getDropSequenceString(String sequenceName) throws MappingException {
return "drop sequence if exists " + sequenceName;
}
}
| SQLServer16SequenceSupport |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java | {
"start": 61239,
"end": 61828
} | class ____ [[%s], [%s], %s]",
typeToCanonicalTypeName(javaMethod.getReturnType()),
typeToCanonicalTypeName(returnType),
targetClass.getCanonicalName(),
methodName,
typesToCanonicalTypeNames(typeParameters)
);
}
String painlessMethodKey = buildPainlessMethodKey(methodName, constructorParameterTypes.length + methodParameterTypes.length);
if (painlessMethodKeysToImportedPainlessMethods.containsKey(painlessMethodKey)) {
throw new IllegalArgumentException(" | binding |
java | google__guice | core/src/com/google/inject/Stage.java | {
"start": 714,
"end": 1317
} | enum ____ {
/**
* We're running in a tool (an IDE plugin for example). We need binding meta data but not a
* functioning Injector. Do not inject members of instances. Do not load eager singletons. Do as
* little as possible so our tools run nice and snappy. Injectors created in this stage cannot be
* used to satisfy injections.
*/
TOOL,
/**
* We want fast startup times at the expense of runtime performance and some up front error
* checking.
*/
DEVELOPMENT,
/** We want to catch errors as early as possible and take performance hits up front. */
PRODUCTION
}
| Stage |
java | elastic__elasticsearch | x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java | {
"start": 725,
"end": 2203
} | class ____ extends AbstractWireSerializingTestCase<CastProcessor> {
public static CastProcessor randomCastProcessor() {
return new CastProcessor(randomFrom(SqlConverter.values()));
}
@Override
protected CastProcessor createTestInstance() {
return randomCastProcessor();
}
@Override
protected Reader<CastProcessor> instanceReader() {
return CastProcessor::new;
}
@Override
protected CastProcessor mutateInstance(CastProcessor instance) {
return new CastProcessor(randomValueOtherThan(instance.converter(), () -> randomFrom(SqlConverter.values())));
}
public void testApply() {
{
CastProcessor proc = new CastProcessor(DefaultConverter.STRING_TO_INT);
assertEquals(null, proc.process(null));
assertEquals(1, proc.process("1"));
Exception e = expectThrows(InvalidArgumentException.class, () -> proc.process("1.2"));
assertEquals("cannot cast [1.2] to [integer]", e.getMessage());
}
{
CastProcessor proc = new CastProcessor(DefaultConverter.BOOL_TO_INT);
assertEquals(null, proc.process(null));
assertEquals(1, proc.process(true));
assertEquals(0, proc.process(false));
}
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(Processors.getNamedWriteables());
}
}
| CastProcessorTests |
java | quarkusio__quarkus | integration-tests/picocli-native/src/main/java/io/quarkus/it/picocli/LocalizedCommandOne.java | {
"start": 199,
"end": 297
} | class ____ {
@CommandLine.Option(names = "--first")
String firstOption;
}
| LocalizedCommandOne |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/preauth/websphere/WebSpherePreAuthenticatedProcessingFilter.java | {
"start": 1433,
"end": 2388
} | class ____ be
* used.
*/
public WebSpherePreAuthenticatedProcessingFilter() {
this(new DefaultWASUsernameAndGroupsExtractor());
}
WebSpherePreAuthenticatedProcessingFilter(WASUsernameAndGroupsExtractor wasHelper) {
this.wasHelper = wasHelper;
setAuthenticationDetailsSource(new WebSpherePreAuthenticatedWebAuthenticationDetailsSource());
}
/**
* Return the WebSphere user name.
*/
@Override
protected @Nullable Object getPreAuthenticatedPrincipal(HttpServletRequest httpRequest) {
Object principal = this.wasHelper.getCurrentUserName();
this.logger.debug(LogMessage.format("PreAuthenticated WebSphere principal: %s", principal));
return principal;
}
/**
* For J2EE container-based authentication there is no generic way to retrieve the
* credentials, as such this method returns a fixed dummy value.
*/
@Override
protected Object getPreAuthenticatedCredentials(HttpServletRequest httpRequest) {
return "N/A";
}
}
| to |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/FastGeometric.java | {
"start": 623,
"end": 701
} | class ____ randomly sampling values from the geometric distribution
*/
public | for |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/annotations/AdditionalManagedResourcesImpl.java | {
"start": 1191,
"end": 2779
} | class ____ implements ManagedResources {
private final Collection<Class<?>> knownClasses;
private final Collection<ClassDetails> classDetails;
private final Collection<String> packageNames;
private final Collection<Binding<? extends JaxbBindableMappingDescriptor>> xmlMappings;
public AdditionalManagedResourcesImpl(
Collection<Class<?>> knownClasses,
Collection<ClassDetails> classDetails,
Collection<String> packageNames,
Collection<Binding<? extends JaxbBindableMappingDescriptor>> xmlMappings) {
this.knownClasses = knownClasses;
this.classDetails = classDetails;
this.packageNames = packageNames;
this.xmlMappings = xmlMappings;
}
@Override
public Collection<ConverterDescriptor<?,?>> getAttributeConverterDescriptors() {
return emptyList();
}
@Override
public Collection<Class<?>> getAnnotatedClassReferences() {
return knownClasses == null ? emptyList() : knownClasses;
}
@Override
public Collection<String> getAnnotatedClassNames() {
if ( isNotEmpty( classDetails ) ) {
return classDetails.stream().map( ClassDetails::getName ).toList();
}
return emptyList();
}
@Override
public Collection<String> getAnnotatedPackageNames() {
return packageNames == null ? emptyList() : packageNames;
}
@Override
public Collection<Binding<? extends JaxbBindableMappingDescriptor>> getXmlMappingBindings() {
if ( xmlMappings == null ) {
return emptyList();
}
return xmlMappings;
}
@Override
public Map<String, Class<?>> getExtraQueryImports() {
return Collections.emptyMap();
}
public static | AdditionalManagedResourcesImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/DefaultGeneratedValueIdentityTest.java | {
"start": 7311,
"end": 7786
} | class ____ implements OnExecutionGenerator {
@Override
public EnumSet<EventType> getEventTypes() {
return EnumSet.of( EventType.INSERT );
}
@Override
public boolean referenceColumnsInSql(Dialect dialect) {
return true;
}
@Override
public boolean writePropertyValue() {
return false;
}
@Override
public String[] getReferencedColumnValues(Dialect dialect) {
return new String[] { "current_timestamp" };
}
}
}
| FunctionCreationValueGeneration |
java | google__guava | android/guava/src/com/google/common/collect/Synchronized.java | {
"start": 25754,
"end": 27581
} | class ____ private and J2KT cannot change return types
* in overrides, so we declare `@Nullable Object[]` as the return type.
*/
return ObjectArrays.toArrayImpl(delegate());
}
}
@Override
@SuppressWarnings("nullness") // b/192354773 in our checker affects toArray declarations
public <T extends @Nullable Object> T[] toArray(T[] array) {
synchronized (mutex) {
return ObjectArrays.toArrayImpl(delegate(), array);
}
}
@Override
public boolean contains(@Nullable Object o) {
synchronized (mutex) {
return Maps.containsEntryImpl(delegate(), o);
}
}
@Override
public boolean containsAll(Collection<?> c) {
synchronized (mutex) {
return Collections2.containsAllImpl(delegate(), c);
}
}
@Override
public boolean equals(@Nullable Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return Sets.equalsImpl(delegate(), o);
}
}
@Override
public boolean remove(@Nullable Object o) {
synchronized (mutex) {
return Maps.removeEntryImpl(delegate(), o);
}
}
@Override
public boolean removeAll(Collection<?> c) {
synchronized (mutex) {
return Iterators.removeAll(delegate().iterator(), c);
}
}
@Override
public boolean retainAll(Collection<?> c) {
synchronized (mutex) {
return Iterators.retainAll(delegate().iterator(), c);
}
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
@VisibleForTesting
static <K extends @Nullable Object, V extends @Nullable Object> Map<K, V> map(
Map<K, V> map, @Nullable Object mutex) {
return new SynchronizedMap<>(map, mutex);
}
private static | is |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/client/DefaultRestTestClientBuilder.java | {
"start": 6221,
"end": 6968
} | class ____<S extends RestTestClient.Builder<S>, M extends MockMvcBuilder>
extends DefaultRestTestClientBuilder<S> implements MockMvcSetupBuilder<S, M> {
private final M mockMvcBuilder;
public AbstractMockMvcSetupBuilder(M mockMvcBuilder) {
this.mockMvcBuilder = mockMvcBuilder;
}
@Override
public <T extends S> T configureServer(Consumer<M> consumer) {
consumer.accept(this.mockMvcBuilder);
return self();
}
@Override
public RestTestClient build() {
MockMvc mockMvc = this.mockMvcBuilder.build();
setClientHttpRequestFactory(new MockMvcClientHttpRequestFactory(mockMvc));
return super.build();
}
}
/**
* Default implementation of {@link StandaloneSetupBuilder}.
*/
static | AbstractMockMvcSetupBuilder |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DockerEndpointBuilderFactory.java | {
"start": 20776,
"end": 26897
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedDockerEndpointProducerBuilder advanced() {
return (AdvancedDockerEndpointProducerBuilder) this;
}
/**
* Email address associated with the user.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param email the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder email(String email) {
doSetProperty("email", email);
return this;
}
/**
* Docker host.
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Default: localhost
* Group: common
*
* @param host the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder host(String host) {
doSetProperty("host", host);
return this;
}
/**
* Docker port.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 2375
* Group: common
*
* @param port the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder port(Integer port) {
doSetProperty("port", port);
return this;
}
/**
* Docker port.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Default: 2375
* Group: common
*
* @param port the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder port(String port) {
doSetProperty("port", port);
return this;
}
/**
* Request timeout for response (in seconds).
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: common
*
* @param requestTimeout the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder requestTimeout(Integer requestTimeout) {
doSetProperty("requestTimeout", requestTimeout);
return this;
}
/**
* Request timeout for response (in seconds).
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: common
*
* @param requestTimeout the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder requestTimeout(String requestTimeout) {
doSetProperty("requestTimeout", requestTimeout);
return this;
}
/**
* Location containing the SSL certificate chain.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param certPath the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder certPath(String certPath) {
doSetProperty("certPath", certPath);
return this;
}
/**
* Password to authenticate with.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* Use HTTPS communication.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param secure the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder secure(boolean secure) {
doSetProperty("secure", secure);
return this;
}
/**
* Use HTTPS communication.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param secure the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder secure(String secure) {
doSetProperty("secure", secure);
return this;
}
/**
* Check TLS.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param tlsVerify the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder tlsVerify(boolean tlsVerify) {
doSetProperty("tlsVerify", tlsVerify);
return this;
}
/**
* Check TLS.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param tlsVerify the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder tlsVerify(String tlsVerify) {
doSetProperty("tlsVerify", tlsVerify);
return this;
}
/**
* User name to authenticate with.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default DockerEndpointProducerBuilder username(String username) {
doSetProperty("username", username);
return this;
}
}
/**
* Advanced builder for endpoint producers for the Docker component.
*/
public | DockerEndpointProducerBuilder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FunctionalInterfaceClashTest.java | {
"start": 9022,
"end": 9486
} | class ____ extends BaseClass {
@Override
void bar(Consumer<String> c) {}
}
""")
.doTest();
}
@Test
public void positive_overriddenAndNewClashingMethod() {
testHelper
.addSourceLines(
"pkg2/BaseClass.java",
"""
package pkg2;
import java.util.function.Function;
import java.util.function.Consumer;
public | DerivedClass |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java | {
"start": 1360,
"end": 1891
} | class ____ extends ActionType<GetCheckpointAction.Response> {
public static final GetCheckpointAction INSTANCE = new GetCheckpointAction();
// note: this is an index action and requires `monitor` or `view_index_metadata`
public static final String NAME = "indices:monitor/transform/checkpoint";
public static final RemoteClusterActionType<Response> REMOTE_TYPE = new RemoteClusterActionType<>(NAME, Response::new);
private GetCheckpointAction() {
super(NAME);
}
public static | GetCheckpointAction |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/ValueSerializerModifierTest.java | {
"start": 2405,
"end": 2988
} | class ____ extends ValueSerializerModifier
{
@Override
public List<BeanPropertyWriter> orderProperties(SerializationConfig config,
BeanDescription.Supplier beanDesc, List<BeanPropertyWriter> beanProperties)
{
TreeMap<String,BeanPropertyWriter> props = new TreeMap<String,BeanPropertyWriter>();
for (BeanPropertyWriter bpw : beanProperties) {
props.put(bpw.getName(), bpw);
}
return new ArrayList<BeanPropertyWriter>(props.values());
}
}
static | ReorderingModifier |
java | spring-projects__spring-framework | spring-oxm/src/test/java/org/springframework/oxm/jaxb/BinaryObject.java | {
"start": 931,
"end": 1632
} | class ____ {
@XmlElement(namespace = "http://springframework.org/spring-ws")
private byte[] bytes;
@XmlElement(namespace = "http://springframework.org/spring-ws")
private DataHandler dataHandler;
@XmlElement(namespace = "http://springframework.org/spring-ws")
@XmlAttachmentRef
private DataHandler swaDataHandler;
public BinaryObject() {
}
public BinaryObject(byte[] bytes, DataHandler dataHandler) {
this.bytes = bytes;
this.dataHandler = dataHandler;
swaDataHandler = dataHandler;
}
public byte[] getBytes() {
return bytes;
}
public DataHandler getDataHandler() {
return dataHandler;
}
public DataHandler getSwaDataHandler() {
return swaDataHandler;
}
}
| BinaryObject |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java | {
"start": 21104,
"end": 35026
} | class ____ {
final ActionListener<?> listener;
Cell next;
Cell(ActionListener<?> listener, Cell next) {
this.listener = listener;
this.next = next;
}
}
private record SuccessResult<T>(T result) {
public void complete(ActionListener<T> listener) {
try {
listener.onResponse(result);
} catch (Exception exception) {
logger.error(Strings.format("exception thrown while handling response in listener [%s]", listener), exception);
assert false : exception;
// nothing more can be done here
}
}
}
private record FailureResult(Exception exception, Exception wrappedException) {
public void complete(ActionListener<?> listener) {
try {
listener.onFailure(wrappedException);
} catch (Exception innerException) {
if (wrappedException != innerException) {
innerException.addSuppressed(wrappedException);
}
logger.error(
Strings.format("exception thrown while handling another exception in listener [%s]", listener),
innerException
);
assert false : innerException;
// nothing more can be done here
}
}
}
/**
* Creates and returns a new {@link SubscribableListener} {@code L} and subscribes {@code nextStep} to this listener such that if this
* listener is completed successfully then the result is discarded and {@code nextStep} is invoked with argument {@code L}. If this
* listener is completed with exception {@code E} then so is {@code L}.
* <p>
* This can be used to construct a sequence of async actions, each ignoring the result of the previous ones:
* <pre>
* l.andThen(l1 -> forkAction1(args1, l1)).andThen(l2 -> forkAction2(args2, l2)).addListener(finalListener);
* </pre>
* After creating this chain, completing {@code l} with a successful response will call {@code forkAction1}, which will on completion
* call {@code forkAction2}, which will in turn pass its response to {@code finalListener}. A failure of any step will bypass the
* remaining steps and ultimately fail {@code finalListener}.
* <p>
* The threading of the {@code nextStep} callback is the same as for listeners added with {@link #addListener}: if this listener is
* already complete then {@code nextStep} is invoked on the thread calling {@link #andThen} and in its thread context, but if this
* listener is incomplete then {@code nextStep} is invoked on the completing thread and in its thread context. In other words, if you
* want to ensure that {@code nextStep} is invoked using a particular executor, then you must do both of:
* <ul>
* <li>Ensure that this {@link SubscribableListener} is always completed using that executor, and</li>
* <li>Invoke {@link #andThen} using that executor.</li>
* </ul>
* <p>
* The listener passed to {@code nextStep} is the returned {@link SubscribableListener}. In particular, it is valid to complete this
* listener more than once, but all results after the first completion will be silently ignored.
*/
public <U> SubscribableListener<U> andThen(CheckedConsumer<ActionListener<U>, ? extends Exception> nextStep) {
return newForked(l -> addListener(l.delegateFailureIgnoreResponseAndWrap(nextStep)));
}
/**
* Creates and returns a new {@link SubscribableListener} {@code L} and subscribes {@code nextStep} to this listener such that if this
* listener is completed successfully with result {@code R} then {@code nextStep} is invoked with arguments {@code L} and {@code R}. If
* this listener is completed with exception {@code E} then so is {@code L}.
* <p>
* This can be used to construct a sequence of async actions, each invoked with the result of the previous one:
* <pre>
* l.andThen((l1, o1) -> forkAction1(o1, args1, l1)).andThen((l2, o2) -> forkAction2(o2, args2, l2)).addListener(finalListener);
* </pre>
* After creating this chain, completing {@code l} with a successful response will pass the response to {@code forkAction1}, which will
* on completion pass its response to {@code forkAction2}, which will in turn pass its response to {@code finalListener}. A failure of
* any step will bypass the remaining steps and ultimately fail {@code finalListener}.
* <p>
* The threading of the {@code nextStep} callback is the same as for listeners added with {@link #addListener}: if this listener is
* already complete then {@code nextStep} is invoked on the thread calling {@link #andThen} and in its thread context, but if this
* listener is incomplete then {@code nextStep} is invoked on the completing thread and in its thread context. In other words, if you
* want to ensure that {@code nextStep} is invoked using a particular executor, then you must do
* both of:
* <ul>
* <li>Ensure that this {@link SubscribableListener} is always completed using that executor, and</li>
* <li>Invoke {@link #andThen} using that executor.</li>
* </ul>
* <p>
* The listener passed to {@code nextStep} is the returned {@link SubscribableListener}. In particular, it is valid to complete this
* listener more than once, but all results after the first completion will be silently ignored.
*/
public <U> SubscribableListener<U> andThen(CheckedBiConsumer<ActionListener<U>, T, ? extends Exception> nextStep) {
return andThen(EsExecutors.DIRECT_EXECUTOR_SERVICE, null, nextStep);
}
/**
* Creates and returns a new {@link SubscribableListener} {@code L} and subscribes {@code nextStep} to this listener such that if this
* listener is completed successfully with result {@code R} then {@code nextStep} is invoked with arguments {@code L} and {@code R}. If
* this listener is completed with exception {@code E} then so is {@code L}.
* <p>
* This can be used to construct a sequence of async actions, each invoked with the result of the previous one:
* <pre>
* l.andThen(x, t, (l1,o1) -> forkAction1(o1,args1,l1)).andThen(x, t, (l2,o2) -> forkAction2(o2,args2,l2)).addListener(finalListener);
* </pre>
* After creating this chain, completing {@code l} with a successful response will pass the response to {@code forkAction1}, which will
* on completion pass its response to {@code forkAction2}, which will in turn pass its response to {@code finalListener}. A failure of
* any step will bypass the remaining steps and ultimately fail {@code finalListener}.
* <p>
* The threading of the {@code nextStep} callback is the same as for listeners added with {@link #addListener}: if this listener is
* already complete then {@code nextStep} is invoked on the thread calling {@link #andThen} and in its thread context, but if this
* listener is incomplete then {@code nextStep} is invoked using {@code executor}, in a thread context captured when {@link #andThen}
* was called. This behaviour may seem complex at first sight but it is like this to allow callers to ensure that {@code nextStep} runs
* using a particular executor much more cheaply than simply always forking its execution. To ensure that {@code nextStep} is invoked
* using a particular executor, do both of the following:
* <ul>
* <li>Pass the desired executor in as {@code executor}, and</li>
* <li>Invoke {@link #andThen} using that executor.</li>
* </ul>
* <p>
* If you really want to fork the execution of the next step in the sequence to a specific executor in all circumstances, explicitly
* call {@link Executor#execute} within {@code nextStep} yourself. But do note that this can be surprisingly expensive, and it's almost
* always not the right approach, so it is deliberate that there is no convenient method on {@link SubscribableListener} which does this
* for you.
* <p>
* If {@code executor} rejects the execution of {@code nextStep} then the result is discarded and the returned listener is completed
* with a rejection exception on the thread which completes this listener. Likewise if this listener is completed exceptionally but
* {@code executor} rejects the execution of the completion of the returned listener then the returned listener is completed with a
* rejection exception on the thread which completes this listener.
* <p>
* The listener passed to {@code nextStep} is the returned {@link SubscribableListener}. In particular, it is valid to complete this
* listener more than once, but all results after the first completion will be silently ignored.
*/
public <U> SubscribableListener<U> andThen(
Executor executor,
@Nullable ThreadContext threadContext,
CheckedBiConsumer<ActionListener<U>, T, ? extends Exception> nextStep
) {
return newForked(l -> addListener(l.delegateFailureAndWrap(nextStep), executor, threadContext));
}
/**
* Creates and returns a new {@link SubscribableListener} {@code L} such that if this listener is completed successfully with result
* {@code R} then {@code fn} is invoked with argument {@code R}, and {@code L} is completed with the result of that invocation. If this
* listener is completed exceptionally, or {@code fn} throws an exception, then {@code L} is completed with that exception.
* <p>
* This is essentially a shorthand for a call to {@link #andThen} with a {@code nextStep} argument that is fully synchronous.
* <p>
* The threading of the {@code fn} invocation is the same as for listeners added with {@link #addListener}: if this listener is
* already complete then {@code fn} is invoked on the thread calling {@link #andThenApply} and in its thread context, but if this
* listener is incomplete then {@code fn} is invoked on the thread, and in the thread context, on which this listener is completed.
*/
public <U> SubscribableListener<U> andThenApply(CheckedFunction<T, U, Exception> fn) {
return newForked(l -> addListener(l.map(fn)));
}
/**
* Creates and returns a new {@link SubscribableListener} {@code L} such that if this listener is completed successfully with result
* {@code R} then {@code consumer} is applied to argument {@code R}, and {@code L} is completed with {@code null} when {@code
* consumer} returns. If this listener is completed exceptionally, or {@code consumer} throws an exception, then {@code L} is
* completed with that exception.
* <p>
* This is essentially a shorthand for a call to {@link #andThen} with a {@code nextStep} argument that is fully synchronous.
* <p>
* The threading of the {@code consumer} invocation is the same as for listeners added with {@link #addListener}: if this listener is
* already complete then {@code consumer} is invoked on the thread calling {@link #andThenAccept} and in its thread context, but if
* this listener is incomplete then {@code consumer} is invoked on the thread, and in the thread context, on which this listener is
* completed.
*/
public SubscribableListener<Void> andThenAccept(CheckedConsumer<T, Exception> consumer) {
return newForked(l -> addListener(l.map(r -> {
consumer.accept(r);
return null;
})));
}
/**
* Adds a timeout to this listener, such that if the timeout elapses before the listener is completed then it will be completed with an
* {@link ElasticsearchTimeoutException}.
* <p>
* The process which is racing against this timeout should stop and clean up promptly when the timeout occurs to avoid unnecessary
* work. For instance, it could check that the race is not lost by calling {@link #isDone} whenever appropriate, or it could subscribe
* another listener which performs any necessary cleanup steps.
*/
public void addTimeout(TimeValue timeout, ThreadPool threadPool, Executor timeoutExecutor) {
if (isDone()) {
return;
}
addListener(ActionListener.running(scheduleTimeout(timeout, threadPool, timeoutExecutor)));
}
private Runnable scheduleTimeout(TimeValue timeout, ThreadPool threadPool, Executor timeoutExecutor) {
try {
final var cancellable = threadPool.schedule(
() -> onFailure(new ElasticsearchTimeoutException(Strings.format("timed out after [%s/%dms]", timeout, timeout.millis()))),
timeout,
timeoutExecutor
);
return cancellable::cancel;
} catch (Exception e) {
onFailure(e);
return () -> {};
}
}
private static final VarHandle VH_STATE_FIELD;
static {
try {
VH_STATE_FIELD = MethodHandles.lookup()
.in(SubscribableListener.class)
.findVarHandle(SubscribableListener.class, "state", Object.class);
} catch (NoSuchFieldException | IllegalAccessException e) {
throw new RuntimeException(e);
}
}
private Object compareAndExchangeState(Object expectedValue, Object newValue) {
return VH_STATE_FIELD.compareAndExchange(this, expectedValue, newValue);
}
@SuppressWarnings("rawtypes")
private static final SubscribableListener NULL_SUCCESS = newSucceeded(null);
/**
* Same as {@link #newSucceeded(Object)} but always returns the same instance with result value {@code null}.
*/
@SuppressWarnings("unchecked")
public static <T> SubscribableListener<T> nullSuccess() {
return NULL_SUCCESS;
}
}
| Cell |
java | quarkusio__quarkus | extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/interceptors/MyFirstInterceptor.java | {
"start": 518,
"end": 1733
} | class ____ implements ServerInterceptor, Prioritized {
public static Context.Key<String> KEY_1 = Context.key("X-TEST_1");
public static Context.Key<Integer> KEY_2 = Context.keyWithDefault("X-TEST_2", -1);
private volatile long callTime;
private AtomicInteger counter = new AtomicInteger();
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> serverCall,
Metadata metadata, ServerCallHandler<ReqT, RespT> serverCallHandler) {
Context ctx = Context.current().withValue(KEY_1, "k1").withValue(KEY_2, counter.incrementAndGet());
ctx.attach(); // Make sure the context is attached to the current duplicated context.
return Contexts.interceptCall(ctx, new ForwardingServerCall.SimpleForwardingServerCall<>(serverCall) {
@Override
public void close(Status status, Metadata trailers) {
callTime = System.nanoTime();
super.close(status, trailers);
}
}, metadata, serverCallHandler);
}
public long getLastCall() {
return callTime;
}
@Override
public int getPriority() {
return 10;
}
}
| MyFirstInterceptor |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.