language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-kafka/src/test/java/org/apache/camel/component/kafka/integration/KafkaSagaIT.java | {
"start": 2648,
"end": 3048
} | class ____ {
public static String id;
public static Boolean isSame = false;
private SagaBean() {
}
public static void checkId(Exchange exchange) {
String sagaId = exchange.getIn().getHeader(Exchange.SAGA_LONG_RUNNING_ACTION, String.class);
if (id == null) {
id = sagaId;
} else {
isSame = id.equals(sagaId);
}
}
}
| SagaBean |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsRecoveryTests.java | {
"start": 2662,
"end": 12657
} | class ____ extends EngineTestCase {
private final Model model1;
private final Model model2;
private final ChunkingSettings chunkingSettings;
private final boolean useSynthetic;
private final boolean useIncludesExcludes;
public SemanticInferenceMetadataFieldsRecoveryTests(boolean useSynthetic, boolean useIncludesExcludes) {
this.model1 = TestModel.createRandomInstance(TaskType.TEXT_EMBEDDING, List.of(SimilarityMeasure.DOT_PRODUCT));
this.model2 = TestModel.createRandomInstance(TaskType.SPARSE_EMBEDDING);
this.chunkingSettings = generateRandomChunkingSettings();
this.useSynthetic = useSynthetic;
this.useIncludesExcludes = useIncludesExcludes;
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return List.of(new Object[] { false, false }, new Object[] { false, true }, new Object[] { true, false });
}
@Override
protected List<MapperPlugin> extraMappers() {
return List.of(new InferencePlugin(Settings.EMPTY));
}
@Override
protected Settings indexSettings() {
var builder = Settings.builder().put(super.indexSettings());
if (useSynthetic) {
builder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name());
builder.put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true);
}
return builder.build();
}
@Override
protected String defaultMapping() {
try {
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
if (useIncludesExcludes) {
builder.startObject(SourceFieldMapper.NAME).array("excludes", "field").endObject();
}
builder.field("dynamic", false);
builder.startObject("properties");
builder.startObject("field");
builder.field("type", "keyword");
builder.endObject();
builder.startObject("semantic_1");
builder.field("type", "semantic_text");
builder.field("inference_id", model1.getInferenceEntityId());
builder.startObject("model_settings");
builder.field("task_type", model1.getTaskType().name());
builder.field("dimensions", model1.getServiceSettings().dimensions());
builder.field("similarity", model1.getServiceSettings().similarity().name());
builder.field("element_type", model1.getServiceSettings().elementType().name());
builder.field("service", model1.getConfigurations().getService());
builder.endObject();
if (chunkingSettings != null) {
builder.field("chunking_settings");
chunkingSettings.toXContent(builder, null);
}
builder.endObject();
builder.startObject("semantic_2");
builder.field("type", "semantic_text");
builder.field("inference_id", model2.getInferenceEntityId());
builder.startObject("model_settings");
builder.field("task_type", model2.getTaskType().name());
builder.field("service", model2.getConfigurations().getService());
builder.endObject();
if (chunkingSettings != null) {
builder.field("chunking_settings");
chunkingSettings.toXContent(builder, null);
}
builder.endObject();
builder.endObject();
builder.endObject();
return BytesReference.bytes(builder).utf8ToString();
} catch (IOException exc) {
throw new RuntimeException(exc);
}
}
public void testSnapshotRecovery() throws IOException {
List<Translog.Index> expectedOperations = new ArrayList<>();
int size = randomIntBetween(10, 50);
for (int i = 0; i < size; i++) {
var source = randomSource();
var sourceToParse = new SourceToParse(Integer.toString(i), source, XContentType.JSON, null);
var doc = mapperService.documentMapper().parse(sourceToParse);
assertNull(doc.dynamicMappingsUpdate());
if (useSynthetic) {
assertNull(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_NAME));
assertNotNull(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME));
} else {
if (useIncludesExcludes) {
assertNotNull(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_NAME));
var originalSource = new BytesArray(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_NAME).binaryValue());
var map = XContentHelper.convertToMap(originalSource, false, XContentType.JSON);
assertThat(map.v2().size(), equalTo(1));
assertNull(map.v2().remove(InferenceMetadataFieldsMapper.NAME));
} else {
assertNull(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_NAME));
}
}
var op = indexForDoc(doc);
var result = engine.index(op);
expectedOperations.add(
new Translog.Index(
result.getId(),
result.getSeqNo(),
result.getTerm(),
result.getVersion(),
op.source(),
op.routing(),
op.getAutoGeneratedIdTimestamp()
)
);
if (frequently()) {
engine.flush();
}
}
engine.flush();
var searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
try (
var snapshot = newRandomSnapshot(
engine.config().getMapperService(),
searcher,
SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE,
0,
size - 1,
true,
randomBoolean(),
randomBoolean(),
IndexVersion.current()
)
) {
var asserter = TranslogOperationAsserter.withEngineConfig(engine.config());
for (int i = 0; i < size; i++) {
var op = snapshot.next();
assertThat(op.opType(), equalTo(Translog.Operation.Type.INDEX));
Translog.Index indexOp = (Translog.Index) op;
asserter.assertSameIndexOperation(indexOp, expectedOperations.get(i));
}
assertNull(snapshot.next());
}
}
private Translog.Snapshot newRandomSnapshot(
MapperService mapperService,
Engine.Searcher engineSearcher,
int searchBatchSize,
long fromSeqNo,
long toSeqNo,
boolean requiredFullRange,
boolean singleConsumer,
boolean accessStats,
IndexVersion indexVersionCreated
) throws IOException {
if (useSynthetic) {
return new LuceneSyntheticSourceChangesSnapshot(
mapperService,
engineSearcher,
searchBatchSize,
randomLongBetween(0, ByteSizeValue.ofBytes(Integer.MAX_VALUE).getBytes()),
fromSeqNo,
toSeqNo,
requiredFullRange,
accessStats,
indexVersionCreated
);
} else {
return new LuceneChangesSnapshot(
mapperService,
engineSearcher,
searchBatchSize,
fromSeqNo,
toSeqNo,
requiredFullRange,
singleConsumer,
accessStats,
indexVersionCreated
);
}
}
private BytesReference randomSource() throws IOException {
var builder = JsonXContent.contentBuilder().startObject();
builder.field("field", randomAlphaOfLengthBetween(10, 30));
if (rarely()) {
return BytesReference.bytes(builder.endObject());
}
SemanticTextFieldMapperTests.addSemanticTextInferenceResults(
false,
builder,
List.of(
randomSemanticText(false, "semantic_2", model2, chunkingSettings, randomInputs(), XContentType.JSON),
randomSemanticText(false, "semantic_1", model1, chunkingSettings, randomInputs(), XContentType.JSON)
)
);
builder.endObject();
return BytesReference.bytes(builder);
}
private static SemanticTextField randomSemanticText(
boolean useLegacyFormat,
String fieldName,
Model model,
ChunkingSettings chunkingSettings,
List<String> inputs,
XContentType contentType
) throws IOException {
ChunkedInference results = switch (model.getTaskType()) {
case TEXT_EMBEDDING -> switch (model.getServiceSettings().elementType()) {
case FLOAT, BFLOAT16 -> randomChunkedInferenceEmbeddingFloat(model, inputs);
case BYTE, BIT -> randomChunkedInferenceEmbeddingByte(model, inputs);
};
case SPARSE_EMBEDDING -> randomChunkedInferenceEmbeddingSparse(inputs, false);
default -> throw new AssertionError("invalid task type: " + model.getTaskType().name());
};
return semanticTextFieldFromChunkedInferenceResults(
useLegacyFormat,
fieldName,
model,
chunkingSettings,
inputs,
results,
contentType
);
}
private static List<String> randomInputs() {
int size = randomIntBetween(1, 5);
List<String> resp = new ArrayList<>();
for (int i = 0; i < size; i++) {
resp.add(randomAlphaOfLengthBetween(10, 50));
}
return resp;
}
}
| SemanticInferenceMetadataFieldsRecoveryTests |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java | {
"start": 1134,
"end": 6824
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ReplaceEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator str;
private final EvalOperator.ExpressionEvaluator regex;
private final EvalOperator.ExpressionEvaluator newStr;
private final DriverContext driverContext;
private Warnings warnings;
public ReplaceEvaluator(Source source, EvalOperator.ExpressionEvaluator str,
EvalOperator.ExpressionEvaluator regex, EvalOperator.ExpressionEvaluator newStr,
DriverContext driverContext) {
this.source = source;
this.str = str;
this.regex = regex;
this.newStr = newStr;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) {
try (BytesRefBlock regexBlock = (BytesRefBlock) regex.eval(page)) {
try (BytesRefBlock newStrBlock = (BytesRefBlock) newStr.eval(page)) {
BytesRefVector strVector = strBlock.asVector();
if (strVector == null) {
return eval(page.getPositionCount(), strBlock, regexBlock, newStrBlock);
}
BytesRefVector regexVector = regexBlock.asVector();
if (regexVector == null) {
return eval(page.getPositionCount(), strBlock, regexBlock, newStrBlock);
}
BytesRefVector newStrVector = newStrBlock.asVector();
if (newStrVector == null) {
return eval(page.getPositionCount(), strBlock, regexBlock, newStrBlock);
}
return eval(page.getPositionCount(), strVector, regexVector, newStrVector);
}
}
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += str.baseRamBytesUsed();
baseRamBytesUsed += regex.baseRamBytesUsed();
baseRamBytesUsed += newStr.baseRamBytesUsed();
return baseRamBytesUsed;
}
public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock regexBlock,
BytesRefBlock newStrBlock) {
try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef strScratch = new BytesRef();
BytesRef regexScratch = new BytesRef();
BytesRef newStrScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
switch (strBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
switch (regexBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
switch (newStrBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
BytesRef str = strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch);
BytesRef regex = regexBlock.getBytesRef(regexBlock.getFirstValueIndex(p), regexScratch);
BytesRef newStr = newStrBlock.getBytesRef(newStrBlock.getFirstValueIndex(p), newStrScratch);
try {
result.appendBytesRef(Replace.process(str, regex, newStr));
} catch (IllegalArgumentException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
public BytesRefBlock eval(int positionCount, BytesRefVector strVector, BytesRefVector regexVector,
BytesRefVector newStrVector) {
try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef strScratch = new BytesRef();
BytesRef regexScratch = new BytesRef();
BytesRef newStrScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
BytesRef str = strVector.getBytesRef(p, strScratch);
BytesRef regex = regexVector.getBytesRef(p, regexScratch);
BytesRef newStr = newStrVector.getBytesRef(p, newStrScratch);
try {
result.appendBytesRef(Replace.process(str, regex, newStr));
} catch (IllegalArgumentException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
@Override
public String toString() {
return "ReplaceEvaluator[" + "str=" + str + ", regex=" + regex + ", newStr=" + newStr + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(str, regex, newStr);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | ReplaceEvaluator |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/nested/TestExecutionListenersNestedTests.java | {
"start": 4293,
"end": 4520
} | class ____ extends AbstractTestExecutionListener {
protected abstract String name();
@Override
public final void beforeTestClass(TestContext testContext) {
listeners.add(name());
}
}
static | BaseTestExecutionListener |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/DeploymentOptions.java | {
"start": 790,
"end": 9784
} | class ____ {
public static final ThreadingModel DEFAULT_MODE = ThreadingModel.EVENT_LOOP;
public static final boolean DEFAULT_WORKER = false;
public static final boolean DEFAULT_HA = false;
public static final int DEFAULT_INSTANCES = 1;
private JsonObject config;
private ThreadingModel threadingModel;
private boolean ha;
private int instances;
private ClassLoader classLoader;
private String workerPoolName;
private int workerPoolSize;
private long maxWorkerExecuteTime;
private TimeUnit maxWorkerExecuteTimeUnit;
/**
* Default constructor
*/
public DeploymentOptions() {
this.threadingModel = DEFAULT_MODE;
this.config = null;
this.ha = DEFAULT_HA;
this.instances = DEFAULT_INSTANCES;
this.workerPoolSize = VertxOptions.DEFAULT_WORKER_POOL_SIZE;
this.maxWorkerExecuteTime = VertxOptions.DEFAULT_MAX_WORKER_EXECUTE_TIME;
this.maxWorkerExecuteTimeUnit = VertxOptions.DEFAULT_MAX_WORKER_EXECUTE_TIME_UNIT;
}
/**
* Copy constructor
*
* @param other the instance to copy
*/
public DeploymentOptions(DeploymentOptions other) {
this.config = other.getConfig() == null ? null : other.getConfig().copy();
this.threadingModel = other.getThreadingModel();
this.ha = other.isHa();
this.instances = other.instances;
this.workerPoolName = other.workerPoolName;
this.workerPoolSize = other.workerPoolSize;
this.maxWorkerExecuteTime = other.maxWorkerExecuteTime;
this.maxWorkerExecuteTimeUnit = other.maxWorkerExecuteTimeUnit;
}
/**
* Constructor for creating a instance from JSON
*
* @param json the JSON
*/
public DeploymentOptions(JsonObject json) {
this();
DeploymentOptionsConverter.fromJson(json, this);
}
/**
* Get the JSON configuration that will be passed to the verticle(s) when deployed.
*
* @return the JSON config
*/
public JsonObject getConfig() {
return config;
}
/**
* Set the JSON configuration that will be passed to the verticle(s) when it's deployed
*
* @param config the JSON config
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setConfig(JsonObject config) {
this.config = config;
return this;
}
/**
* Which threading model the verticle(s) should use?
*
* @return the verticle threading model
*/
public ThreadingModel getThreadingModel() {
return threadingModel;
}
/**
* Set the verticle(s) verticle(s) threading model, e.g. a worker or a virtual thread verticle
*
* @param threadingModel the threading model
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setThreadingModel(ThreadingModel threadingModel) {
this.threadingModel = threadingModel;
return this;
}
/**
* Will the verticle(s) be deployed as HA (highly available) ?
*
* @return true if HA, false otherwise
*/
public boolean isHa() {
return ha;
}
/**
* Set whether the verticle(s) will be deployed as HA.
*
* @param ha true if to be deployed as HA, false otherwise
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setHa(boolean ha) {
this.ha = ha;
return this;
}
/**
* Get the number of instances that should be deployed.
*
* @return the number of instances
*/
public int getInstances() {
return instances;
}
/**
* Set the number of instances that should be deployed.
*
* @param instances the number of instances
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setInstances(int instances) {
this.instances = instances;
return this;
}
/**
* @return the worker pool name
*/
public String getWorkerPoolName() {
return workerPoolName;
}
/**
* Set the worker pool name to use for this verticle. When no name is set, the Vert.x
* worker pool will be used, when a name is set, the verticle will use a named worker pool.
*
* @param workerPoolName the worker pool name
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setWorkerPoolName(String workerPoolName) {
this.workerPoolName = workerPoolName;
return this;
}
/**
* Get the maximum number of worker threads to be used by the worker pool when the verticle is deployed
* with a {@link #setWorkerPoolName}.
* <p>
* Worker threads are used for running blocking code and worker verticles.
* <p>
* When the verticle does not use a {@link #getWorkerPoolName() named worker pool}, this option has no effect.
*
* @return the maximum number of worker threads
*/
public int getWorkerPoolSize() {
return workerPoolSize;
}
/**
* Set the maximum number of worker threads to be used by the Vert.x instance.
* <p>
* When the verticle does not use a {@link #getWorkerPoolName() named worker pool}, this option has no effect.
*
* @param workerPoolSize the number of threads
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setWorkerPoolSize(int workerPoolSize) {
if (workerPoolSize < 1) {
throw new IllegalArgumentException("size must be > 0");
}
this.workerPoolSize = workerPoolSize;
return this;
}
/**
* Get the value of max worker execute time, in {@link DeploymentOptions#setMaxWorkerExecuteTimeUnit maxWorkerExecuteTimeUnit}.
* <p>
* Vert.x will automatically log a warning if it detects that worker threads haven't returned within this time.
* <p>
* This can be used to detect where the user is blocking a worker thread for too long. Although worker threads
* can be blocked longer than event loop threads, they shouldn't be blocked for long periods of time.
* <p>
* When the verticle does not use a {@link #getWorkerPoolName() named worker pool}, this option has no effect.
*
* @return The value of max worker execute time, the default value of {@link DeploymentOptions#setMaxWorkerExecuteTimeUnit} {@code maxWorkerExecuteTimeUnit} is {@link TimeUnit#NANOSECONDS}
*/
public long getMaxWorkerExecuteTime() {
return maxWorkerExecuteTime;
}
/**
* Sets the value of max worker execute time, in {@link DeploymentOptions#setMaxWorkerExecuteTimeUnit maxWorkerExecuteTimeUnit}.
* <p>
* The default value of {@link DeploymentOptions#setMaxWorkerExecuteTimeUnit maxWorkerExecuteTimeUnit} is {@link TimeUnit#NANOSECONDS}
* <p>
* When the verticle does not use a {@link #getWorkerPoolName() named worker pool}, this option has no effect.
*
* @param maxWorkerExecuteTime the value of max worker execute time, in in {@link DeploymentOptions#setMaxWorkerExecuteTimeUnit maxWorkerExecuteTimeUnit}.
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setMaxWorkerExecuteTime(long maxWorkerExecuteTime) {
if (maxWorkerExecuteTime < 1) {
throw new IllegalArgumentException("maxExecuteTime must be > 0");
}
this.maxWorkerExecuteTime = maxWorkerExecuteTime;
return this;
}
/**
* When the verticle does not use a {@link #getWorkerPoolName() named worker pool}, this option has no effect.
*
* @return the time unit of {@code maxWorkerExecuteTime}
*/
public TimeUnit getMaxWorkerExecuteTimeUnit() {
return maxWorkerExecuteTimeUnit;
}
/**
* Set the time unit of {@code maxWorkerExecuteTime}
* <p>
* When the verticle does not use a {@link #getWorkerPoolName() named worker pool}, this option has no effect.
*
* @param maxWorkerExecuteTimeUnit the time unit of {@code maxWorkerExecuteTime}
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setMaxWorkerExecuteTimeUnit(TimeUnit maxWorkerExecuteTimeUnit) {
this.maxWorkerExecuteTimeUnit = maxWorkerExecuteTimeUnit;
return this;
}
/**
* @return the classloader used for deploying the Verticle
*/
public ClassLoader getClassLoader() {
return classLoader;
}
/**
* Set the classloader to use for deploying the Verticle.
*
* <p> The {@code VerticleFactory} will use this classloader for creating the Verticle
* and the Verticle {@link io.vertx.core.Context} will set this classloader as context
* classloader for the tasks execution on context.
*
* <p> By default no classloader is required and the deployment will use the current thread context
* classloader.
*
* @param classLoader the loader to use
* @return a reference to this, so the API can be used fluently
*/
public DeploymentOptions setClassLoader(ClassLoader classLoader) {
this.classLoader = classLoader;
return this;
}
/**
* Convert this to JSON
*
* @return the JSON
*/
public JsonObject toJson() {
JsonObject json = new JsonObject();
DeploymentOptionsConverter.toJson(this, json);
return json;
}
}
| DeploymentOptions |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java | {
"start": 4742,
"end": 19328
} | class ____ use as default.
*/
public static Class<? extends DelegationTokenAuthenticator>
getDefaultDelegationTokenAuthenticator() {
return DEFAULT_AUTHENTICATOR;
}
private static DelegationTokenAuthenticator
obtainDelegationTokenAuthenticator(DelegationTokenAuthenticator dta,
ConnectionConfigurator connConfigurator) {
try {
if (dta == null) {
dta = DEFAULT_AUTHENTICATOR.newInstance();
dta.setConnectionConfigurator(connConfigurator);
}
return dta;
} catch (Exception ex) {
throw new IllegalArgumentException(ex);
}
}
private boolean useQueryStringforDelegationToken = false;
/**
* Creates an <code>DelegationTokenAuthenticatedURL</code>.
* <p>
* An instance of the default {@link DelegationTokenAuthenticator} will be
* used.
*/
public DelegationTokenAuthenticatedURL() {
this(null, null);
}
/**
* Creates an <code>DelegationTokenAuthenticatedURL</code>.
*
* @param authenticator the {@link DelegationTokenAuthenticator} instance to
* use, if <code>null</code> the default one will be used.
*/
public DelegationTokenAuthenticatedURL(
DelegationTokenAuthenticator authenticator) {
this(authenticator, null);
}
/**
* Creates an <code>DelegationTokenAuthenticatedURL</code> using the default
* {@link DelegationTokenAuthenticator} class.
*
* @param connConfigurator a connection configurator.
*/
public DelegationTokenAuthenticatedURL(
ConnectionConfigurator connConfigurator) {
this(null, connConfigurator);
}
/**
* Creates an <code>DelegationTokenAuthenticatedURL</code>.
*
* @param authenticator the {@link DelegationTokenAuthenticator} instance to
* use, if <code>null</code> the default one will be used.
* @param connConfigurator a connection configurator.
*/
public DelegationTokenAuthenticatedURL(
DelegationTokenAuthenticator authenticator,
ConnectionConfigurator connConfigurator) {
super(obtainDelegationTokenAuthenticator(authenticator, connConfigurator),
connConfigurator);
}
/**
* Sets if delegation token should be transmitted in the URL query string.
* By default it is transmitted using the
* {@link DelegationTokenAuthenticator#DELEGATION_TOKEN_HEADER} HTTP header.
* <p>
* This method is provided to enable WebHDFS backwards compatibility.
*
* @param useQueryString <code>TRUE</code> if the token is transmitted in the
* URL query string, <code>FALSE</code> if the delegation token is transmitted
* using the {@link DelegationTokenAuthenticator#DELEGATION_TOKEN_HEADER} HTTP
* header.
*/
@Deprecated
protected void setUseQueryStringForDelegationToken(boolean useQueryString) {
useQueryStringforDelegationToken = useQueryString;
}
/**
* Returns if delegation token is transmitted as a HTTP header.
*
* @return <code>TRUE</code> if the token is transmitted in the URL query
* string, <code>FALSE</code> if the delegation token is transmitted using the
* {@link DelegationTokenAuthenticator#DELEGATION_TOKEN_HEADER} HTTP header.
*/
public boolean useQueryStringForDelegationToken() {
return useQueryStringforDelegationToken;
}
/**
* Returns an authenticated {@link HttpURLConnection}, it uses a Delegation
* Token only if the given auth token is an instance of {@link Token} and
* it contains a Delegation Token, otherwise use the configured
* {@link DelegationTokenAuthenticator} to authenticate the connection.
*
* @param url the URL to connect to. Only HTTP/S URLs are supported.
* @param token the authentication token being used for the user.
* @return an authenticated {@link HttpURLConnection}.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
*/
@Override
public HttpURLConnection openConnection(URL url, AuthenticatedURL.Token token)
throws IOException, AuthenticationException {
return (token instanceof Token) ? openConnection(url, (Token) token)
: super.openConnection(url ,token);
}
/**
* Returns an authenticated {@link HttpURLConnection}. If the Delegation
* Token is present, it will be used taking precedence over the configured
* <code>Authenticator</code>.
*
* @param url the URL to connect to. Only HTTP/S URLs are supported.
* @param token the authentication token being used for the user.
* @return an authenticated {@link HttpURLConnection}.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
*/
public HttpURLConnection openConnection(URL url, Token token)
throws IOException, AuthenticationException {
return openConnection(url, token, null);
}
private URL augmentURL(URL url, Map<String, String> params)
throws IOException {
if (params != null && params.size() > 0) {
String urlStr = url.toExternalForm();
StringBuilder sb = new StringBuilder(urlStr);
String separator = (urlStr.contains("?")) ? "&" : "?";
for (Map.Entry<String, String> param : params.entrySet()) {
sb.append(separator).append(param.getKey()).append("=").append(
param.getValue());
separator = "&";
}
url = new URL(sb.toString());
}
return url;
}
/**
* Returns an authenticated {@link HttpURLConnection}. If the Delegation
* Token is present, it will be used taking precedence over the configured
* <code>Authenticator</code>. If the <code>doAs</code> parameter is not NULL,
* the request will be done on behalf of the specified <code>doAs</code> user.
*
* @param url the URL to connect to. Only HTTP/S URLs are supported.
* @param token the authentication token being used for the user.
* @param doAs user to do the the request on behalf of, if NULL the request is
* as self.
* @return an authenticated {@link HttpURLConnection}.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
*/
@SuppressWarnings("unchecked")
public HttpURLConnection openConnection(URL url, Token token, String doAs)
throws IOException, AuthenticationException {
Preconditions.checkNotNull(url, "url");
Preconditions.checkNotNull(token, "token");
Map<String, String> extraParams = new HashMap<String, String>();
org.apache.hadoop.security.token.Token<? extends TokenIdentifier> dToken
= null;
LOG.debug("Connecting to url {} with token {} as {}", url, token, doAs);
// if we have valid auth token, it takes precedence over a delegation token
// and we don't even look for one.
if (!token.isSet()) {
// delegation token
Credentials creds = UserGroupInformation.getCurrentUser().
getCredentials();
LOG.debug("Token not set, looking for delegation token. Creds:{},"
+ " size:{}", creds.getAllTokens(), creds.numberOfTokens());
if (!creds.getAllTokens().isEmpty()) {
dToken = selectDelegationToken(url, creds);
if (dToken != null) {
if (useQueryStringForDelegationToken()) {
// delegation token will go in the query string, injecting it
extraParams.put(
KerberosDelegationTokenAuthenticator.DELEGATION_PARAM,
dToken.encodeToUrlString());
} else {
// delegation token will go as request header, setting it in the
// auth-token to ensure no authentication handshake is triggered
// (if we have a delegation token, we are authenticated)
// the delegation token header is injected in the connection request
// at the end of this method.
token.delegationToken = (org.apache.hadoop.security.token.Token
<AbstractDelegationTokenIdentifier>) dToken;
}
}
}
}
// proxyuser
if (doAs != null) {
extraParams.put(DO_AS, URLEncoder.encode(doAs, "UTF-8"));
}
url = augmentURL(url, extraParams);
HttpURLConnection conn = super.openConnection(url, token);
if (!token.isSet() && !useQueryStringForDelegationToken() && dToken != null) {
// injecting the delegation token header in the connection request
conn.setRequestProperty(
DelegationTokenAuthenticator.DELEGATION_TOKEN_HEADER,
dToken.encodeToUrlString());
}
return conn;
}
/**
* Select a delegation token from all tokens in credentials, based on url.
*
* @param url url.
* @param creds credentials.
* @return token.
*/
@InterfaceAudience.Private
public org.apache.hadoop.security.token.Token<? extends TokenIdentifier>
selectDelegationToken(URL url, Credentials creds) {
final InetSocketAddress serviceAddr = new InetSocketAddress(url.getHost(),
url.getPort());
final Text service = SecurityUtil.buildTokenService(serviceAddr);
org.apache.hadoop.security.token.Token<? extends TokenIdentifier> dToken =
creds.getToken(service);
LOG.debug("Using delegation token {} from service:{}", dToken, service);
return dToken;
}
/**
* Requests a delegation token using the configured <code>Authenticator</code>
* for authentication.
*
* @param url the URL to get the delegation token from. Only HTTP/S URLs are
* supported.
* @param token the authentication token being used for the user where the
* Delegation token will be stored.
* @param renewer the renewer user.
* @return a delegation token.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
*/
public org.apache.hadoop.security.token.Token<AbstractDelegationTokenIdentifier>
getDelegationToken(URL url, Token token, String renewer)
throws IOException, AuthenticationException {
return getDelegationToken(url, token, renewer, null);
}
/**
* Requests a delegation token using the configured <code>Authenticator</code>
* for authentication.
*
* @param url the URL to get the delegation token from. Only HTTP/S URLs are
* supported.
* @param token the authentication token being used for the user where the
* Delegation token will be stored.
* @param renewer the renewer user.
* @param doAsUser the user to do as, which will be the token owner.
* @return a delegation token.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
*/
public org.apache.hadoop.security.token.Token<AbstractDelegationTokenIdentifier>
getDelegationToken(URL url, Token token, String renewer, String doAsUser)
throws IOException, AuthenticationException {
Preconditions.checkNotNull(url, "url");
Preconditions.checkNotNull(token, "token");
try {
token.delegationToken =
((KerberosDelegationTokenAuthenticator) getAuthenticator()).
getDelegationToken(url, token, renewer, doAsUser);
return token.delegationToken;
} catch (IOException ex) {
token.delegationToken = null;
throw ex;
}
}
/**
* Renews a delegation token from the server end-point using the
* configured <code>Authenticator</code> for authentication.
*
* @param url the URL to renew the delegation token from. Only HTTP/S URLs are
* supported.
* @param token the authentication token with the Delegation Token to renew.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
* @return delegation token long value.
*/
public long renewDelegationToken(URL url, Token token)
throws IOException, AuthenticationException {
return renewDelegationToken(url, token, null);
}
/**
* Renews a delegation token from the server end-point using the
* configured <code>Authenticator</code> for authentication.
*
* @param url the URL to renew the delegation token from. Only HTTP/S URLs are
* supported.
* @param token the authentication token with the Delegation Token to renew.
* @param doAsUser the user to do as, which will be the token owner.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
* @return delegation token long value.
*/
public long renewDelegationToken(URL url, Token token, String doAsUser)
throws IOException, AuthenticationException {
Preconditions.checkNotNull(url, "url");
Preconditions.checkNotNull(token, "token");
Preconditions.checkNotNull(token.delegationToken,
"No delegation token available");
try {
return ((KerberosDelegationTokenAuthenticator) getAuthenticator()).
renewDelegationToken(url, token, token.delegationToken, doAsUser);
} catch (IOException ex) {
token.delegationToken = null;
throw ex;
}
}
/**
* Cancels a delegation token from the server end-point. It does not require
* being authenticated by the configured <code>Authenticator</code>.
*
* @param url the URL to cancel the delegation token from. Only HTTP/S URLs
* are supported.
* @param token the authentication token with the Delegation Token to cancel.
* @throws IOException if an IO error occurred.
*/
public void cancelDelegationToken(URL url, Token token)
throws IOException {
cancelDelegationToken(url, token, null);
}
/**
* Cancels a delegation token from the server end-point. It does not require
* being authenticated by the configured <code>Authenticator</code>.
*
* @param url the URL to cancel the delegation token from. Only HTTP/S URLs
* are supported.
* @param token the authentication token with the Delegation Token to cancel.
* @param doAsUser the user to do as, which will be the token owner.
* @throws IOException if an IO error occurred.
*/
public void cancelDelegationToken(URL url, Token token, String doAsUser)
throws IOException {
Preconditions.checkNotNull(url, "url");
Preconditions.checkNotNull(token, "token");
Preconditions.checkNotNull(token.delegationToken,
"No delegation token available");
try {
((KerberosDelegationTokenAuthenticator) getAuthenticator()).
cancelDelegationToken(url, token, token.delegationToken, doAsUser);
} finally {
token.delegationToken = null;
}
}
}
| to |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/BuildProfileTest.java | {
"start": 6287,
"end": 6778
} | class ____ implements ContainerResponseFilter {
private final String value;
public ResponseFilter7(String value) {
this.value = value;
}
@Override
public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
throws IOException {
responseContext.getHeaders().add("X-RF-7", value);
}
}
@IfBuildProfile("test")
@Provider
public static | ResponseFilter7 |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java | {
"start": 3784,
"end": 5725
} | class ____ {
/**
* List of state change listeners; it is final to guarantee
* that it will never be null.
*/
private final List<ServiceStateChangeListener> listeners =
new ArrayList<ServiceStateChangeListener>();
/**
* Thread-safe addition of a new listener to the end of a list.
* Attempts to re-register a listener that is already registered
* will be ignored.
* @param l listener
*/
public synchronized void add(ServiceStateChangeListener l) {
if(!listeners.contains(l)) {
listeners.add(l);
}
}
/**
* Remove any registration of a listener from the listener list.
* @param l listener
* @return true if the listener was found (and then removed)
*/
public synchronized boolean remove(ServiceStateChangeListener l) {
return listeners.remove(l);
}
/**
* Reset the listener list
*/
public synchronized void reset() {
listeners.clear();
}
/**
* Change to a new state and notify all listeners.
* This method will block until all notifications have been issued.
* It caches the list of listeners before the notification begins,
* so additions or removal of listeners will not be visible.
* @param service the service that has changed state
*/
public void notifyListeners(Service service) {
//take a very fast snapshot of the callback list
//very much like CopyOnWriteArrayList, only more minimal
ServiceStateChangeListener[] callbacks;
synchronized (this) {
callbacks = listeners.toArray(new ServiceStateChangeListener[listeners.size()]);
}
//iterate through the listeners outside the synchronized method,
//ensuring that listener registration/unregistration doesn't break anything
for (ServiceStateChangeListener l : callbacks) {
l.stateChanged(service);
}
}
}
}
| ServiceListeners |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java | {
"start": 1999,
"end": 5651
} | class ____ {
private MiniDFSCluster cluster;
private DistributedFileSystem fs;
private final Path dirPath = new Path("/striped");
private Path filePath = new Path(dirPath, "file");
private final ErasureCodingPolicy ecPolicy =
SystemErasureCodingPolicies.getPolicies().get(1);
private final short dataBlocks = (short) ecPolicy.getNumDataUnits();
private final short parityBlocks = (short) ecPolicy.getNumParityUnits();
private final short groupSize = (short) (dataBlocks + parityBlocks);
private final int cellSize = ecPolicy.getCellSize();
private final int stripesPerBlock = 4;
private final int blockSize = stripesPerBlock * cellSize;
private final int numDNs = groupSize;
@BeforeEach
public void setup() throws IOException {
Configuration conf = new Configuration();
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
SimulatedFSDataset.setFactory(conf);
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();
cluster.waitActive();
fs = cluster.getFileSystem();
fs.enableErasureCodingPolicy(ecPolicy.getName());
fs.mkdirs(dirPath);
fs.getClient().setErasureCodingPolicy(dirPath.toString(),
ecPolicy.getName());
}
@AfterEach
public void tearDown() {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
@Test
public void testProcessOverReplicatedAndRedudantBlock() throws Exception {
long fileLen = dataBlocks * blockSize;
DFSTestUtil.createStripedFile(cluster, filePath, null, 1, stripesPerBlock,
false);
LocatedBlocks lbs = cluster.getNameNodeRpc()
.getBlockLocations(filePath.toString(), 0, fileLen);
LocatedStripedBlock bg = (LocatedStripedBlock) (lbs.get(0));
long gs = bg.getBlock().getGenerationStamp();
String bpid = bg.getBlock().getBlockPoolId();
long groupId = bg.getBlock().getBlockId();
Block blk = new Block(groupId, blockSize, gs);
int i = 0;
// one missing block
for (; i < groupSize - 1; i++) {
blk.setBlockId(groupId + i);
cluster.injectBlocks(i, Arrays.asList(blk), bpid);
}
cluster.triggerBlockReports();
// one redundant block
blk.setBlockId(groupId + 2);
cluster.injectBlocks(i, Arrays.asList(blk), bpid);
BlockInfoStriped blockInfo =
(BlockInfoStriped)cluster.getNamesystem().getBlockManager()
.getStoredBlock(new Block(groupId));
// update blocksMap
cluster.triggerBlockReports();
// delete redundant block
cluster.triggerHeartbeats();
//wait for IBR
GenericTestUtils.waitFor(
() -> cluster.getNamesystem().getBlockManager()
.countNodes(blockInfo).liveReplicas() >= groupSize -1,
500, 10000);
// trigger reconstruction
cluster.triggerHeartbeats();
//wait for IBR
GenericTestUtils.waitFor(
() -> cluster.getNamesystem().getBlockManager()
.countNodes(blockInfo).liveReplicas() >= groupSize,
500, 10000);
HashSet<Long> blockIdsSet = new HashSet<Long>();
lbs = cluster.getNameNodeRpc().getBlockLocations(filePath.toString(), 0,
fileLen);
bg = (LocatedStripedBlock) (lbs.get(0));
final LocatedBlock[] blocks = StripedBlockUtil.parseStripedBlockGroup(bg,
cellSize, dataBlocks, parityBlocks);
for (LocatedBlock dn : blocks) {
if (dn != null) {
blockIdsSet.add(dn.getBlock().getBlockId());
}
}
assertEquals(groupSize, blockIdsSet.size());
}
}
| TestRedudantBlocks |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/test/http/Headers.java | {
"start": 750,
"end": 892
} | class ____ empty headers
*/
Headers() {
this.headers = new com.sun.net.httpserver.Headers();
}
/**
* Creates a | with |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/LogTruncationException.java | {
"start": 1348,
"end": 2696
} | class ____ extends OffsetOutOfRangeException {
private final Map<TopicPartition, OffsetAndMetadata> divergentOffsets;
public LogTruncationException(Map<TopicPartition, Long> fetchOffsets,
Map<TopicPartition, OffsetAndMetadata> divergentOffsets) {
this("Truncated partitions detected with divergent offsets " + divergentOffsets, fetchOffsets, divergentOffsets);
}
public LogTruncationException(String message,
Map<TopicPartition, Long> fetchOffsets,
Map<TopicPartition, OffsetAndMetadata> divergentOffsets) {
super(message, fetchOffsets);
this.divergentOffsets = Collections.unmodifiableMap(divergentOffsets);
}
/**
* Get the divergent offsets for the partitions which were truncated. For each
* partition, this is the first offset which is known to diverge from what the
* consumer read.
*
* Note that there is no guarantee that this offset will be known. It is necessary
* to use {@link #partitions()} to see the set of partitions that were truncated
* and then check for the presence of a divergent offset in this map.
*/
public Map<TopicPartition, OffsetAndMetadata> divergentOffsets() {
return divergentOffsets;
}
}
| LogTruncationException |
java | apache__camel | components/camel-infinispan/camel-infinispan-embedded/src/main/java/org/apache/camel/component/infinispan/embedded/InfinispanEmbeddedManager.java | {
"start": 1755,
"end": 6063
} | class ____ extends ServiceSupport implements InfinispanManager<EmbeddedCacheManager> {
private final InfinispanEmbeddedConfiguration configuration;
private CamelContext camelContext;
private EmbeddedCacheManager cacheContainer;
private boolean isManagedCacheContainer;
public InfinispanEmbeddedManager() {
this(null, new InfinispanEmbeddedConfiguration());
}
public InfinispanEmbeddedManager(InfinispanEmbeddedConfiguration configuration) {
this(null, configuration);
}
public InfinispanEmbeddedManager(CamelContext camelContext, InfinispanEmbeddedConfiguration configuration) {
this.camelContext = camelContext;
this.configuration = configuration;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public void doStart() throws Exception {
cacheContainer = configuration.getCacheContainer();
if (cacheContainer == null) {
final Configuration containerConf = configuration.getCacheContainerConfiguration();
// Check if a container configuration object has been provided so use
// it and discard any other additional configuration.
if (containerConf != null) {
ConfigurationBuilderHolder holder = new ConfigurationBuilderHolder();
holder.getGlobalConfigurationBuilder().defaultCacheName("default");
holder.getNamedConfigurationBuilders().put("default", new ConfigurationBuilder());
cacheContainer = new DefaultCacheManager(holder);
cacheContainer.defineConfiguration("default", containerConf);
} else {
if (ObjectHelper.isNotEmpty(configuration.getConfigurationUri())) {
cacheContainer = new DefaultCacheManager(
InfinispanUtil.openInputStream(camelContext, configuration.getConfigurationUri()));
} else {
ConfigurationBuilderHolder holder = new ConfigurationBuilderHolder();
holder.getGlobalConfigurationBuilder().defaultCacheName("default");
holder.getNamedConfigurationBuilders().put("default", new ConfigurationBuilder());
cacheContainer = new DefaultCacheManager(holder);
}
}
isManagedCacheContainer = true;
}
}
@Override
public void doStop() throws Exception {
if (isManagedCacheContainer) {
cacheContainer.stop();
}
super.doStop();
}
@Override
public EmbeddedCacheManager getCacheContainer() {
return cacheContainer;
}
@Override
public <K, V> BasicCache<K, V> getCache() {
Cache<K, V> cache = cacheContainer.getCache();
return configuration.hasFlags()
? cache.getAdvancedCache().withFlags(configuration.getFlags())
: cache;
}
@Override
public <K, V> BasicCache<K, V> getCache(String cacheName) {
Cache<K, V> cache;
if (ObjectHelper.isEmpty(cacheName) || CACHE_MANAGER_CURRENT.equals(cacheName)) {
cache = cacheContainer.getCache();
} else {
cache = cacheContainer.getCache(cacheName);
}
return configuration.hasFlags()
? cache.getAdvancedCache().withFlags(configuration.getFlags())
: cache;
}
@Override
public <K, V> BasicCache<K, V> getCache(Message message, String defaultCache) {
final String cacheName = message.getHeader(InfinispanConstants.CACHE_NAME, defaultCache, String.class);
final Cache<K, V> cache = (Cache<K, V>) getCache(cacheName);
return message.getHeader(InfinispanConstants.IGNORE_RETURN_VALUES, false, boolean.class)
? cache.getAdvancedCache().withFlags(Flag.IGNORE_RETURN_VALUES)
: cache;
}
@Override
public Set<String> getCacheNames() {
return cacheContainer.getCacheNames();
}
@Override
public void stopCache(String cacheName) {
cacheContainer.stopCache(cacheName);
}
}
| InfinispanEmbeddedManager |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/SelfAssertionTest.java | {
"start": 6341,
"end": 6801
} | class ____ {
void test(int x) {
// BUG: Diagnostic contains:
assertThat(Duration.ofMillis(x)).isEqualTo(Duration.ofMillis(x));
}
}
""")
.doTest();
}
@Test
public void junitPositiveAssertion() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static org.junit.Assert.assertEquals;
abstract | Test |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/async/AsyncLoggerConfigTest.java | {
"start": 2263,
"end": 5078
} | class ____ {
private static final String FQCN = AsyncLoggerConfigTest.class.getName();
@TempLoggingDir
private static Path loggingPath;
@Test
@LoggerContextSource
void testAdditivity(final LoggerContext context) throws Exception {
final Path file = loggingPath.resolve("AsyncLoggerConfigTest.log");
assertThat(file).isEmptyFile();
final Logger log = context.getLogger("com.foo.Bar");
final String msg = "Additive logging: 2 for the price of 1!";
log.info(msg);
CoreLoggerContexts.stopLoggerContext(file.toFile()); // stop async thread
final String location = "testAdditivity";
try (final BufferedReader reader = Files.newBufferedReader(file)) {
for (int i = 0; i < 2; i++) {
assertThat(reader.readLine())
.as("Message")
.contains(msg)
.as("Location")
.contains(location);
}
}
}
@Test
void testIncludeLocationDefaultsToFalse() {
final Configuration configuration = new NullConfiguration();
final LoggerConfig rootLoggerConfig =
RootLogger.newAsyncRootBuilder().setConfig(configuration).build();
assertFalse(rootLoggerConfig.isIncludeLocation(), "Include location should default to false for async loggers");
final LoggerConfig loggerConfig = AsyncLoggerConfig.newAsyncBuilder()
.setConfig(configuration)
.setLoggerName("com.foo.Bar")
.build();
assertFalse(loggerConfig.isIncludeLocation(), "Include location should default to false for async loggers");
}
@Test
void testSingleFilterInvocation() {
final Configuration configuration = new NullConfiguration();
final Filter filter = mock(Filter.class);
final LoggerConfig config = AsyncLoggerConfig.newAsyncBuilder()
.setLoggerName(FQCN)
.setConfig(configuration)
.setLevel(Level.INFO)
.setFilter(filter)
.build();
final Appender appender = mock(Appender.class);
when(appender.isStarted()).thenReturn(true);
when(appender.getName()).thenReturn("test");
config.addAppender(appender, null, null);
final AsyncLoggerConfigDisruptor disruptor =
(AsyncLoggerConfigDisruptor) configuration.getAsyncLoggerConfigDelegate();
disruptor.start();
try {
config.log(FQCN, FQCN, null, Level.INFO, new SimpleMessage(), null);
verify(appender, timeout(500).times(1)).append(any());
verify(filter, times(1)).filter(any());
} finally {
disruptor.stop();
}
}
}
| AsyncLoggerConfigTest |
java | apache__maven | impl/maven-di/src/main/java/org/apache/maven/di/impl/Binding.java | {
"start": 5666,
"end": 6214
} | class ____<T> extends Binding<T> {
final Supplier<T> supplier;
public BindingToSupplier(Supplier<T> supplier) {
super(null, Collections.emptySet());
this.supplier = supplier;
}
@Override
public Supplier<T> compile(Function<Dependency<?>, Supplier<?>> compiler) {
return supplier;
}
@Override
public String toString() {
return "BindingToSupplier[" + supplier + "]" + getDependencies();
}
}
public static | BindingToSupplier |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SnmpEndpointBuilderFactory.java | {
"start": 55209,
"end": 57765
} | interface ____ extends EndpointProducerBuilder {
default SnmpEndpointProducerBuilder basic() {
return (SnmpEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedSnmpEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedSnmpEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
/**
* Builder for endpoint for the SNMP component.
*/
public | AdvancedSnmpEndpointProducerBuilder |
java | dropwizard__dropwizard | dropwizard-util/src/main/java/io/dropwizard/util/Throwables.java | {
"start": 290,
"end": 1135
} | class ____ {
private Throwables() {
}
/**
* Search an exception chain for an exception matching a given condition.
*
* @param condition The condition to match on
* @param t The head of the exception chain
* @return An {@link Optional} containing the first match in the chain, starting from the head, or empty if no
* matching exception was found
* @since 2.1.0
*/
public static Optional<Throwable> findThrowableInChain(Predicate<Throwable> condition, @Nullable Throwable t) {
final Set<Throwable> seen = new HashSet<>();
while (t != null && !seen.contains(t)) {
if (condition.test(t)) {
return Optional.of(t);
}
seen.add(t);
t = t.getCause();
}
return Optional.empty();
}
}
| Throwables |
java | quarkusio__quarkus | independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/BootstrapMavenContext.java | {
"start": 4271,
"end": 56002
} | class ____ {
private static final Logger log = Logger.getLogger(BootstrapMavenContext.class);
private static final String BASEDIR = "basedir";
private static final String DEFAULT_REMOTE_REPO_ID = "central";
private static final String DEFAULT_REMOTE_REPO_URL = "https://repo.maven.apache.org/maven2";
private static final String MAVEN_DOT_HOME = "maven.home";
private static final String MAVEN_HOME = "MAVEN_HOME";
private static final String MAVEN_SETTINGS = "maven.settings";
public static final String MAVEN_TOP_LEVEL_PROJECT_BASEDIR = "maven.top-level-basedir";
private static final String SETTINGS_XML = "settings.xml";
private static final String SETTINGS_SECURITY = "settings.security";
static final String EFFECTIVE_MODEL_BUILDER_PROP = "quarkus.bootstrap.effective-model-builder";
private static final String WARN_ON_FAILING_WS_MODULES_PROP = "quarkus.bootstrap.warn-on-failing-workspace-modules";
private static final String MAVEN_RESOLVER_TRANSPORT_KEY = "maven.resolver.transport";
private static final String MAVEN_RESOLVER_TRANSPORT_DEFAULT = "default";
private static final String MAVEN_RESOLVER_TRANSPORT_WAGON = "wagon";
private static final String MAVEN_RESOLVER_TRANSPORT_NATIVE = "native";
private static final String MAVEN_RESOLVER_TRANSPORT_AUTO = "auto";
private static final String WAGON_TRANSPORTER_PRIORITY_KEY = "aether.priority.WagonTransporterFactory";
private static final String NATIVE_HTTP_TRANSPORTER_PRIORITY_KEY = "aether.priority.HttpTransporterFactory";
private static final String NATIVE_FILE_TRANSPORTER_PRIORITY_KEY = "aether.priority.FileTransporterFactory";
private static final String RESOLVER_MAX_PRIORITY = String.valueOf(Float.MAX_VALUE);
private boolean artifactTransferLogging;
private BootstrapMavenOptions cliOptions;
private File userSettings;
private File globalSettings;
private Boolean offline;
// Typically, this property will not be enabled in Quarkus application development use-cases
// It was introduced to support use-cases of using the bootstrap resolver API beyond Quarkus application development
private Boolean warnOnFailingWorkspaceModules;
private LocalWorkspace workspace;
private LocalProject currentProject;
private Settings settings;
private List<org.apache.maven.model.Profile> activeSettingsProfiles;
private RepositorySystem repoSystem;
private RepositorySystemSession repoSession;
private List<RemoteRepository> remoteRepos;
private List<RemoteRepository> remotePluginRepos;
private RemoteRepositoryManager remoteRepoManager;
private String localRepo;
private String[] localRepoTail;
private Boolean localRepoTailIgnoreAvailability;
private Path currentPom;
private Boolean currentProjectExists;
private String alternatePomName;
private Path rootProjectDir;
private boolean preferPomsFromWorkspace;
private Boolean effectiveModelBuilder;
private Boolean wsModuleParentHierarchy;
private SettingsDecrypter settingsDecrypter;
private final List<String> excludeSisuBeanPackages;
private final List<String> includeSisuBeanPackages;
public static BootstrapMavenContextConfig<?> config() {
return new BootstrapMavenContextConfig<>();
}
public BootstrapMavenContext() throws BootstrapMavenException {
this(new BootstrapMavenContextConfig<>());
}
public BootstrapMavenContext(BootstrapMavenContextConfig<?> config)
throws BootstrapMavenException {
/*
* WARNING: this constructor calls instance methods as part of the initialization.
* This means the values that are available in the config should be set before
* the instance method invocations.
*/
this.alternatePomName = config.alternatePomName;
this.artifactTransferLogging = config.artifactTransferLogging;
this.localRepo = config.localRepo;
this.localRepoTail = config.localRepoTail;
this.localRepoTailIgnoreAvailability = config.localRepoTailIgnoreAvailability;
this.offline = config.offline;
this.warnOnFailingWorkspaceModules = config.warnOnFailedWorkspaceModules;
this.repoSystem = config.repoSystem;
this.repoSession = config.repoSession;
this.remoteRepos = config.remoteRepos;
this.remotePluginRepos = config.remotePluginRepos;
this.remoteRepoManager = config.remoteRepoManager;
this.settingsDecrypter = config.settingsDecrypter;
this.cliOptions = config.cliOptions;
this.excludeSisuBeanPackages = config.getExcludeSisuBeanPackages();
this.includeSisuBeanPackages = config.getIncludeSisuBeanPackages();
if (config.rootProjectDir == null) {
final String topLevelBaseDirStr = PropertyUtils.getProperty(MAVEN_TOP_LEVEL_PROJECT_BASEDIR);
if (topLevelBaseDirStr != null) {
final Path tmp = Path.of(topLevelBaseDirStr);
if (!Files.exists(tmp)) {
throw new BootstrapMavenException("Top-level project base directory " + topLevelBaseDirStr
+ " specified with system property " + MAVEN_TOP_LEVEL_PROJECT_BASEDIR + " does not exist");
}
this.rootProjectDir = tmp;
}
} else {
this.rootProjectDir = config.rootProjectDir;
}
this.preferPomsFromWorkspace = config.preferPomsFromWorkspace;
this.effectiveModelBuilder = config.effectiveModelBuilder;
this.wsModuleParentHierarchy = config.wsModuleParentHierarchy;
this.userSettings = config.userSettings;
if (config.currentProject != null) {
this.currentProject = config.currentProject;
this.currentPom = currentProject.getRawModel().getPomFile().toPath();
this.workspace = config.currentProject.getWorkspace();
} else if (config.workspaceDiscovery) {
currentProject = resolveCurrentProject(config.providedModules);
this.workspace = currentProject == null ? null : currentProject.getWorkspace();
if (workspace != null) {
if (config.repoSession == null && repoSession != null && repoSession.getWorkspaceReader() == null) {
repoSession = new DefaultRepositorySystemSession(repoSession).setWorkspaceReader(workspace);
if (config.remoteRepos == null && remoteRepos != null) {
final List<RemoteRepository> rawProjectRepos = resolveRawProjectRepos(remoteRepos);
if (!rawProjectRepos.isEmpty()) {
remoteRepos = getRemoteRepositoryManager().aggregateRepositories(repoSession, remoteRepos,
rawProjectRepos, true);
}
}
}
}
}
}
public ArtifactCoords getCurrentProjectArtifact(String extension) throws BootstrapMavenException {
if (currentProject != null) {
return currentProject.getAppArtifact(extension);
}
final Model model = loadCurrentProjectModel();
if (model == null) {
return null;
}
return ArtifactCoords.of(ModelUtils.getGroupId(model), model.getArtifactId(), ArtifactCoords.DEFAULT_CLASSIFIER,
extension, ModelUtils.getVersion(model));
}
public LocalProject getCurrentProject() {
return currentProject;
}
public LocalWorkspace getWorkspace() {
return workspace;
}
public BootstrapMavenOptions getCliOptions() {
return cliOptions == null ? cliOptions = BootstrapMavenOptions.newInstance() : cliOptions;
}
public File getUserSettings() {
if (userSettings == null) {
final String quarkusMavenSettings = getProperty(MAVEN_SETTINGS);
if (quarkusMavenSettings != null) {
var f = new File(quarkusMavenSettings);
return userSettings = f.exists() ? f : null;
}
return userSettings = resolveSettingsFile(
getCliOptions().getOptionValue(BootstrapMavenOptions.ALTERNATE_USER_SETTINGS),
() -> new File(getUserMavenConfigurationHome(), SETTINGS_XML));
}
return userSettings;
}
private static File getUserMavenConfigurationHome() {
return new File(PropertyUtils.getUserHome(), ".m2");
}
private String getProperty(String name) {
String value = PropertyUtils.getProperty(name);
if (value != null) {
return value;
}
final Properties props = getCliOptions().getSystemProperties();
return props == null ? null : props.getProperty(name);
}
public File getGlobalSettings() {
return globalSettings == null
? globalSettings = resolveSettingsFile(
getCliOptions().getOptionValue(BootstrapMavenOptions.ALTERNATE_GLOBAL_SETTINGS),
() -> {
String mavenHome = getProperty(MAVEN_DOT_HOME);
if (mavenHome == null) {
mavenHome = System.getenv(MAVEN_HOME);
if (mavenHome == null) {
mavenHome = "";
}
}
return new File(mavenHome, "conf/settings.xml");
})
: globalSettings;
}
public boolean isOffline() throws BootstrapMavenException {
return offline == null
? offline = (getCliOptions().hasOption(BootstrapMavenOptions.OFFLINE) || getEffectiveSettings().isOffline())
: offline;
}
public boolean isWarnOnFailingWorkspaceModules() {
return warnOnFailingWorkspaceModules == null
? warnOnFailingWorkspaceModules = Boolean.getBoolean(WARN_ON_FAILING_WS_MODULES_PROP)
: warnOnFailingWorkspaceModules;
}
public RepositorySystem getRepositorySystem() throws BootstrapMavenException {
if (repoSystem == null) {
initRepoSystemAndManager();
}
return repoSystem;
}
public RemoteRepositoryManager getRemoteRepositoryManager() {
if (remoteRepoManager == null) {
initRepoSystemAndManager();
}
return remoteRepoManager;
}
public RepositorySystemSession getRepositorySystemSession() throws BootstrapMavenException {
return repoSession == null ? repoSession = newRepositorySystemSession() : repoSession;
}
public List<RemoteRepository> getRemoteRepositories() throws BootstrapMavenException {
return remoteRepos == null ? remoteRepos = resolveRemoteRepos() : remoteRepos;
}
public List<RemoteRepository> getRemotePluginRepositories() throws BootstrapMavenException {
return remotePluginRepos == null ? remotePluginRepos = resolveRemotePluginRepos() : remotePluginRepos;
}
public SettingsDecrypter getSettingsDecrypter() {
if (settingsDecrypter == null) {
initRepoSystemAndManager();
}
return settingsDecrypter;
}
public Settings getEffectiveSettings() throws BootstrapMavenException {
if (settings != null) {
return settings;
}
final DefaultSettingsBuildingRequest settingsRequest = new DefaultSettingsBuildingRequest()
.setSystemProperties(System.getProperties())
.setUserSettingsFile(getUserSettings())
.setGlobalSettingsFile(getGlobalSettings());
final Properties cmdLineProps = getCliOptions().getSystemProperties();
if (cmdLineProps != null) {
settingsRequest.setUserProperties(cmdLineProps);
}
final Settings effectiveSettings;
try {
final SettingsBuildingResult result = new DefaultSettingsBuilderFactory()
.newInstance().build(settingsRequest);
final List<SettingsProblem> problems = result.getProblems();
if (!problems.isEmpty()) {
for (SettingsProblem problem : problems) {
switch (problem.getSeverity()) {
case ERROR:
case FATAL:
throw new BootstrapMavenException("Settings problem encountered at " + problem.getLocation(),
problem.getException());
default:
log.warn("Settings problem encountered at " + problem.getLocation(), problem.getException());
}
}
}
effectiveSettings = result.getEffectiveSettings();
} catch (SettingsBuildingException e) {
throw new BootstrapMavenException("Failed to initialize Maven repository settings", e);
}
return settings = effectiveSettings;
}
public String getLocalRepo() throws BootstrapMavenException {
return localRepo == null ? localRepo = resolveLocalRepo(getEffectiveSettings()) : localRepo;
}
/**
 * Lazily resolves and caches the chained "tail" of additional local repositories
 * configured via {@code maven.repo.local.tail}; empty when not configured.
 */
private String[] getLocalRepoTail() {
    return localRepoTail == null ? localRepoTail = resolveLocalRepoTail() : localRepoTail;
}
/**
 * Lazily resolves and caches whether artifact availability should be ignored for the
 * local repository tail ({@code maven.repo.local.tail.ignoreAvailability}).
 */
private boolean getLocalRepoTailIgnoreAvailability() {
    return localRepoTailIgnoreAvailability == null
            ? localRepoTailIgnoreAvailability = resolveLocalRepoTailIgnoreAvailability()
            : localRepoTailIgnoreAvailability;
}
/**
 * Loads the workspace for the current project, wrapping any failure with the
 * location of the POM being loaded for easier diagnostics.
 *
 * @param providedModules modules whose POMs are already known, may be used to seed the workspace
 * @return the current local project with its workspace
 * @throws BootstrapMavenException if the workspace could not be loaded
 */
private LocalProject resolveCurrentProject(List<WorkspaceModulePom> providedModules) throws BootstrapMavenException {
    try {
        return LocalProject.loadWorkspace(this, providedModules);
    } catch (Exception e) {
        throw new BootstrapMavenException("Failed to load current project at " + getCurrentProjectPomOrNull(), e);
    }
}
/**
 * Resolves the local repository path using the following precedence:
 * <ol>
 *   <li>{@code QUARKUS_LOCAL_REPO} environment variable</li>
 *   <li>{@code maven.repo.local} property</li>
 *   <li>{@code <localRepository>} from the effective settings</li>
 *   <li>the default {@code <user Maven config home>/repository}</li>
 * </ol>
 */
private String resolveLocalRepo(Settings settings) {
    String localRepo = System.getenv("QUARKUS_LOCAL_REPO");
    if (localRepo != null) {
        return localRepo;
    }
    localRepo = getProperty("maven.repo.local");
    if (localRepo != null) {
        return localRepo;
    }
    localRepo = settings.getLocalRepository();
    return localRepo == null ? new File(getUserMavenConfigurationHome(), "repository").getAbsolutePath() : localRepo;
}
/**
 * Resolves the comma-separated list of additional local repository paths from the
 * {@code maven.repo.local.tail} property.
 * <p>
 * Blank segments (stray commas, whitespace around separators) are skipped: previously
 * they were passed through verbatim and could end up as an empty-path
 * {@code LocalRepository}, silently pointing at the current working directory.
 *
 * @return the tail repository paths, never {@code null}; empty when the property is unset or blank
 */
private String[] resolveLocalRepoTail() {
    final String localRepoTail = getProperty("maven.repo.local.tail");
    // merged the previously duplicated null/blank checks
    if (localRepoTail == null || localRepoTail.trim().isEmpty()) {
        return new String[] {};
    }
    final List<String> paths = new ArrayList<>();
    for (String path : localRepoTail.split(",")) {
        path = path.trim();
        if (!path.isEmpty()) {
            paths.add(path);
        }
    }
    return paths.toArray(new String[0]);
}
/**
 * Resolves whether artifact availability should be ignored for the local repository tail.
 * Only the literal string {@code false} (case-insensitive) disables it; any other value,
 * including an unset property, enables it.
 */
private boolean resolveLocalRepoTailIgnoreAvailability() {
    // the property defaults to true; only an explicit "false" turns it off
    return !"false".equalsIgnoreCase(getProperty("maven.repo.local.tail.ignoreAvailability"));
}
/**
 * Resolves a settings file, first trying an explicitly provided path and then falling
 * back to the supplied default location.
 * <p>
 * If {@code settingsArg} is relative, it is tried (in order) against the root project
 * base dir ({@code MAVEN_PROJECTBASEDIR}), the current module base dir, and the user home.
 *
 * @param settingsArg explicitly configured settings path, may be {@code null}
 * @param supplier    provider of the default settings file location
 * @return the existing settings file or {@code null} if none was found
 */
private File resolveSettingsFile(String settingsArg, Supplier<File> supplier) {
    File userSettings;
    if (settingsArg != null) {
        userSettings = new File(settingsArg);
        if (userSettings.exists()) {
            return userSettings;
        }
        // an absolute path that doesn't exist cannot be re-resolved against anything
        if (userSettings.isAbsolute()) {
            return null;
        }
        // in case the settings path is a relative one we check whether the pom path is also a relative one
        // in which case we can resolve the settings path relative to the project directory
        // otherwise, we don't have a clue what the settings path is relative to
        String alternatePomDir = getCliOptions().getOptionValue(BootstrapMavenOptions.ALTERNATE_POM_FILE);
        if (alternatePomDir != null) {
            File tmp = new File(alternatePomDir);
            if (tmp.isAbsolute()) {
                alternatePomDir = null;
            } else {
                // reduce a -f <file> argument to its directory part
                if (!tmp.isDirectory()) {
                    tmp = tmp.getParentFile();
                }
                alternatePomDir = tmp == null ? null : tmp.toString();
            }
        }
        // Root project base dir
        userSettings = resolveSettingsFile(settingsArg, alternatePomDir, System.getenv("MAVEN_PROJECTBASEDIR"));
        if (userSettings != null) {
            return userSettings;
        }
        // current module project base dir
        userSettings = resolveSettingsFile(settingsArg, alternatePomDir, PropertyUtils.getProperty(BASEDIR));
        if (userSettings != null) {
            return userSettings;
        }
        // last resort for a relative path: resolve against the user home
        userSettings = new File(PropertyUtils.getUserHome(), settingsArg);
        if (userSettings.exists()) {
            return userSettings;
        }
    }
    userSettings = supplier.get();
    return userSettings.exists() ? userSettings : null;
}
/**
 * Tries to locate a relative settings file against a project base directory.
 * When the base dir ends with the (relative) alternate POM directory, the settings
 * path is first resolved against the part of the base dir preceding it.
 *
 * @param settingsArg     relative settings path
 * @param alternatePomDir relative directory of the alternate POM, may be {@code null}
 * @param projectBaseDir  project base directory, may be {@code null}
 * @return the existing settings file or {@code null}
 */
private File resolveSettingsFile(String settingsArg, String alternatePomDir, String projectBaseDir) {
    if (projectBaseDir == null) {
        return null;
    }
    if (alternatePomDir != null && projectBaseDir.endsWith(alternatePomDir)) {
        // strip the alternate POM dir suffix and resolve against what remains
        final String parentDir = projectBaseDir.substring(0, projectBaseDir.length() - alternatePomDir.length());
        final File fromParent = new File(parentDir, settingsArg);
        if (fromParent.exists()) {
            return fromParent;
        }
    }
    final File fromBaseDir = new File(projectBaseDir, settingsArg);
    return fromBaseDir.exists() ? fromBaseDir : null;
}
// mostly a copy of `DefaultRepositorySystemSessionFactory.newRepositorySession()`
/**
 * Creates and fully configures a new Aether repository system session:
 * config properties (settings profiles, system and CLI properties), offline mode,
 * update/checksum policies, workspace reader, decrypted proxies/servers/mirrors,
 * per-server HTTP configuration, transport selection and the (possibly chained)
 * local repository manager.
 * <p>
 * NOTE(review): the order of operations below matters (e.g. settings profile
 * properties must be added before system/CLI properties so the latter win) —
 * keep it aligned with Maven's own session factory.
 *
 * @throws BootstrapMavenException if the effective settings or repositories could not be resolved
 */
private DefaultRepositorySystemSession newRepositorySystemSession() throws BootstrapMavenException {
    DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
    Settings settings = getEffectiveSettings();
    session.setCache(new DefaultRepositoryCache());
    Map<Object, Object> configProps = new LinkedHashMap<>();
    configProps.put(ConfigurationProperties.USER_AGENT, getUserAgent());
    configProps.put(ConfigurationProperties.INTERACTIVE, settings.isInteractiveMode());
    // First add properties populated from settings.xml
    Map<?, ?> propertiesFromActiveProfiles = getActiveSettingsProfiles()
            .stream()
            .map(ModelBase::getProperties)
            .flatMap(it -> it.entrySet().stream())
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (a, b) -> b));
    configProps.putAll(propertiesFromActiveProfiles);
    // Resolver's ConfigUtils solely rely on config properties, that is why we need to add both here as well.
    configProps.putAll(System.getProperties());
    if (getCliOptions().getSystemProperties() != null) {
        configProps.putAll(getCliOptions().getSystemProperties());
    }
    session.setOffline(isOffline());
    final BootstrapMavenOptions mvnArgs = getCliOptions();
    if (!mvnArgs.isEmpty()) {
        // map Maven CLI flags (-nsu/-U, -C/-c) to resolver policies
        if (mvnArgs.hasOption(BootstrapMavenOptions.SUPRESS_SNAPSHOT_UPDATES)) {
            session.setUpdatePolicy(RepositoryPolicy.UPDATE_POLICY_NEVER);
        } else if (mvnArgs.hasOption(BootstrapMavenOptions.UPDATE_SNAPSHOTS)) {
            session.setUpdatePolicy(RepositoryPolicy.UPDATE_POLICY_ALWAYS);
        }
        if (mvnArgs.hasOption(BootstrapMavenOptions.CHECKSUM_FAILURE_POLICY)) {
            session.setChecksumPolicy(RepositoryPolicy.CHECKSUM_POLICY_FAIL);
        } else if (mvnArgs.hasOption(BootstrapMavenOptions.CHECKSUM_WARNING_POLICY)) {
            session.setChecksumPolicy(RepositoryPolicy.CHECKSUM_POLICY_WARN);
        }
    }
    if (workspace != null) {
        session.setWorkspaceReader(workspace);
    }
    DefaultSettingsDecryptionRequest decrypt = new DefaultSettingsDecryptionRequest();
    decrypt.setProxies(settings.getProxies());
    decrypt.setServers(settings.getServers());
    // need to set `settings-security.xml` location extra, because it isn't discovered
    // by BeanBag when constructing `DefaultSecDispatcher`
    File settingsSecurityXml = null;
    boolean setSettingsSecurity = !System.getProperties().containsKey(SETTINGS_SECURITY)
            && (settingsSecurityXml = new File(getUserMavenConfigurationHome(), "settings-security.xml")).exists();
    if (setSettingsSecurity) {
        System.setProperty(SETTINGS_SECURITY, settingsSecurityXml.toString());
    }
    SettingsDecryptionResult decrypted = getSettingsDecrypter().decrypt(decrypt);
    if (setSettingsSecurity) {
        // restore the global state we temporarily changed above
        System.clearProperty(SETTINGS_SECURITY);
    }
    if (!decrypted.getProblems().isEmpty() && log.isDebugEnabled()) {
        for (SettingsProblem problem : decrypted.getProblems()) {
            log.debug(problem.getMessage(), problem.getException());
        }
    }
    DefaultMirrorSelector mirrorSelector = new DefaultMirrorSelector();
    for (Mirror mirror : settings.getMirrors()) {
        mirrorSelector.add(
                mirror.getId(),
                mirror.getUrl(),
                mirror.getLayout(),
                false,
                mirror.isBlocked(),
                mirror.getMirrorOf(),
                mirror.getMirrorOfLayouts());
    }
    session.setMirrorSelector(mirrorSelector);
    DefaultProxySelector proxySelector = new DefaultProxySelector();
    for (org.apache.maven.settings.Proxy proxy : decrypted.getProxies()) {
        AuthenticationBuilder authBuilder = new AuthenticationBuilder();
        authBuilder.addUsername(proxy.getUsername()).addPassword(proxy.getPassword());
        proxySelector.add(
                new org.eclipse.aether.repository.Proxy(
                        proxy.getProtocol(), proxy.getHost(), proxy.getPort(), authBuilder.build()),
                proxy.getNonProxyHosts());
    }
    session.setProxySelector(proxySelector);
    DefaultAuthenticationSelector authSelector = new DefaultAuthenticationSelector();
    for (Server server : decrypted.getServers()) {
        AuthenticationBuilder authBuilder = new AuthenticationBuilder();
        authBuilder.addUsername(server.getUsername()).addPassword(server.getPassword());
        authBuilder.addPrivateKey(server.getPrivateKey(), server.getPassphrase());
        authSelector.add(server.getId(), authBuilder.build());
        if (server.getConfiguration() != null) {
            Xpp3Dom dom = (Xpp3Dom) server.getConfiguration();
            // wagonProvider is a Maven-core concern; strip it before handing the config to the resolver
            for (int i = dom.getChildCount() - 1; i >= 0; i--) {
                Xpp3Dom child = dom.getChild(i);
                if ("wagonProvider".equals(child.getName())) {
                    dom.removeChild(i);
                }
            }
            XmlPlexusConfiguration config = new XmlPlexusConfiguration(dom);
            configProps.put("aether.connector.wagon.config." + server.getId(), config);
            // Translate to proper resolver configuration properties as well (as Plexus XML above is Wagon specific
            // only), but support only configuration/httpConfiguration/all, see
            // https://maven.apache.org/guides/mini/guide-http-settings.html
            Map<String, String> headers = null;
            Integer connectTimeout = null;
            Integer requestTimeout = null;
            PlexusConfiguration httpHeaders = config.getChild("httpHeaders", false);
            if (httpHeaders != null) {
                PlexusConfiguration[] properties = httpHeaders.getChildren("property");
                if (properties != null && properties.length > 0) {
                    headers = new HashMap<>();
                    for (PlexusConfiguration property : properties) {
                        headers.put(
                                property.getChild("name").getValue(),
                                property.getChild("value").getValue());
                    }
                }
            }
            PlexusConfiguration connectTimeoutXml = config.getChild("connectTimeout", false);
            if (connectTimeoutXml != null) {
                connectTimeout = Integer.parseInt(connectTimeoutXml.getValue());
            } else {
                // fallback configuration name
                PlexusConfiguration httpConfiguration = config.getChild("httpConfiguration", false);
                if (httpConfiguration != null) {
                    PlexusConfiguration httpConfigurationAll = httpConfiguration.getChild("all", false);
                    if (httpConfigurationAll != null) {
                        connectTimeoutXml = httpConfigurationAll.getChild("connectionTimeout", false);
                        if (connectTimeoutXml != null) {
                            connectTimeout = Integer.parseInt(connectTimeoutXml.getValue());
                            log.warn("Settings for server " + server.getId() + " uses legacy format");
                        }
                    }
                }
            }
            PlexusConfiguration requestTimeoutXml = config.getChild("requestTimeout", false);
            if (requestTimeoutXml != null) {
                requestTimeout = Integer.parseInt(requestTimeoutXml.getValue());
            } else {
                // fallback configuration name
                PlexusConfiguration httpConfiguration = config.getChild("httpConfiguration", false);
                if (httpConfiguration != null) {
                    PlexusConfiguration httpConfigurationAll = httpConfiguration.getChild("all", false);
                    if (httpConfigurationAll != null) {
                        requestTimeoutXml = httpConfigurationAll.getChild("readTimeout", false);
                        if (requestTimeoutXml != null) {
                            requestTimeout = Integer.parseInt(requestTimeoutXml.getValue());
                            log.warn("Settings for server " + server.getId() + " uses legacy format");
                        }
                    }
                }
            }
            // org.eclipse.aether.ConfigurationProperties.HTTP_HEADERS => Map<String, String>
            if (headers != null) {
                configProps.put(ConfigurationProperties.HTTP_HEADERS + "." + server.getId(), headers);
            }
            // org.eclipse.aether.ConfigurationProperties.CONNECT_TIMEOUT => int
            if (connectTimeout != null) {
                configProps.put(ConfigurationProperties.CONNECT_TIMEOUT + "." + server.getId(), connectTimeout);
            }
            // org.eclipse.aether.ConfigurationProperties.REQUEST_TIMEOUT => int
            if (requestTimeout != null) {
                configProps.put(ConfigurationProperties.REQUEST_TIMEOUT + "." + server.getId(), requestTimeout);
            }
        }
        configProps.put("aether.connector.perms.fileMode." + server.getId(), server.getFilePermissions());
        configProps.put("aether.connector.perms.dirMode." + server.getId(), server.getDirectoryPermissions());
    }
    session.setAuthenticationSelector(authSelector);
    // transport selection mirrors Maven's -Dmaven.resolver.transport handling
    Object transport = configProps.getOrDefault(MAVEN_RESOLVER_TRANSPORT_KEY, MAVEN_RESOLVER_TRANSPORT_DEFAULT);
    if (MAVEN_RESOLVER_TRANSPORT_DEFAULT.equals(transport)) {
        // The "default" mode (user did not set anything) from now on defaults to AUTO
    } else if (MAVEN_RESOLVER_TRANSPORT_NATIVE.equals(transport)) {
        // Make sure (whatever extra priority is set) that resolver native is selected
        configProps.put(NATIVE_FILE_TRANSPORTER_PRIORITY_KEY, RESOLVER_MAX_PRIORITY);
        configProps.put(NATIVE_HTTP_TRANSPORTER_PRIORITY_KEY, RESOLVER_MAX_PRIORITY);
    } else if (MAVEN_RESOLVER_TRANSPORT_WAGON.equals(transport)) {
        // Make sure (whatever extra priority is set) that wagon is selected
        configProps.put(WAGON_TRANSPORTER_PRIORITY_KEY, RESOLVER_MAX_PRIORITY);
    } else if (!MAVEN_RESOLVER_TRANSPORT_AUTO.equals(transport)) {
        throw new IllegalArgumentException("Unknown resolver transport '" + transport
                + "'. Supported transports are: " + MAVEN_RESOLVER_TRANSPORT_WAGON + ", "
                + MAVEN_RESOLVER_TRANSPORT_NATIVE + ", " + MAVEN_RESOLVER_TRANSPORT_AUTO);
    }
    session.setUserProperties(getCliOptions().getSystemProperties());
    session.setSystemProperties(System.getProperties());
    session.setConfigProperties(configProps);
    if (artifactTransferLogging) {
        // pick a transfer listener matching the CLI verbosity flags
        TransferListener transferListener;
        if (mvnArgs.hasOption(BootstrapMavenOptions.NO_TRANSFER_PROGRESS)) {
            transferListener = new QuietMavenTransferListener();
        } else if (mvnArgs.hasOption(BootstrapMavenOptions.BATCH_MODE)) {
            transferListener = new BatchModeMavenTransferListener(System.out);
        } else {
            transferListener = new ConsoleMavenTransferListener(System.out, true);
        }
        session.setTransferListener(transferListener);
    }
    setUpLocalRepositoryManager(session);
    return session;
}
/**
 * Installs the local repository manager on the session. When a repository tail is
 * configured ({@code maven.repo.local.tail}), the primary local repo becomes the head
 * of a {@link ChainedLocalRepositoryManager}; otherwise a plain manager is used.
 *
 * @throws BootstrapMavenException if the local repository path could not be resolved
 */
private void setUpLocalRepositoryManager(DefaultRepositorySystemSession session) throws BootstrapMavenException {
    String localRepoPath = getLocalRepo();
    String[] localRepoTailPaths = getLocalRepoTail();
    LocalRepositoryManager head = getRepositorySystem().newLocalRepositoryManager(session,
            new LocalRepository(localRepoPath));
    if (localRepoTailPaths.length == 0) {
        session.setLocalRepositoryManager(head);
    } else {
        List<LocalRepositoryManager> tail = new ArrayList<>(localRepoTailPaths.length);
        for (String tailPath : localRepoTailPaths) {
            tail.add(getRepositorySystem().newLocalRepositoryManager(session, new LocalRepository(tailPath)));
        }
        session.setLocalRepositoryManager(
                new ChainedLocalRepositoryManager(head, tail, getLocalRepoTailIgnoreAvailability()));
    }
}
/**
 * Resolves the remote artifact repositories: environment-configured repos, repos from
 * active settings profiles, repos declared by the (workspace) project POM hierarchy,
 * and Maven Central last unless it was already configured in the settings.
 * <p>
 * Ordering is deliberate: Central is appended after project repos so configured
 * repositories take precedence.
 *
 * @throws BootstrapMavenException in case of a failure resolving the repositories
 */
private List<RemoteRepository> resolveRemoteRepos() throws BootstrapMavenException {
    final List<RemoteRepository> rawRepos = new ArrayList<>();
    readMavenReposFromEnv(rawRepos, System.getenv());
    addReposFromProfiles(rawRepos);
    final boolean centralConfiguredInSettings = includesDefaultRepo(rawRepos);
    if (!centralConfiguredInSettings) {
        rawRepos.add(newDefaultRepository());
    }
    if (workspace == null) {
        // no workspace, so no project repositories to merge in
        return newResolutionRepos(rawRepos);
    }
    final List<RemoteRepository> rawProjectRepos = resolveRawProjectRepos(newResolutionRepos(rawRepos));
    if (!rawProjectRepos.isEmpty()) {
        if (!centralConfiguredInSettings) {
            // if the default repo was added here, we are removing it to add it last
            rawRepos.remove(rawRepos.size() - 1);
        }
        rawRepos.addAll(rawProjectRepos);
        if (!centralConfiguredInSettings && !includesDefaultRepo(rawProjectRepos)) {
            rawRepos.add(newDefaultRepository());
        }
    }
    return newResolutionRepos(rawRepos);
}
/**
 * Resolves the remote plugin repositories: repositories declared in active settings
 * profiles, with Maven Central appended when not already present.
 *
 * @throws BootstrapMavenException in case of a failure resolving the repositories
 */
private List<RemoteRepository> resolveRemotePluginRepos() throws BootstrapMavenException {
    final List<RemoteRepository> repos = new ArrayList<>();
    addReposFromProfiles(repos);
    // central must be there
    if (!includesDefaultRepo(repos)) {
        repos.add(newDefaultRepository());
    }
    return newResolutionRepos(repos);
}
/**
 * Applies session-level mirror/proxy/authentication configuration to the raw
 * repositories, producing the effective resolution repositories.
 */
private List<RemoteRepository> newResolutionRepos(final List<RemoteRepository> rawRepos)
        throws BootstrapMavenException {
    return getRepositorySystem().newResolutionRepositories(getRepositorySystemSession(), rawRepos);
}
/**
 * Collects repositories from all active settings profiles into {@code rawRepos}.
 *
 * @param rawRepos target list the profile repositories are appended to
 * @throws BootstrapMavenException if active profiles could not be determined
 */
private void addReposFromProfiles(final List<RemoteRepository> rawRepos) throws BootstrapMavenException {
    // reverse the order of the profiles to match the order in which the repos appear in Maven mojos
    final List<org.apache.maven.model.Profile> profiles = getActiveSettingsProfiles();
    for (int i = profiles.size() - 1; i >= 0; --i) {
        addProfileRepos(profiles.get(i).getRepositories(), rawRepos);
    }
}
/**
 * Creates the default Maven Central repository definition: daily update policy,
 * checksum warnings, releases enabled and snapshots disabled.
 */
public static RemoteRepository newDefaultRepository() {
    return new RemoteRepository.Builder(DEFAULT_REMOTE_REPO_ID, "default", DEFAULT_REMOTE_REPO_URL)
            // releases enabled
            .setReleasePolicy(new RepositoryPolicy(true, RepositoryPolicy.UPDATE_POLICY_DAILY,
                    RepositoryPolicy.CHECKSUM_POLICY_WARN))
            // snapshots disabled, matching Central's behavior
            .setSnapshotPolicy(new RepositoryPolicy(false, RepositoryPolicy.UPDATE_POLICY_DAILY,
                    RepositoryPolicy.CHECKSUM_POLICY_WARN))
            .build();
}
/**
 * Reads the raw (non-effective) Maven model of the current project's POM.
 *
 * @return the parsed model, or {@code null} when there is no current project POM
 * @throws BootstrapMavenException if the POM exists but could not be parsed
 */
private Model loadCurrentProjectModel() throws BootstrapMavenException {
    final Path pom = getCurrentProjectPomOrNull();
    if (pom == null) {
        return null;
    }
    try {
        return ModelUtils.readModel(pom);
    } catch (IOException e) {
        throw new BootstrapMavenException("Failed to parse " + pom, e);
    }
}
/**
 * Resolves the repositories declared by the current project's POM hierarchy by
 * reading its artifact descriptor (which merges parent POMs and profiles).
 *
 * @param repos repositories to use while resolving the descriptor (e.g. for parents)
 * @return repositories declared by the project, empty when no current project exists
 * @throws BootstrapMavenException if the descriptor could not be read
 */
private List<RemoteRepository> resolveRawProjectRepos(List<RemoteRepository> repos)
        throws BootstrapMavenException {
    final Artifact projectArtifact;
    if (currentProject == null) {
        // no resolved workspace project; fall back to parsing the POM directly
        final Model model = loadCurrentProjectModel();
        if (model == null) {
            return List.of();
        }
        projectArtifact = new DefaultArtifact(ModelUtils.getGroupId(model), model.getArtifactId(),
                ArtifactCoords.DEFAULT_CLASSIFIER, ArtifactCoords.TYPE_POM, ModelUtils.getVersion(model));
    } else {
        projectArtifact = new DefaultArtifact(currentProject.getGroupId(), currentProject.getArtifactId(),
                ArtifactCoords.DEFAULT_CLASSIFIER, ArtifactCoords.TYPE_POM, currentProject.getVersion());
    }
    final RepositorySystem repoSystem = getRepositorySystem();
    final RepositorySystemSession repoSession = getRepositorySystemSession();
    try {
        return repoSystem
                .readArtifactDescriptor(repoSession, new ArtifactDescriptorRequest()
                        .setArtifact(projectArtifact)
                        .setRepositories(repos))
                .getRepositories();
    } catch (ArtifactDescriptorException e) {
        throw new BootstrapMavenException("Failed to read artifact descriptor for " + projectArtifact, e);
    }
}
/**
 * Determines (once) and caches the active profiles from the effective settings.
 * <p>
 * Profiles are activated either by the standard activators (property, JDK, OS, file)
 * evaluated against the CLI options and current project dir, or by being listed in
 * {@code <activeProfiles>} in the settings; the latter are appended after the
 * activator-selected ones, skipping duplicates.
 *
 * @return the active settings profiles, possibly empty, never {@code null}
 * @throws BootstrapMavenException if the effective settings could not be built
 */
public List<org.apache.maven.model.Profile> getActiveSettingsProfiles()
        throws BootstrapMavenException {
    if (activeSettingsProfiles != null) {
        return activeSettingsProfiles;
    }
    final Settings settings = getEffectiveSettings();
    final List<Profile> allSettingsProfiles = settings.getProfiles();
    if (allSettingsProfiles.isEmpty()) {
        return activeSettingsProfiles = List.of();
    }
    final BootstrapMavenOptions mvnArgs = getCliOptions();
    final Path currentPom = getCurrentProjectPomOrNull();
    final DefaultProfileActivationContext context = new DefaultProfileActivationContext()
            .setActiveProfileIds(mvnArgs.getActiveProfileIds())
            .setInactiveProfileIds(mvnArgs.getInactiveProfileIds())
            .setSystemProperties(System.getProperties())
            .setProjectDirectory(
                    currentPom == null ? getCurrentProjectBaseDir().toFile() : currentPom.getParent().toFile());
    final DefaultProfileSelector profileSelector = new DefaultProfileSelector()
            .addProfileActivator(new PropertyProfileActivator())
            .addProfileActivator(new JdkVersionProfileActivator())
            .addProfileActivator(new OperatingSystemProfileActivator())
            .addProfileActivator(createFileProfileActivator());
    List<org.apache.maven.model.Profile> selectedProfiles = profileSelector
            .getActiveProfiles(toModelProfiles(allSettingsProfiles), context, new ModelProblemCollector() {
                public void add(ModelProblemCollectorRequest req) {
                    // activation problems are logged but do not fail the bootstrap
                    log.error("Failed to activate a Maven profile: " + req.getMessage());
                }
            });
    if (!settings.getActiveProfiles().isEmpty()) {
        final Set<String> activeProfiles = new HashSet<>(settings.getActiveProfiles());
        // remove already activated to avoid duplicates
        selectedProfiles.forEach(p -> activeProfiles.remove(p.getId()));
        if (!activeProfiles.isEmpty()) {
            final List<org.apache.maven.model.Profile> allActiveProfiles = new ArrayList<>(
                    selectedProfiles.size() + activeProfiles.size());
            allActiveProfiles.addAll(selectedProfiles);
            for (Profile profile : allSettingsProfiles) {
                if (activeProfiles.contains(profile.getId())) {
                    allActiveProfiles.add(SettingsUtils.convertFromSettingsProfile(profile));
                }
            }
            selectedProfiles = allActiveProfiles;
        }
    }
    return activeSettingsProfiles = selectedProfiles;
}
/**
 * Converts settings-level profiles to model-level profiles.
 */
private static List<org.apache.maven.model.Profile> toModelProfiles(List<Profile> profiles) {
    final List<org.apache.maven.model.Profile> converted = new ArrayList<>(profiles.size());
    profiles.forEach(profile -> converted.add(SettingsUtils.convertFromSettingsProfile(profile)));
    return converted;
}
/**
 * Checks whether the given repositories already include the default (Maven Central)
 * repository, matched by its well-known id.
 */
private static boolean includesDefaultRepo(List<RemoteRepository> repositories) {
    // an empty list trivially does not contain Central; the loop handles that case
    for (ArtifactRepository repository : repositories) {
        if (repository.getId().equals(DEFAULT_REMOTE_REPO_ID)) {
            return true;
        }
    }
    return false;
}
/**
 * Converts model-level repository declarations to Aether remote repositories and
 * appends them to {@code all}, preserving release/snapshot policies when present.
 *
 * @param repositories model repositories to convert
 * @param all          target list the converted repositories are appended to
 */
private static void addProfileRepos(List<org.apache.maven.model.Repository> repositories,
        final List<RemoteRepository> all) {
    for (org.apache.maven.model.Repository repo : repositories) {
        final RemoteRepository.Builder repoBuilder = new RemoteRepository.Builder(repo.getId(), repo.getLayout(),
                repo.getUrl());
        org.apache.maven.model.RepositoryPolicy policy = repo.getReleases();
        if (policy != null) {
            repoBuilder.setReleasePolicy(toAetherRepoPolicy(policy));
        }
        policy = repo.getSnapshots();
        if (policy != null) {
            repoBuilder.setSnapshotPolicy(toAetherRepoPolicy(policy));
        }
        all.add(repoBuilder.build());
    }
}
/**
 * Converts a model-level repository policy to its Aether equivalent,
 * defaulting unset update/checksum policies to daily updates and checksum warnings.
 */
private static RepositoryPolicy toAetherRepoPolicy(org.apache.maven.model.RepositoryPolicy modelPolicy) {
    return new RepositoryPolicy(modelPolicy.isEnabled(),
            isEmpty(modelPolicy.getUpdatePolicy()) ? RepositoryPolicy.UPDATE_POLICY_DAILY
                    : modelPolicy.getUpdatePolicy(),
            isEmpty(modelPolicy.getChecksumPolicy()) ? RepositoryPolicy.CHECKSUM_POLICY_WARN
                    : modelPolicy.getChecksumPolicy());
}
/**
 * Null-safe emptiness check for character sequences.
 */
private static boolean isEmpty(final CharSequence cs) {
    if (cs == null) {
        return true;
    }
    return cs.length() == 0;
}
/**
 * Initializes the repository system, the remote repository manager and the settings
 * decrypter from a single {@code MavenFactory} container; each is only assigned if
 * not already set (they may have been injected externally).
 */
private void initRepoSystemAndManager() {
    final MavenFactory factory = configureMavenFactory();
    if (repoSystem == null) {
        repoSystem = factory.getRepositorySystem();
    }
    if (remoteRepoManager == null) {
        remoteRepoManager = factory.getContainer().requireBean(RemoteRepositoryManager.class);
    }
    if (settingsDecrypter == null) {
        settingsDecrypter = factory.getContainer().requireBean(SettingsDecrypter.class);
    }
}
/**
 * Creates the {@code MavenFactory}, applying the configured Sisu bean package
 * includes/excludes and registering a high-priority {@code ModelBuilder} backed by
 * this bootstrap context. Protected so subclasses can customize the factory.
 */
protected MavenFactory configureMavenFactory() {
    return MavenFactory.create(RepositorySystem.class.getClassLoader(), builder -> {
        for (var pkg : includeSisuBeanPackages) {
            builder.includePackage(pkg);
        }
        for (var pkg : excludeSisuBeanPackages) {
            builder.excludePackage(pkg);
        }
        // priority 100 makes our ModelBuilder win over the default one
        builder.addBean(ModelBuilder.class)
                .setSupplier(scope -> new MavenModelBuilder(BootstrapMavenContext.this))
                .setPriority(100).build();
    });
}
/**
 * Builds the HTTP User-Agent string in the same format Maven itself uses,
 * e.g. {@code Apache-Maven/3.9.6 (Java 17.0.2; Linux 5.15)}.
 */
private static String getUserAgent() {
    return "Apache-Maven/" + getMavenVersion() + " (Java " + PropertyUtils.getProperty("java.version") + "; "
            + PropertyUtils.getProperty("os.name") + " " + PropertyUtils.getProperty("os.version") + ")";
}
/**
 * Determines the Maven version: from the {@code maven.version} property when running
 * inside Maven, otherwise from maven-core's {@code pom.properties} on the classpath,
 * falling back to {@code "unknown-version"}.
 */
private static String getMavenVersion() {
    final String mvnVersion = PropertyUtils.getProperty("maven.version");
    if (mvnVersion != null) {
        return mvnVersion;
    }
    final Properties props = new Properties();
    try (InputStream is = BootstrapMavenContext.class.getResourceAsStream(
            "/META-INF/maven/org.apache.maven/maven-core/pom.properties")) {
        if (is != null) {
            props.load(is);
        }
    } catch (IOException e) {
        // best effort; a missing version is not fatal
        log.debug("Failed to read Maven version", e);
    }
    return props.getProperty("version", "unknown-version");
}
/**
 * Returns whether a current project POM could be located, caching the result.
 */
public boolean isCurrentProjectExists() {
    return currentProjectExists == null
            ? currentProjectExists = getCurrentProjectPomOrNull() != null
            : currentProjectExists;
}
/**
 * Locates (once) the POM of the current project.
 * <p>
 * Both outcomes are cached: a found POM in {@code currentPom}, and "no project"
 * in {@code currentProjectExists}, so the filesystem is probed at most once.
 *
 * @return path to the current project's POM or {@code null} if none was found
 */
public Path getCurrentProjectPomOrNull() {
    if (currentPom != null
            || currentProjectExists != null && !currentProjectExists) {
        // either already resolved, or already known to not exist
        return currentPom;
    }
    final Path pom = resolveCurrentPom();
    return currentPom = (currentProjectExists = pom != null) ? pom : null;
}
/**
 * Resolves the current project's POM path, honoring (in order) an absolute
 * {@code alternatePomName}, the {@code -f/--file} CLI argument, the Maven
 * {@code basedir} property, and finally the process working directory.
 *
 * @return path to the POM or {@code null} when none could be located
 */
private Path resolveCurrentPom() {
    Path alternatePom = null;
    // explicitly set absolute path has a priority
    if (alternatePomName != null) {
        alternatePom = Paths.get(alternatePomName);
        if (alternatePom.isAbsolute()) {
            return pomXmlOrNull(alternatePom);
        }
    }
    if (alternatePom == null) {
        // check whether an alternate pom was provided as a CLI arg
        final String cliPomName = getCliOptions().getOptionValue(BootstrapMavenOptions.ALTERNATE_POM_FILE);
        if (cliPomName != null) {
            alternatePom = Path.of(cliPomName);
        }
    }
    final String basedirProp = PropertyUtils.getProperty(BASEDIR);
    if (basedirProp != null) {
        // this is the actual current project dir
        return getPomForDirOrNull(Path.of(basedirProp), alternatePom);
    }
    // we are not in the context of a Maven build
    if (alternatePom != null && alternatePom.isAbsolute()) {
        return pomXmlOrNull(alternatePom);
    }
    // trying the current dir as the basedir
    final Path basedir = Path.of("").normalize().toAbsolutePath();
    if (alternatePom != null) {
        return pomXmlOrNull(basedir.resolve(alternatePom));
    }
    final Path pom = basedir.resolve(LocalProject.POM_XML);
    return Files.exists(pom) ? pom : null;
}
/**
 * Resolves the POM within {@code basedir}, taking an alternate POM path into account:
 * when the alternate path is a single name or its parent matches the basedir's tail,
 * the alternate file name is tried first; otherwise {@code pom.xml} in the basedir.
 *
 * @param basedir      project base directory
 * @param alternatePom alternate POM path, may be {@code null}
 * @return the resolved POM path or {@code null} when nothing exists
 */
static Path getPomForDirOrNull(final Path basedir, Path alternatePom) {
    if (alternatePom != null) {
        if (alternatePom.getNameCount() == 1 || basedir.endsWith(alternatePom.getParent())) {
            if (alternatePom.isAbsolute()) {
                // if the basedir matches the parent of the alternate pom, it's the alternate pom
                return alternatePom;
            }
            // if the basedir ends with the alternate POM parent relative path, we can try it as the base dir
            final Path pom = basedir.resolve(alternatePom.getFileName());
            if (Files.exists(pom)) {
                return pom;
            }
        }
    }
    final Path pom = basedir.resolve(LocalProject.POM_XML);
    if (Files.exists(pom)) {
        return pom;
    }
    // give up
    return null;
}
/**
 * Normalizes a POM argument: a directory is resolved to its {@code pom.xml};
 * returns the normalized path when it exists, {@code null} otherwise.
 */
private static Path pomXmlOrNull(Path path) {
    final Path candidate = Files.isDirectory(path) ? path.resolve(LocalProject.POM_XML) : path;
    return Files.exists(candidate) ? candidate.normalize() : null;
}
/**
 * Returns the base directory of the current project: the resolved project's dir,
 * the Maven {@code basedir} property, or the process working directory.
 */
public Path getCurrentProjectBaseDir() {
    if (currentProject != null) {
        return currentProject.getDir();
    }
    final String basedirProp = PropertyUtils.getProperty(BASEDIR);
    return basedirProp == null ? Path.of("").normalize().toAbsolutePath() : Paths.get(basedirProp);
}
/**
 * Returns the base directory of the root project of the workspace,
 * or {@code null} when unknown.
 */
public Path getRootProjectBaseDir() {
    // originally we checked for MAVEN_PROJECTBASEDIR which is set by the mvn script
    // and points to the first parent containing '.mvn' dir but it's not consistent
    // with how Maven discovers the workspace and also created issues testing the Quarkus platform
    // due to its specific FS layout
    return rootProjectDir;
}
/**
 * Whether workspace POMs should be preferred over those found in repositories.
 */
public boolean isPreferPomsFromWorkspace() {
    return preferPomsFromWorkspace;
}
/**
 * Whether effective (fully interpolated) model building is enabled; the system
 * property is read once and the result cached for the lifetime of this context.
 */
public boolean isEffectiveModelBuilder() {
    if (effectiveModelBuilder == null) {
        effectiveModelBuilder = Boolean.getBoolean(EFFECTIVE_MODEL_BUILDER_PROP);
    }
    return effectiveModelBuilder;
}
/**
 * Whether the parent hierarchy of workspace modules should be loaded.
 * An unset (null) flag means {@code false}.
 */
public boolean isWorkspaceModuleParentHierarchy() {
    // Boolean.TRUE.equals is null-safe and replaces the `x == null ? false : x`
    // ternary, which relied on implicit unboxing
    return Boolean.TRUE.equals(wsModuleParentHierarchy);
}
// Environment-variable based repository configuration:
// BOOTSTRAP_MAVEN_REPOS is a comma-separated list of repo ids; each repo <ID> is
// then configured via BOOTSTRAP_MAVEN_REPO_<ID>_URL / _SNAPSHOT / _RELEASE variables.
static final String BOOTSTRAP_MAVEN_REPOS = "BOOTSTRAP_MAVEN_REPOS";
static final String BOOTSTRAP_MAVEN_REPO_PREFIX = "BOOTSTRAP_MAVEN_REPO_";
static final String URL_SUFFIX = "_URL";
static final String SNAPSHOT_SUFFIX = "_SNAPSHOT";
static final String RELEASE_SUFFIX = "_RELEASE";
/**
 * Reads repository definitions configured via environment variables.
 * The {@code BOOTSTRAP_MAVEN_REPOS} variable holds a comma-separated list of repo ids;
 * each id is then initialized from its {@code BOOTSTRAP_MAVEN_REPO_<ID>_*} variables.
 *
 * @param repos target list the configured repositories are appended to
 * @param env   environment variables (injectable for testing)
 */
static void readMavenReposFromEnv(List<RemoteRepository> repos, Map<String, String> env) {
    final String envRepos = env.get(BOOTSTRAP_MAVEN_REPOS);
    if (envRepos == null || envRepos.isBlank()) {
        return;
    }
    // scan comma-separated ids; a trailing comma yields no extra (empty) id
    int start = 0;
    int comma;
    while ((comma = envRepos.indexOf(',', start)) >= 0) {
        initMavenRepoFromEnv(envRepos, envRepos.substring(start, comma), env, repos);
        start = comma + 1;
    }
    if (start < envRepos.length()) {
        initMavenRepoFromEnv(envRepos, envRepos.substring(start), env, repos);
    }
}
/**
 * Initializes a single repository from its {@code BOOTSTRAP_MAVEN_REPO_<ID>_*}
 * environment variables and appends it to {@code repos}. A repo without a URL is
 * skipped with a warning. Snapshots and releases default to enabled; setting the
 * corresponding variable to anything but {@code true} disables them.
 *
 * @param envRepos the raw BOOTSTRAP_MAVEN_REPOS value, used only for the warning message
 * @param repoId   id of the repository being initialized
 * @param env      environment variables
 * @param repos    target list
 */
private static void initMavenRepoFromEnv(String envRepos, String repoId, Map<String, String> env,
        List<RemoteRepository> repos) {
    // env var names use '_' where the repo id has '.' or '-', and are upper-cased
    final String envRepoId = toEnvVarPart(repoId);
    String repoUrl = null;
    boolean snapshot = true;
    boolean release = true;
    for (Map.Entry<String, String> envvar : env.entrySet()) {
        final String varName = envvar.getKey();
        if (varName.startsWith(BOOTSTRAP_MAVEN_REPO_PREFIX)
                && varName.regionMatches(BOOTSTRAP_MAVEN_REPO_PREFIX.length(), envRepoId, 0, envRepoId.length())) {
            if (isMavenRepoEnvVarOption(varName, repoId, URL_SUFFIX)) {
                repoUrl = envvar.getValue();
            } else if (isMavenRepoEnvVarOption(varName, repoId, SNAPSHOT_SUFFIX)) {
                snapshot = Boolean.parseBoolean(envvar.getValue());
            } else if (isMavenRepoEnvVarOption(varName, repoId, RELEASE_SUFFIX)) {
                release = Boolean.parseBoolean(envvar.getValue());
            }
        }
    }
    if (repoUrl == null || repoUrl.isBlank()) {
        log.warn("Maven repository " + repoId + " listed in " + BOOTSTRAP_MAVEN_REPOS + "=" + envRepos
                + " was ignored because the corresponding " + BOOTSTRAP_MAVEN_REPO_PREFIX + envRepoId + URL_SUFFIX
                + " is missing");
    } else {
        final RemoteRepository.Builder repoBuilder = new RemoteRepository.Builder(repoId, "default", repoUrl);
        // only disabled policies need to be set explicitly; enabled is the builder default
        if (!release) {
            repoBuilder.setReleasePolicy(new RepositoryPolicy(false, RepositoryPolicy.UPDATE_POLICY_DAILY,
                    RepositoryPolicy.CHECKSUM_POLICY_WARN));
        }
        if (!snapshot) {
            repoBuilder.setSnapshotPolicy(new RepositoryPolicy(false, RepositoryPolicy.UPDATE_POLICY_DAILY,
                    RepositoryPolicy.CHECKSUM_POLICY_WARN));
        }
        repos.add(repoBuilder.build());
    }
}
/**
 * Maps a repository id to its environment-variable form: '.' and '-' become '_',
 * all other characters are upper-cased.
 */
private static String toEnvVarPart(String s) {
    final StringBuilder sb = new StringBuilder(s.length());
    for (char c : s.toCharArray()) {
        sb.append(c == '.' || c == '-' ? '_' : Character.toUpperCase(c));
    }
    return sb.toString();
}
/**
 * Checks whether {@code varName} is exactly {@code BOOTSTRAP_MAVEN_REPO_<id><option>}:
 * the length must match precisely (prefix + id + option) and the option must be the suffix,
 * which avoids matching options of repo ids that share a common prefix.
 */
private static boolean isMavenRepoEnvVarOption(String varName, String repoId, String option) {
    return varName.length() == BOOTSTRAP_MAVEN_REPO_PREFIX.length() + repoId.length() + option.length()
            && varName.endsWith(option);
}
/**
 * Creates a {@link FileProfileActivator} compatible with both current Maven and
 * Maven older than 3.8.5: newer versions use a
 * {@code ProfileActivationFilePathInterpolator}; on older versions, where that class
 * does not exist, the path translator is set reflectively instead.
 *
 * @throws BootstrapMavenException if neither configuration approach works
 */
private static FileProfileActivator createFileProfileActivator() throws BootstrapMavenException {
    var activator = new FileProfileActivator();
    var translator = new DefaultPathTranslator();
    try {
        var pathInterpolator = new ProfileActivationFilePathInterpolator();
        pathInterpolator.setPathTranslator(translator);
        activator.setProfileActivationFilePathInterpolator(pathInterpolator);
    } catch (NoClassDefFoundError e) {
        // ProfileActivationFilePathInterpolator not found; Maven < 3.8.5 (https://github.com/apache/maven/pull/649)
        try {
            activator.getClass().getMethod("setPathTranslator", org.apache.maven.model.path.PathTranslator.class)
                    .invoke(activator, translator);
        } catch (ReflectiveOperationException reflectionExc) {
            throw new BootstrapMavenException(
                    "Failed to set up DefaultPathTranslator for Maven < 3.8.5 DefaultPathTranslator",
                    reflectionExc);
        }
    }
    return activator;
}
}
| BootstrapMavenContext |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/IsNameNodeActiveServlet.java | {
"start": 974,
"end": 1216
} | class ____ extends IsActiveServlet {
@Override
protected boolean isActive() {
NameNode namenode = NameNodeHttpServer.getNameNodeFromContext(
getServletContext());
return namenode.isActiveState();
}
}
| IsNameNodeActiveServlet |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/constructor/CreatingMocksWithConstructorTest.java | {
"start": 860,
"end": 1262
} | class ____ {
private final String message;
AbstractMessage() {
this.message = "hey!";
}
AbstractMessage(String message) {
this.message = message;
}
AbstractMessage(int i) {
this.message = String.valueOf(i);
}
String getMessage() {
return message;
}
}
static | AbstractMessage |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/LoggingAuditor.java | {
"start": 4172,
"end": 4297
} | class ____ at TRACE.
* The context information is added as the HTTP referrer.
*/
@InterfaceAudience.Private
public | construction |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java | {
"start": 1165,
"end": 4995
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreatestBooleanEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator[] values;
private final DriverContext driverContext;
private Warnings warnings;
public GreatestBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values,
DriverContext driverContext) {
this.source = source;
this.values = values;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
BooleanBlock[] valuesBlocks = new BooleanBlock[values.length];
try (Releasable valuesRelease = Releasables.wrap(valuesBlocks)) {
for (int i = 0; i < valuesBlocks.length; i++) {
valuesBlocks[i] = (BooleanBlock)values[i].eval(page);
}
BooleanVector[] valuesVectors = new BooleanVector[values.length];
for (int i = 0; i < valuesBlocks.length; i++) {
valuesVectors[i] = valuesBlocks[i].asVector();
if (valuesVectors[i] == null) {
return eval(page.getPositionCount(), valuesBlocks);
}
}
return eval(page.getPositionCount(), valuesVectors).asBlock();
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
for (EvalOperator.ExpressionEvaluator e : values) {
baseRamBytesUsed += e.baseRamBytesUsed();
}
return baseRamBytesUsed;
}
public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) {
try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
boolean[] valuesValues = new boolean[values.length];
position: for (int p = 0; p < positionCount; p++) {
for (int i = 0; i < valuesBlocks.length; i++) {
switch (valuesBlocks[i].getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
}
// unpack valuesBlocks into valuesValues
for (int i = 0; i < valuesBlocks.length; i++) {
int o = valuesBlocks[i].getFirstValueIndex(p);
valuesValues[i] = valuesBlocks[i].getBoolean(o);
}
result.appendBoolean(Greatest.process(valuesValues));
}
return result.build();
}
}
public BooleanVector eval(int positionCount, BooleanVector[] valuesVectors) {
try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
boolean[] valuesValues = new boolean[values.length];
position: for (int p = 0; p < positionCount; p++) {
// unpack valuesVectors into valuesValues
for (int i = 0; i < valuesVectors.length; i++) {
valuesValues[i] = valuesVectors[i].getBoolean(p);
}
result.appendBoolean(p, Greatest.process(valuesValues));
}
return result.build();
}
}
@Override
public String toString() {
return "GreatestBooleanEvaluator[" + "values=" + Arrays.toString(values) + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(() -> Releasables.close(values));
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | GreatestBooleanEvaluator |
java | mapstruct__mapstruct | integrationtest/src/test/java/org/mapstruct/itest/tests/GradleIncrementalCompilationTest.java | {
"start": 5924,
"end": 6787
} | interface
____ mapperFile = new File( sourceDirectory, "org/mapstruct/itest/gradle/lib/TestMapper.java" );
replaceInFile( mapperFile, "field", "otherField" );
BuildResult secondBuildResult = getRunner( "--info" ).build();
// 2 classes should be recompiled: TestMapper -> TestMapperImpl
assertRecompiled( secondBuildResult, 2 );
}
@ParameterizedTest
@MethodSource("gradleVersions")
public void testChangeUnrelatedFile(String gradleVersion) throws IOException {
setup( gradleVersion );
getRunner().build();
File unrelatedFile = new File( sourceDirectory, "org/mapstruct/itest/gradle/lib/UnrelatedComponent.java" );
replaceInFile( unrelatedFile, "true", "false" );
BuildResult secondBuildResult = getRunner( "--info" ).build();
// Only the UnrelatedComponent | File |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/language/JavaExpression.java | {
"start": 1354,
"end": 3180
} | class ____ extends TypedExpressionDefinition {
@XmlAttribute
@Metadata(label = "advanced", defaultValue = "true", javaType = "java.lang.Boolean")
private String preCompile;
@XmlAttribute
@Metadata(label = "advanced", defaultValue = "true", javaType = "java.lang.Boolean")
private String singleQuotes;
public JavaExpression() {
}
protected JavaExpression(JavaExpression source) {
super(source);
this.preCompile = source.preCompile;
this.singleQuotes = source.singleQuotes;
}
public JavaExpression(String expression) {
super(expression);
}
private JavaExpression(Builder builder) {
super(builder);
this.preCompile = builder.preCompile;
this.singleQuotes = builder.singleQuotes;
}
@Override
public JavaExpression copyDefinition() {
return new JavaExpression(this);
}
@Override
public String getLanguage() {
return "java";
}
public String getPreCompile() {
return preCompile;
}
/**
* Whether the expression should be pre compiled once during initialization phase. If this is turned off, then the
* expression is reloaded and compiled on each evaluation.
*/
public void setPreCompile(String preCompile) {
this.preCompile = preCompile;
}
public String getSingleQuotes() {
return singleQuotes;
}
/**
* Whether single quotes can be used as replacement for double quotes. This is convenient when you need to work with
* strings inside strings.
*/
public void setSingleQuotes(String singleQuotes) {
this.singleQuotes = singleQuotes;
}
/**
* {@code Builder} is a specific builder for {@link JavaExpression}.
*/
@XmlTransient
public static | JavaExpression |
java | apache__camel | core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultConfigurerResolver.java | {
"start": 1309,
"end": 1634
} | class ____ implements ConfigurerResolver {
/**
* This is a special container for the CamelContext because, with Camel 4, we split the CamelContext and the former
* ExtendedCamelContext. This holds them in a single configuration, directing the target appropriately
*/
public static | DefaultConfigurerResolver |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/test/java/io/quarkus/redis/datasource/TransactionalTimeSeriesCommandsTest.java | {
"start": 995,
"end": 4895
} | class ____ extends DatasourceTestBase {
private RedisDataSource blocking;
private ReactiveRedisDataSource reactive;
@BeforeEach
void initialize() {
blocking = new BlockingRedisDataSourceImpl(vertx, redis, api, Duration.ofSeconds(60));
reactive = new ReactiveRedisDataSourceImpl(vertx, redis, api);
}
@AfterEach
public void clear() {
blocking.flushall();
}
@Test
public void timeSeriesBlocking() {
TransactionResult result = blocking.withTransaction(tx -> {
var ts = tx.timeseries();
assertThat(ts.getDataSource()).isEqualTo(tx);
ts.tsCreate("ts1");
ts.tsAdd("ts2", 10, 1, new AddArgs().label("foo", "bar").compressed().chunkSize(1024));
ts.tsCreate("ts3", new CreateArgs().forever().label("foo", "baz"));
ts.tsAdd("ts1", 2);
ts.tsAdd("ts1", 20, 20);
ts.tsMAdd(
SeriesSample.from("ts1", 30, 3),
SeriesSample.from("ts2", 30, 3),
SeriesSample.from("ts3", 30, 5));
ts.tsGet("ts3"); // 5
ts.tsMGet(Filter.withLabel("foo", "bar")); // ts2
ts.tsMGet(new MGetArgs().withLabels(), Filter.withLabel("foo", "baz")); // ts3
ts.tsRange("ts3", TimeSeriesRange.fromTimeSeries());
ts.tsMRange(TimeSeriesRange.fromTimeSeries(), Filter.withLabelHavingValueFrom("foo", "bar", "baz"));
});
assertThat(result.size()).isEqualTo(11);
assertThat(result.discarded()).isFalse();
assertThat((Sample) result.get(6)).isEqualTo(new Sample(30, 5.0));
Map<String, SampleGroup> map = result.get(7);
assertThat(map).hasSize(1);
map = result.get(8);
assertThat(map).hasSize(1);
assertThat((List<Sample>) result.get(9)).containsExactly(new Sample(30, 5.0));
map = result.get(10);
assertThat(map).hasSize(2);
}
@Test
public void timeseriesReactive() {
TransactionResult result = reactive.withTransaction(tx -> {
var ts = tx.timeseries();
assertThat(ts.getDataSource()).isEqualTo(tx);
var u1 = ts.tsCreate("ts1");
var u2 = ts.tsAdd("ts2", 10, 1, new AddArgs().label("foo", "bar").compressed().chunkSize(1024));
var u3 = ts.tsCreate("ts3", new CreateArgs().forever().label("foo", "baz"));
var u4 = ts.tsAdd("ts1", 2);
var u5 = ts.tsAdd("ts1", 20, 20);
var u6 = ts.tsMAdd(
SeriesSample.from("ts1", 30, 3),
SeriesSample.from("ts2", 30, 3),
SeriesSample.from("ts3", 30, 5));
var u7 = ts.tsGet("ts3"); // 5
var u8 = ts.tsMGet(Filter.withLabel("foo", "bar")); // ts2
var u9 = ts.tsMGet(new MGetArgs().withLabels(), Filter.withLabel("foo", "baz")); // ts3
var u10 = ts.tsRange("ts3", TimeSeriesRange.fromTimeSeries());
var u11 = ts.tsMRange(TimeSeriesRange.fromTimeSeries(), Filter.withLabelHavingValueFrom("foo", "bar", "baz"));
return u1.chain(() -> u2).chain(() -> u3).chain(() -> u4).chain(() -> u5)
.chain(() -> u6).chain(() -> u7).chain(() -> u8)
.chain(() -> u9).chain(() -> u10).chain(() -> u11);
}).await().indefinitely();
assertThat(result.size()).isEqualTo(11);
assertThat(result.discarded()).isFalse();
assertThat((Sample) result.get(6)).isEqualTo(new Sample(30, 5.0));
Map<String, SampleGroup> map = result.get(7);
assertThat(map).hasSize(1);
map = result.get(8);
assertThat(map).hasSize(1);
assertThat((List<Sample>) result.get(9)).containsExactly(new Sample(30, 5.0));
map = result.get(10);
assertThat(map).hasSize(2);
}
}
| TransactionalTimeSeriesCommandsTest |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/view/AbstractTemplateViewResolver.java | {
"start": 1104,
"end": 3962
} | class ____ extends UrlBasedViewResolver {
private boolean exposeRequestAttributes = false;
private boolean allowRequestOverride = false;
private boolean exposeSessionAttributes = false;
private boolean allowSessionOverride = false;
private boolean exposeSpringMacroHelpers = true;
@Override
protected Class<?> requiredViewClass() {
return AbstractTemplateView.class;
}
/**
* Set whether all request attributes should be added to the
* model prior to merging with the template. Default is "false".
* @see AbstractTemplateView#setExposeRequestAttributes
*/
public void setExposeRequestAttributes(boolean exposeRequestAttributes) {
this.exposeRequestAttributes = exposeRequestAttributes;
}
/**
* Set whether HttpServletRequest attributes are allowed to override (hide)
* controller generated model attributes of the same name. Default is "false",
* which causes an exception to be thrown if request attributes of the same
* name as model attributes are found.
* @see AbstractTemplateView#setAllowRequestOverride
*/
public void setAllowRequestOverride(boolean allowRequestOverride) {
this.allowRequestOverride = allowRequestOverride;
}
/**
* Set whether all HttpSession attributes should be added to the
* model prior to merging with the template. Default is "false".
* @see AbstractTemplateView#setExposeSessionAttributes
*/
public void setExposeSessionAttributes(boolean exposeSessionAttributes) {
this.exposeSessionAttributes = exposeSessionAttributes;
}
/**
* Set whether HttpSession attributes are allowed to override (hide)
* controller generated model attributes of the same name. Default is "false",
* which causes an exception to be thrown if session attributes of the same
* name as model attributes are found.
* @see AbstractTemplateView#setAllowSessionOverride
*/
public void setAllowSessionOverride(boolean allowSessionOverride) {
this.allowSessionOverride = allowSessionOverride;
}
/**
* Set whether to expose a RequestContext for use by Spring's macro library,
* under the name "springMacroRequestContext". Default is "true".
* @see AbstractTemplateView#setExposeSpringMacroHelpers
*/
public void setExposeSpringMacroHelpers(boolean exposeSpringMacroHelpers) {
this.exposeSpringMacroHelpers = exposeSpringMacroHelpers;
}
@Override
protected AbstractUrlBasedView buildView(String viewName) throws Exception {
AbstractTemplateView view = (AbstractTemplateView) super.buildView(viewName);
view.setExposeRequestAttributes(this.exposeRequestAttributes);
view.setAllowRequestOverride(this.allowRequestOverride);
view.setExposeSessionAttributes(this.exposeSessionAttributes);
view.setAllowSessionOverride(this.allowSessionOverride);
view.setExposeSpringMacroHelpers(this.exposeSpringMacroHelpers);
return view;
}
}
| AbstractTemplateViewResolver |
java | quarkusio__quarkus | independent-projects/qute/core/src/test/java/io/quarkus/qute/QuteTest.java | {
"start": 4192,
"end": 4492
} | class ____ {
public final String name;
static final AtomicInteger FOO_COUNTER = new AtomicInteger();
public Animal(String name) {
this.name = name;
}
public int getFoo() {
return FOO_COUNTER.incrementAndGet();
}
}
}
| Animal |
java | apache__camel | core/camel-main/src/main/java/org/apache/camel/main/BaseMainSupport.java | {
"start": 5898,
"end": 5998
} | class ____ main implementations to allow bootstrapping Camel in standalone mode.
*/
public abstract | for |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueNotEclipseTest.java | {
"start": 1581,
"end": 1646
} | interface ____<T> {
Optional<T> optional();
| AbstractOptional |
java | resilience4j__resilience4j | resilience4j-spring-boot2/src/main/java/io/github/resilience4j/timelimiter/autoconfigure/TimeLimiterProperties.java | {
"start": 892,
"end": 967
} | class ____ extends TimeLimiterConfigurationProperties {
}
| TimeLimiterProperties |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/util/BenchmarkMessageParams.java | {
"start": 922,
"end": 1622
} | class ____ {
static final char[] CHARS = new char[16];
static {
Arrays.fill(CHARS, 'a');
}
public static final String TEST = new String(CHARS);
public static volatile String one = "1";
public static volatile String two = "2";
public static volatile String three = "3";
public static volatile String four = "4";
public static volatile String five = "5";
public static volatile String six = "6";
public static volatile String seven = "7";
public static volatile String eight = "8";
public static volatile String nine = "9";
public static volatile String ten = "10";
public static volatile String eleven = "11";
}
| BenchmarkMessageParams |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/subjects/ReplaySubject.java | {
"start": 24522,
"end": 25559
} | interface ____<T> {
void add(T value);
void addFinal(Object notificationLite);
void replay(ReplayDisposable<T> rs);
int size();
@Nullable
T getValue();
T[] getValues(T[] array);
/**
* Returns the terminal NotificationLite object or null if not yet terminated.
* @return the terminal NotificationLite object or null if not yet terminated
*/
Object get();
/**
* Atomically compares and sets the next terminal NotificationLite object if the
* current equals to the expected NotificationLite object.
* @param expected the expected NotificationLite object
* @param next the next NotificationLite object
* @return true if successful
*/
boolean compareAndSet(Object expected, Object next);
/**
* Make sure an old inaccessible head value is released
* in a bounded buffer.
*/
void trimHead();
}
static final | ReplayBuffer |
java | apache__camel | components/camel-debezium/camel-debezium-mysql/src/main/java/org/apache/camel/component/debezium/mysql/DebeziumMySqlEndpoint.java | {
"start": 1592,
"end": 2899
} | class ____ extends DebeziumEndpoint<MySqlConnectorEmbeddedDebeziumConfiguration>
implements EndpointServiceLocation {
@UriParam
private MySqlConnectorEmbeddedDebeziumConfiguration configuration;
public DebeziumMySqlEndpoint(final String uri, final DebeziumMySqlComponent component,
final MySqlConnectorEmbeddedDebeziumConfiguration configuration) {
super(uri, component);
this.configuration = configuration;
}
public DebeziumMySqlEndpoint() {
}
@Override
public String getServiceUrl() {
return configuration.getDatabaseHostname() + ":" + configuration.getDatabasePort();
}
@Override
public String getServiceProtocol() {
return "jdbc";
}
@Override
public Map<String, String> getServiceMetadata() {
if (configuration.getDatabaseUser() != null) {
return Map.of("username", configuration.getDatabaseUser());
}
return null;
}
@Override
public MySqlConnectorEmbeddedDebeziumConfiguration getConfiguration() {
return configuration;
}
@Override
public void setConfiguration(final MySqlConnectorEmbeddedDebeziumConfiguration configuration) {
this.configuration = configuration;
}
}
| DebeziumMySqlEndpoint |
java | google__dagger | javatests/dagger/hilt/processor/internal/bindvalue/BindValueErrorsTest.java | {
"start": 15892,
"end": 16005
} | interface ____{}",
"",
" @jakarta.inject.Qualifier",
" @ | MyQualifier |
java | netty__netty | transport-classes-epoll/src/main/java/io/netty/channel/epoll/TcpMd5Util.java | {
"start": 1065,
"end": 2717
} | class ____ {
static Collection<InetAddress> newTcpMd5Sigs(AbstractEpollChannel channel, Collection<InetAddress> current,
Map<InetAddress, byte[]> newKeys) throws IOException {
checkNotNull(channel, "channel");
checkNotNull(current, "current");
checkNotNull(newKeys, "newKeys");
// Validate incoming values
for (Entry<InetAddress, byte[]> e : newKeys.entrySet()) {
final byte[] key = e.getValue();
checkNotNullWithIAE(e.getKey(), "e.getKey");
checkNonEmpty(key, e.getKey().toString());
if (key.length > Native.TCP_MD5SIG_MAXKEYLEN) {
throw new IllegalArgumentException("newKeys[" + e.getKey() +
"] has a key with invalid length; should not exceed the maximum length (" +
Native.TCP_MD5SIG_MAXKEYLEN + ')');
}
}
// Remove mappings not present in the new set.
for (InetAddress addr : current) {
if (!newKeys.containsKey(addr)) {
channel.socket.setTcpMd5Sig(addr, null);
}
}
if (newKeys.isEmpty()) {
return Collections.emptySet();
}
// Set new mappings and store addresses which we set.
final Collection<InetAddress> addresses = new ArrayList<InetAddress>(newKeys.size());
for (Entry<InetAddress, byte[]> e : newKeys.entrySet()) {
channel.socket.setTcpMd5Sig(e.getKey(), e.getValue());
addresses.add(e.getKey());
}
return addresses;
}
private TcpMd5Util() {
}
}
| TcpMd5Util |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/util/HtmlCharacterEntityReferencesTests.java | {
"start": 1065,
"end": 4555
} | class ____ {
private static final String DTD_FILE = "HtmlCharacterEntityReferences.dtd";
@Test
void testSupportsAllCharacterEntityReferencesDefinedByHtml() {
HtmlCharacterEntityReferences references = new HtmlCharacterEntityReferences();
Map<Integer, String> charactersMap = getReferenceCharacterMap();
for (int character = 0; character < 10000; character++) {
String referenceName = charactersMap.get(character);
if (referenceName != null) {
String fullReference = HtmlCharacterEntityReferences.REFERENCE_START + referenceName + HtmlCharacterEntityReferences.REFERENCE_END;
assertThat(references.isMappedToReference((char) character))
.as("The unicode character " + character + " should be mapped to a reference")
.isTrue();
assertThat(references.convertToReference((char) character))
.as("The reference of unicode character " + character + " should be entity " + referenceName)
.isEqualTo(fullReference);
assertThat(references.convertToCharacter(referenceName))
.as("The entity reference [" + referenceName + "] should be mapped to unicode character " + character)
.isEqualTo((char) character);
}
else if (character == 39) {
assertThat(references.isMappedToReference((char) character)).isTrue();
assertThat(references.convertToReference((char) character)).isEqualTo("'");
}
else {
assertThat(references.isMappedToReference((char) character))
.as("The unicode character " + character + " should not be mapped to a reference")
.isFalse();
assertThat(references.convertToReference((char) character))
.as("No entity reference of unicode character " + character + " should exist")
.isNull();
}
}
assertThat(references.getSupportedReferenceCount())
.as("The registered entity count of entityReferences should match the number of entity references")
.isEqualTo(charactersMap.size() + 1);
assertThat(references.getSupportedReferenceCount()).as(
"The HTML 4.0 Standard defines 252+1 entity references so do entityReferences")
.isEqualTo(252 + 1);
assertThat((int) references.convertToCharacter("invalid"))
.as("Invalid entity reference names should not be convertible")
.isEqualTo((char) -1);
}
// SPR-9293
@Test
void testConvertToReferenceUTF8() {
HtmlCharacterEntityReferences entityReferences = new HtmlCharacterEntityReferences();
String utf8 = "UTF-8";
assertThat(entityReferences.convertToReference('<', utf8)).isEqualTo("<");
assertThat(entityReferences.convertToReference('>', utf8)).isEqualTo(">");
assertThat(entityReferences.convertToReference('&', utf8)).isEqualTo("&");
assertThat(entityReferences.convertToReference('"', utf8)).isEqualTo(""");
assertThat(entityReferences.convertToReference('\'', utf8)).isEqualTo("'");
assertThat(entityReferences.convertToReference((char) 233, utf8)).isNull();
assertThat(entityReferences.convertToReference((char) 934, utf8)).isNull();
}
private Map<Integer, String> getReferenceCharacterMap() {
CharacterEntityResourceIterator entityIterator = new CharacterEntityResourceIterator();
Map<Integer, String> referencedCharactersMap = new HashMap<>();
while (entityIterator.hasNext()) {
int character = entityIterator.getReferredCharacter();
String entityName = entityIterator.nextEntry();
referencedCharactersMap.put(character, entityName);
}
return referencedCharactersMap;
}
private static | HtmlCharacterEntityReferencesTests |
java | google__gson | metrics/src/main/java/com/google/gson/metrics/BagOfPrimitivesDeserializationBenchmark.java | {
"start": 969,
"end": 4099
} | class ____ {
private Gson gson;
private String json;
public static void main(String[] args) {
NonUploadingCaliperRunner.run(BagOfPrimitivesDeserializationBenchmark.class, args);
}
@BeforeExperiment
void setUp() throws Exception {
this.gson = new Gson();
BagOfPrimitives bag = new BagOfPrimitives(10L, 1, false, "foo");
this.json = gson.toJson(bag);
}
/** Benchmark to measure Gson performance for deserializing an object */
public void timeBagOfPrimitivesDefault(int reps) {
for (int i = 0; i < reps; ++i) {
BagOfPrimitives unused = gson.fromJson(json, BagOfPrimitives.class);
}
}
/** Benchmark to measure deserializing objects by hand */
public void timeBagOfPrimitivesStreaming(int reps) throws IOException {
for (int i = 0; i < reps; ++i) {
StringReader reader = new StringReader(json);
JsonReader jr = new JsonReader(reader);
jr.beginObject();
long longValue = 0;
int intValue = 0;
boolean booleanValue = false;
String stringValue = null;
while (jr.hasNext()) {
String name = jr.nextName();
switch (name) {
case "longValue":
longValue = jr.nextLong();
break;
case "intValue":
intValue = jr.nextInt();
break;
case "booleanValue":
booleanValue = jr.nextBoolean();
break;
case "stringValue":
stringValue = jr.nextString();
break;
default:
throw new IOException("Unexpected name: " + name);
}
}
jr.endObject();
new BagOfPrimitives(longValue, intValue, booleanValue, stringValue);
}
}
/**
* This benchmark measures the ideal Gson performance: the cost of parsing a JSON stream and
* setting object values by reflection. We should strive to reduce the discrepancy between this
* and {@link #timeBagOfPrimitivesDefault(int)} .
*/
public void timeBagOfPrimitivesReflectionStreaming(int reps) throws Exception {
for (int i = 0; i < reps; ++i) {
StringReader reader = new StringReader(json);
JsonReader jr = new JsonReader(reader);
jr.beginObject();
BagOfPrimitives bag = new BagOfPrimitives();
while (jr.hasNext()) {
String name = jr.nextName();
for (Field field : BagOfPrimitives.class.getDeclaredFields()) {
if (field.getName().equals(name)) {
Class<?> fieldType = field.getType();
if (fieldType.equals(long.class)) {
field.setLong(bag, jr.nextLong());
} else if (fieldType.equals(int.class)) {
field.setInt(bag, jr.nextInt());
} else if (fieldType.equals(boolean.class)) {
field.setBoolean(bag, jr.nextBoolean());
} else if (fieldType.equals(String.class)) {
field.set(bag, jr.nextString());
} else {
throw new RuntimeException("Unexpected: type: " + fieldType + ", name: " + name);
}
}
}
}
jr.endObject();
}
}
}
| BagOfPrimitivesDeserializationBenchmark |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/KeyStoreProvider.java | {
"start": 1621,
"end": 2785
} | class ____ extends AbstractJavaKeyStoreProvider {
private FileSystem fs;
private FsPermission permissions;
protected KeyStoreProvider(URI uri, Configuration conf)
throws IOException {
super(uri, conf);
}
@Override
protected OutputStream getOutputStreamForKeystore() throws IOException {
FSDataOutputStream out = FileSystem.create(fs, getPath(), permissions);
return out;
}
@Override
protected boolean keystoreExists() throws IOException {
return fs.exists(getPath());
}
@Override
protected InputStream getInputStreamForFile() throws IOException {
return fs.open(getPath());
}
@Override
protected void createPermissions(String perms) {
permissions = new FsPermission(perms);
}
@Override
protected void stashOriginalFilePermissions() throws IOException {
// save off permissions in case we need to
// rewrite the keystore in flush()
FileStatus s = fs.getFileStatus(getPath());
permissions = s.getPermission();
}
protected void initFileSystem(URI uri)
throws IOException {
super.initFileSystem(uri);
fs = getPath().getFileSystem(getConf());
}
}
| KeyStoreProvider |
java | google__guice | extensions/throwingproviders/test/com/google/inject/throwingproviders/CheckedProviderTest.java | {
"start": 25202,
"end": 25346
} | interface ____<T> extends CheckedProvider<T> {
T get(T defaultValue) throws RemoteException, BindException;
}
| RemoteProviderWithExtraMethod |
java | elastic__elasticsearch | libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/env/EnvironmentBridge.java | {
"start": 1368,
"end": 1690
} | class ____ extends StableBridgeAPI.ProxyInternal<Environment> implements EnvironmentBridge {
private ProxyInternal(final Environment delegate) {
super(delegate);
}
@Override
public Environment toInternal() {
return this.internalDelegate;
}
}
}
| ProxyInternal |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/ProductionBindingRepresentation.java | {
"start": 1589,
"end": 4971
} | class ____ implements BindingRepresentation {
private final ContributionBinding binding;
private final DerivedFromFrameworkInstanceRequestRepresentation.Factory
derivedFromFrameworkInstanceRequestRepresentationFactory;
private final RequestRepresentation producerNodeInstanceRequestRepresentation;
private final Map<BindingRequest, RequestRepresentation> requestRepresentations = new HashMap<>();
@AssistedInject
ProductionBindingRepresentation(
@Assisted ContributionBinding binding,
CompilerOptions compilerOptions,
ComponentImplementation componentImplementation,
DerivedFromFrameworkInstanceRequestRepresentation.Factory
derivedFromFrameworkInstanceRequestRepresentationFactory,
ProducerNodeInstanceRequestRepresentation.Factory
producerNodeInstanceRequestRepresentationFactory,
UnscopedFrameworkInstanceCreationExpressionFactory
unscopedFrameworkInstanceCreationExpressionFactory,
BindingRepresentations bindingRepresentations) {
this.binding = binding;
this.derivedFromFrameworkInstanceRequestRepresentationFactory =
derivedFromFrameworkInstanceRequestRepresentationFactory;
Optional<MemberSelect> staticMethod = staticFactoryCreation();
FrameworkInstanceSupplier frameworkInstanceSupplier =
staticMethod.isPresent()
? staticMethod::get
: new FrameworkFieldInitializer(
compilerOptions,
componentImplementation,
binding,
binding.scope().isPresent()
? bindingRepresentations.scope(
binding, unscopedFrameworkInstanceCreationExpressionFactory.create(binding))
: unscopedFrameworkInstanceCreationExpressionFactory.create(binding));
this.producerNodeInstanceRequestRepresentation =
producerNodeInstanceRequestRepresentationFactory.create(binding, frameworkInstanceSupplier);
}
@Override
public RequestRepresentation getRequestRepresentation(BindingRequest request) {
return reentrantComputeIfAbsent(
requestRepresentations, request, this::getRequestRepresentationUncached);
}
private RequestRepresentation getRequestRepresentationUncached(BindingRequest request) {
return request.requestKind().equals(RequestKind.PRODUCER)
? producerNodeInstanceRequestRepresentation
: derivedFromFrameworkInstanceRequestRepresentationFactory.create(
binding,
producerNodeInstanceRequestRepresentation,
request.requestKind(),
FrameworkType.PRODUCER_NODE);
}
/**
* If {@code resolvedBindings} is an unscoped provision binding with no factory arguments, then we
* don't need a field to hold its factory. In that case, this method returns the static member
* select that returns the factory.
*/
private Optional<MemberSelect> staticFactoryCreation() {
if (binding.dependencies().isEmpty()) {
if (binding.kind().equals(MULTIBOUND_MAP)) {
return Optional.of(StaticMemberSelects.emptyMapFactory((MultiboundMapBinding) binding));
}
if (binding.kind().equals(MULTIBOUND_SET)) {
return Optional.of(StaticMemberSelects.emptySetFactory((MultiboundSetBinding) binding));
}
}
return Optional.empty();
}
@AssistedFactory
static | ProductionBindingRepresentation |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/RoutingSlipDefinition.java | {
"start": 1491,
"end": 8919
} | class ____<Type extends ProcessorDefinition<Type>> extends ExpressionNode {
public static final String DEFAULT_DELIMITER = ",";
@XmlAttribute
@Metadata(defaultValue = ",")
private String uriDelimiter;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Boolean")
private String ignoreInvalidEndpoints;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Integer")
private String cacheSize;
public RoutingSlipDefinition() {
if (uriDelimiter == null) {
setUriDelimiter(DEFAULT_DELIMITER);
} else {
setUriDelimiter(uriDelimiter);
}
}
private RoutingSlipDefinition(RoutingSlipDefinition source) {
super(source);
this.uriDelimiter = source.uriDelimiter;
this.ignoreInvalidEndpoints = source.ignoreInvalidEndpoints;
this.cacheSize = source.cacheSize;
}
public RoutingSlipDefinition(String headerName) {
this(headerName, DEFAULT_DELIMITER);
}
public RoutingSlipDefinition(String headerName, String uriDelimiter) {
super(new HeaderExpression(headerName));
setUriDelimiter(uriDelimiter);
}
public RoutingSlipDefinition(Expression expression, String uriDelimiter) {
super(expression);
setUriDelimiter(uriDelimiter);
}
public RoutingSlipDefinition(Expression expression) {
this(expression, DEFAULT_DELIMITER);
}
@Override
public RoutingSlipDefinition copyDefinition() {
return new RoutingSlipDefinition<>(this);
}
@Override
public String toString() {
return "RoutingSlip[" + getExpression() + "]";
}
@Override
public String getShortName() {
return "routingSlip";
}
@Override
public String getLabel() {
return "routingSlip[" + getExpression() + "]";
}
@Override
public List<ProcessorDefinition<?>> getOutputs() {
return Collections.emptyList();
}
/**
* Expression to define the routing slip, which defines which endpoints to route the message in a pipeline style.
* Notice the expression is evaluated once, if you want a more dynamic style, then the dynamic router eip is a
* better choice.
*/
@Override
public void setExpression(ExpressionDefinition expression) {
// override to include javadoc what the expression is used for
super.setExpression(expression);
}
public void setUriDelimiter(String uriDelimiter) {
this.uriDelimiter = uriDelimiter;
}
public String getUriDelimiter() {
return uriDelimiter;
}
public void setIgnoreInvalidEndpoints(String ignoreInvalidEndpoints) {
this.ignoreInvalidEndpoints = ignoreInvalidEndpoints;
}
public String getIgnoreInvalidEndpoints() {
return ignoreInvalidEndpoints;
}
public String getCacheSize() {
return cacheSize;
}
public void setCacheSize(String cacheSize) {
this.cacheSize = cacheSize;
}
// Fluent API
// -------------------------------------------------------------------------
@Override
@SuppressWarnings("unchecked")
public Type end() {
// allow end() to return to previous type so you can continue in the DSL
return (Type) super.end();
}
/**
 * Ignores invalid endpoints, i.e. does not fail if a producer cannot be created for an endpoint.
 *
 * @return the builder
 */
public RoutingSlipDefinition<Type> ignoreInvalidEndpoints() {
    return ignoreInvalidEndpoints(true);
}
/**
 * Sets whether invalid endpoints are ignored, i.e. whether to fail if a producer cannot be
 * created for an endpoint.
 *
 * @param  ignoreInvalidEndpoints true to ignore invalid endpoints
 * @return                        the builder
 */
public RoutingSlipDefinition<Type> ignoreInvalidEndpoints(boolean ignoreInvalidEndpoints) {
    // String.valueOf(boolean) delegates to Boolean.toString, so the stored value is identical
    return ignoreInvalidEndpoints(String.valueOf(ignoreInvalidEndpoints));
}
/**
 * Sets whether invalid endpoints are ignored, i.e. whether to fail if a producer cannot be
 * created for an endpoint. May be a property placeholder.
 *
 * @param  ignoreInvalidEndpoints "true" to ignore invalid endpoints
 * @return                        the builder
 */
public RoutingSlipDefinition<Type> ignoreInvalidEndpoints(String ignoreInvalidEndpoints) {
    setIgnoreInvalidEndpoints(ignoreInvalidEndpoints);
    return this;
}
/**
 * Sets the uri delimiter to use between endpoint uris in the evaluated routing slip value.
 *
 * @param  uriDelimiter the delimiter
 * @return              the builder
 */
public RoutingSlipDefinition<Type> uriDelimiter(String uriDelimiter) {
    setUriDelimiter(uriDelimiter);
    return this;
}
/**
 * Sets the maximum size used by the {@link org.apache.camel.spi.ProducerCache} which is used to cache and reuse
 * producers when using this routing slip, when uris are reused.
 *
 * Beware that when using dynamic endpoints then it affects how well the cache can be utilized. If each dynamic
 * endpoint is unique then it is best to turn off caching by setting this to -1, which allows Camel to not cache
 * both the producers and endpoints; they are regarded as prototype scoped and will be stopped and discarded after
 * use. This reduces memory usage as otherwise producers/endpoints are stored in memory in the caches.
 *
 * However if there is a high degree of dynamic endpoints that have been used before, then it can benefit to use
 * the cache to reuse both producers and endpoints, and therefore the cache size can be set accordingly or rely on
 * the default size (1000). If there is a mix of unique and previously-used dynamic endpoints, then setting a
 * reasonable cache size can help reduce memory usage to avoid storing too many infrequently used producers.
 *
 * @param  cacheSize the cache size, use <tt>0</tt> for default cache size, or <tt>-1</tt> to turn cache off.
 * @return           the builder
 */
public RoutingSlipDefinition<Type> cacheSize(int cacheSize) {
    // String.valueOf(int) delegates to Integer.toString, so the stored value is identical
    return cacheSize(String.valueOf(cacheSize));
}
/**
 * Sets the maximum size used by the {@link org.apache.camel.spi.ProducerCache} which is used to cache and reuse
 * producers when using this routing slip, when uris are reused.
 *
 * Beware that when using dynamic endpoints then it affects how well the cache can be utilized. If each dynamic
 * endpoint is unique then it is best to turn off caching by setting this to -1, which allows Camel to not cache both
 * the producers and endpoints; they are regarded as prototype scoped and will be stopped and discarded after use.
 * This reduces memory usage as otherwise producers/endpoints are stored in memory in the caches.
 *
 * However if there is a high degree of dynamic endpoints that have been used before, then it can benefit to use
 * the cache to reuse both producers and endpoints and therefore the cache size can be set accordingly or rely on
 * the default size (1000).
 *
 * If there is a mix of unique and previously-used dynamic endpoints, then setting a reasonable cache size can help
 * reduce memory usage to avoid storing too many infrequently used producers.
 *
 * @param cacheSize the cache size, use <tt>0</tt> for default cache size, or <tt>-1</tt> to turn cache off.
 * @return the builder
 */
public RoutingSlipDefinition<Type> cacheSize(String cacheSize) {
    setCacheSize(cacheSize);
    return this;
}
}
| RoutingSlipDefinition |
java | google__dagger | javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java | {
"start": 15443,
"end": 15733
} | interface ____<T> {}");
Source barModule =
CompilerTests.javaSource(
"test.BarModule",
"package test;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"",
"@Module",
"public | Bar |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/jackson2/UnmodifiableSetMixin.java | {
"start": 1523,
"end": 1660
} | class ____ {
/**
* Mixin Constructor
* @param s the Set
*/
@JsonCreator
UnmodifiableSetMixin(Set<?> s) {
}
}
| UnmodifiableSetMixin |
java | spring-projects__spring-framework | spring-oxm/src/main/java/org/springframework/oxm/support/MarshallingSource.java | {
"start": 3345,
"end": 6542
} | class ____ implements XMLReader {
private final Marshaller marshaller;
private final Object content;
private @Nullable DTDHandler dtdHandler;
private @Nullable ContentHandler contentHandler;
private @Nullable EntityResolver entityResolver;
private @Nullable ErrorHandler errorHandler;
private @Nullable LexicalHandler lexicalHandler;
private MarshallingXMLReader(Marshaller marshaller, Object content) {
Assert.notNull(marshaller, "'marshaller' must not be null");
Assert.notNull(content, "'content' must not be null");
this.marshaller = marshaller;
this.content = content;
}
@Override
public void setContentHandler(@Nullable ContentHandler contentHandler) {
this.contentHandler = contentHandler;
}
@Override
public @Nullable ContentHandler getContentHandler() {
return this.contentHandler;
}
@Override
public void setDTDHandler(@Nullable DTDHandler dtdHandler) {
this.dtdHandler = dtdHandler;
}
@Override
public @Nullable DTDHandler getDTDHandler() {
return this.dtdHandler;
}
@Override
public void setEntityResolver(@Nullable EntityResolver entityResolver) {
this.entityResolver = entityResolver;
}
@Override
public @Nullable EntityResolver getEntityResolver() {
return this.entityResolver;
}
@Override
public void setErrorHandler(@Nullable ErrorHandler errorHandler) {
this.errorHandler = errorHandler;
}
@Override
public @Nullable ErrorHandler getErrorHandler() {
return this.errorHandler;
}
protected @Nullable LexicalHandler getLexicalHandler() {
return this.lexicalHandler;
}
@Override
public boolean getFeature(String name) throws SAXNotRecognizedException {
throw new SAXNotRecognizedException(name);
}
@Override
public void setFeature(String name, boolean value) throws SAXNotRecognizedException {
throw new SAXNotRecognizedException(name);
}
@Override
public @Nullable Object getProperty(String name) throws SAXNotRecognizedException {
if ("http://xml.org/sax/properties/lexical-handler".equals(name)) {
return this.lexicalHandler;
}
else {
throw new SAXNotRecognizedException(name);
}
}
@Override
public void setProperty(String name, Object value) throws SAXNotRecognizedException {
if ("http://xml.org/sax/properties/lexical-handler".equals(name)) {
this.lexicalHandler = (LexicalHandler) value;
}
else {
throw new SAXNotRecognizedException(name);
}
}
@Override
public void parse(InputSource input) throws SAXException {
parse();
}
@Override
public void parse(String systemId) throws SAXException {
parse();
}
private void parse() throws SAXException {
SAXResult result = new SAXResult(getContentHandler());
result.setLexicalHandler(getLexicalHandler());
try {
this.marshaller.marshal(this.content, result);
}
catch (IOException ex) {
SAXParseException saxException = new SAXParseException(ex.getMessage(), null, null, -1, -1, ex);
ErrorHandler errorHandler = getErrorHandler();
if (errorHandler != null) {
errorHandler.fatalError(saxException);
}
else {
throw saxException;
}
}
}
}
}
| MarshallingXMLReader |
java | spring-projects__spring-boot | module/spring-boot-webflux/src/test/java/org/springframework/boot/webflux/autoconfigure/actuate/web/ControllerEndpointWebFluxIntegrationTests.java | {
"start": 3338,
"end": 3437
} | class ____ {
@GetMapping("/")
String example() {
return "Example";
}
}
}
| ExampleController |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsSelectTest3.java | {
"start": 737,
"end": 1092
} | class ____ extends TestCase {
public void test_distribute_by() throws Exception {
String sql = "select RANK() OVER (PARTITION BY ui ORDER BY duration DESC) rank from dual";
assertEquals("SELECT RANK() OVER (PARTITION BY ui ORDER BY duration DESC) AS rank"
+ "\nFROM dual", SQLUtils.formatOdps(sql));
}
}
| OdpsSelectTest3 |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/errors/ErrorReporter.java | {
"start": 1044,
"end": 1422
} | interface ____<T> extends AutoCloseable {
/**
* Report an error and return the producer future.
*
* @param context the processing context (cannot be null).
* @return future result from the producer sending a record to Kafka.
*/
Future<RecordMetadata> report(ProcessingContext<T> context);
@Override
default void close() { }
}
| ErrorReporter |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/BeanDefinitionHolder.java | {
"start": 1542,
"end": 5822
} | class ____ implements BeanMetadataElement {
private final BeanDefinition beanDefinition;
private final String beanName;
private final String @Nullable [] aliases;
/**
* Create a new BeanDefinitionHolder.
* @param beanDefinition the BeanDefinition to wrap
* @param beanName the name of the bean, as specified for the bean definition
*/
public BeanDefinitionHolder(BeanDefinition beanDefinition, String beanName) {
this(beanDefinition, beanName, null);
}
/**
* Create a new BeanDefinitionHolder.
* @param beanDefinition the BeanDefinition to wrap
* @param beanName the name of the bean, as specified for the bean definition
* @param aliases alias names for the bean, or {@code null} if none
*/
public BeanDefinitionHolder(BeanDefinition beanDefinition, String beanName, String @Nullable [] aliases) {
Assert.notNull(beanDefinition, "BeanDefinition must not be null");
Assert.notNull(beanName, "Bean name must not be null");
this.beanDefinition = beanDefinition;
this.beanName = beanName;
this.aliases = aliases;
}
/**
* Copy constructor: Create a new BeanDefinitionHolder with the
* same contents as the given BeanDefinitionHolder instance.
* <p>Note: The wrapped BeanDefinition reference is taken as-is;
* it is {@code not} deeply copied.
* @param beanDefinitionHolder the BeanDefinitionHolder to copy
*/
public BeanDefinitionHolder(BeanDefinitionHolder beanDefinitionHolder) {
Assert.notNull(beanDefinitionHolder, "BeanDefinitionHolder must not be null");
this.beanDefinition = beanDefinitionHolder.getBeanDefinition();
this.beanName = beanDefinitionHolder.getBeanName();
this.aliases = beanDefinitionHolder.getAliases();
}
/**
* Return the wrapped BeanDefinition.
*/
public BeanDefinition getBeanDefinition() {
return this.beanDefinition;
}
/**
* Return the primary name of the bean, as specified for the bean definition.
*/
public String getBeanName() {
return this.beanName;
}
/**
* Return the alias names for the bean, as specified directly for the bean definition.
* @return the array of alias names, or {@code null} if none
*/
public String @Nullable [] getAliases() {
return this.aliases;
}
/**
* Expose the bean definition's source object.
* @see BeanDefinition#getSource()
*/
@Override
public @Nullable Object getSource() {
return this.beanDefinition.getSource();
}
/**
* Determine whether the given candidate name matches the bean name
* or the aliases stored in this bean definition.
*/
public boolean matchesName(@Nullable String candidateName) {
return (candidateName != null && (candidateName.equals(this.beanName) ||
candidateName.equals(BeanFactoryUtils.transformedBeanName(this.beanName)) ||
ObjectUtils.containsElement(this.aliases, candidateName)));
}
/**
* Return a friendly, short description for the bean, stating name and aliases.
* @see #getBeanName()
* @see #getAliases()
*/
public String getShortDescription() {
if (this.aliases == null) {
return "Bean definition with name '" + this.beanName + "'";
}
return "Bean definition with name '" + this.beanName + "' and aliases [" + StringUtils.arrayToCommaDelimitedString(this.aliases) + ']';
}
/**
* Return a long description for the bean, including name and aliases
* as well as a description of the contained {@link BeanDefinition}.
* @see #getShortDescription()
* @see #getBeanDefinition()
*/
public String getLongDescription() {
return getShortDescription() + ": " + this.beanDefinition;
}
/**
* This implementation returns the long description. Can be overridden
* to return the short description or any kind of custom description instead.
* @see #getLongDescription()
* @see #getShortDescription()
*/
@Override
public String toString() {
return getLongDescription();
}
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof BeanDefinitionHolder that &&
this.beanDefinition.equals(that.beanDefinition) &&
this.beanName.equals(that.beanName) &&
ObjectUtils.nullSafeEquals(this.aliases, that.aliases)));
}
@Override
public int hashCode() {
return ObjectUtils.nullSafeHash(this.beanDefinition, this.beanName, this.aliases);
}
}
| BeanDefinitionHolder |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/EqualsUnsafeCast.java | {
"start": 3266,
"end": 4388
} | class ____ with getClass.
methodInvoked = true;
return null;
}
@Override
public Void visitTypeCast(TypeCastTree node, Void unused) {
ExpressionTree expression = node.getExpression();
if (!methodInvoked
&& expression instanceof IdentifierTree
&& parameter.equals(getSymbol(expression))
&& checkedTypes.stream().noneMatch(t -> isSubtype(t, getType(node.getType()), state))) {
StatementTree enclosingStatement =
findEnclosingNode(getCurrentPath(), StatementTree.class);
state.reportMatch(
describeMatch(
node,
SuggestedFix.prefixWith(
enclosingStatement,
String.format(
"if (!(%s instanceof %s)) { return false; }",
state.getSourceForNode(expression),
state.getSourceForNode(node.getType())))));
}
return super.visitTypeCast(node, null);
}
}.scan(state.getPath(), null);
return NO_MATCH;
}
}
| comparisons |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/jakartaData/java/org/hibernate/processor/test/data/superdao/Repo.java | {
"start": 260,
"end": 328
} | interface ____ extends SuperRepo {
@Find
Book get(String isbn);
}
| Repo |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java | {
"start": 62334,
"end": 62625
} | class ____<T extends Comparable<T>> {
public abstract List<T> list();
public abstract T t();
public static <T extends Comparable<T>> Builder<T> builder() {
return new AutoValue_AutoValueTest_BuilderWithSet.Builder<T>();
}
@AutoValue.Builder
public | BuilderWithSet |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/simp/user/MultiServerUserRegistry.java | {
"start": 14676,
"end": 14738
} | class ____ find user sessions across all servers.
*/
private | to |
java | hibernate__hibernate-orm | hibernate-jfr/src/main/java/org/hibernate/event/jfr/internal/PrePartialFlushEvent.java | {
"start": 587,
"end": 860
} | class ____ extends Event implements DiagnosticEvent {
public static final String NAME = "org.hibernate.orm.PrePartialFlushEvent";
@Label("Session Identifier")
public String sessionIdentifier;
@Override
public String toString() {
return NAME;
}
}
| PrePartialFlushEvent |
java | hibernate__hibernate-orm | local-build-plugins/src/main/java/org/hibernate/orm/post/IndexerTask.java | {
"start": 736,
"end": 1623
} | class ____ extends DefaultTask {
private final Provider<IndexManager> indexManager;
public IndexerTask() {
setGroup( TASK_GROUP_NAME );
setDescription( String.format( "Builds a Jandex Index from the artifacts attached to the `%s` Configuration", AGGREGATE_CONFIG_NAME ) );
indexManager = getProject().provider( () -> getProject().getExtensions().getByType( IndexManager.class ) );
}
@InputFiles
@SkipWhenEmpty
public Configuration getArtifactsToProcess() {
return indexManager.get().getArtifactsToProcess();
}
@OutputFile
public Provider<RegularFile> getIndexFileReference() {
return indexManager.get().getIndexFileReferenceAccess();
}
@OutputFile
public Provider<RegularFile> getPackageFileReferenceAccess() {
return indexManager.get().getPackageFileReferenceAccess();
}
@TaskAction
public void createIndex() {
indexManager.get().index();
}
}
| IndexerTask |
java | apache__flink | flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/statemachine/kafka/KafkaStandaloneGenerator.java | {
"start": 2259,
"end": 3630
} | class ____ implements Collector<Event>, AutoCloseable {
private final KafkaProducer<Object, byte[]> producer;
private final EventDeSerializationSchema serializer;
private final String topic;
private final int partition;
KafkaCollector(String brokerAddress, String topic, int partition) {
this.topic = checkNotNull(topic);
this.partition = partition;
this.serializer = new EventDeSerializationSchema();
// create Kafka producer
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerAddress);
properties.put(
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
ByteArraySerializer.class.getCanonicalName());
properties.put(
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
ByteArraySerializer.class.getCanonicalName());
this.producer = new KafkaProducer<>(properties);
}
@Override
public void collect(Event evt) {
byte[] serialized = serializer.serialize(evt);
producer.send(new ProducerRecord<>(topic, partition, null, serialized));
}
@Override
public void close() {
producer.close();
}
}
}
| KafkaCollector |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java | {
"start": 1087,
"end": 1204
} | class ____ file checksums for files. */
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract | representing |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 14900,
"end": 15341
} | class ____<T extends @NullableType Object> extends Foo<T>`.
// Some buggy versions of javac do not report type annotations correctly in this context.
// AutoValue can't copy them if it can't see them, so we make a special annotation processor to
// detect if we are in the presence of this bug and if so we don't fail.
@Test
public void testTypeParametersWithAnnotationsOnBounds() {
@SupportedAnnotationTypes("*")
| AutoValue_Foo |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/logging/CppLogMessageHandler.java | {
"start": 17470,
"end": 18001
} | class ____ {
Instant timestamp;
int count;
CppLogMessage message;
Level level;
MessageSummary() {
this.timestamp = Instant.EPOCH;
this.message = null;
this.count = 0;
this.level = Level.OFF;
}
void reset(Instant timestamp, CppLogMessage message, Level level) {
this.timestamp = timestamp;
this.message = message;
this.count = 0;
this.level = level;
}
}
}
| MessageSummary |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/ingest/IngestDocument.java | {
"start": 64365,
"end": 65267
} | enum ____ {
INDEX(IndexFieldMapper.NAME),
TYPE("_type"),
ID(IdFieldMapper.NAME),
ROUTING(RoutingFieldMapper.NAME),
VERSION(VersionFieldMapper.NAME),
VERSION_TYPE("_version_type"),
IF_SEQ_NO("_if_seq_no"),
IF_PRIMARY_TERM("_if_primary_term"),
DYNAMIC_TEMPLATES("_dynamic_templates");
private static final Set<String> METADATA_NAMES = Arrays.stream(Metadata.values())
.map(metadata -> metadata.fieldName)
.collect(Collectors.toSet());
private final String fieldName;
Metadata(String fieldName) {
this.fieldName = fieldName;
}
public static boolean isMetadata(String field) {
return METADATA_NAMES.contains(field);
}
public String getFieldName() {
return fieldName;
}
}
private static final | Metadata |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/constraint/PlacementConstraintParser.java | {
"start": 6296,
"end": 6893
} | class ____ implements ConstraintTokenizer {
private final StringTokenizer tokenizer;
BaseStringTokenizer(String expr, String delimiter) {
this.tokenizer = new StringTokenizer(expr, delimiter);
}
@Override
public boolean hasMoreElements() {
return tokenizer.hasMoreTokens();
}
@Override
public String nextElement() {
return tokenizer.nextToken();
}
}
/**
* Tokenizer used to parse conjunction form of a constraint expression,
* [AND|OR](C1:C2:...:Cn). Each Cn is a constraint expression.
*/
public static final | BaseStringTokenizer |
java | apache__camel | core/camel-base/src/main/java/org/apache/camel/impl/event/AbstractRouteEvent.java | {
"start": 1087,
"end": 1641
} | class ____ extends EventObject implements RouteEvent {
private static final @Serial long serialVersionUID = 1L;
private final Route route;
private long timestamp;
public AbstractRouteEvent(Route source) {
super(source);
this.route = source;
}
@Override
public Route getRoute() {
return route;
}
@Override
public long getTimestamp() {
return timestamp;
}
@Override
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
}
}
| AbstractRouteEvent |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/fs/FSDataInputStream.java | {
"start": 1234,
"end": 1988
} | class ____ extends InputStream {
/**
* Seek to the given offset from the start of the file. The next read() will be from that
* location. Can't seek past the end of the stream.
*
* @param desired the desired offset
* @throws IOException Thrown if an error occurred while seeking inside the input stream.
*/
public abstract void seek(long desired) throws IOException;
/**
* Gets the current position in the input stream.
*
* @return current position in the input stream
* @throws IOException Thrown if an I/O error occurred in the underlying stream implementation
* while accessing the stream's position.
*/
public abstract long getPos() throws IOException;
}
| FSDataInputStream |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/validation/beanvalidation/LocalValidatorFactoryBean.java | {
"start": 3699,
"end": 6118
} | class ____ extends SpringValidatorAdapter
implements ValidatorFactory, ApplicationContextAware, InitializingBean, DisposableBean {
@SuppressWarnings("rawtypes")
private @Nullable Class providerClass;
private @Nullable ValidationProviderResolver validationProviderResolver;
private @Nullable MessageInterpolator messageInterpolator;
private @Nullable TraversableResolver traversableResolver;
private @Nullable ConstraintValidatorFactory constraintValidatorFactory;
private @Nullable ParameterNameDiscoverer parameterNameDiscoverer;
private Resource @Nullable [] mappingLocations;
private final Map<String, String> validationPropertyMap = new HashMap<>();
private @Nullable Consumer<Configuration<?>> configurationInitializer;
private @Nullable ApplicationContext applicationContext;
private @Nullable ValidatorFactory validatorFactory;
/**
* Specify the desired provider class, if any.
* <p>If not specified, JSR-303's default search mechanism will be used.
* @see jakarta.validation.Validation#byProvider(Class)
* @see jakarta.validation.Validation#byDefaultProvider()
*/
@SuppressWarnings("rawtypes")
public void setProviderClass(Class providerClass) {
this.providerClass = providerClass;
}
/**
* Specify a JSR-303 {@link ValidationProviderResolver} for bootstrapping the
* provider of choice, as an alternative to {@code META-INF} driven resolution.
* @since 4.3
*/
public void setValidationProviderResolver(ValidationProviderResolver validationProviderResolver) {
this.validationProviderResolver = validationProviderResolver;
}
/**
* Specify a custom MessageInterpolator to use for this ValidatorFactory
* and its exposed default Validator.
*/
public void setMessageInterpolator(MessageInterpolator messageInterpolator) {
this.messageInterpolator = messageInterpolator;
}
/**
* Specify a custom Spring MessageSource for resolving validation messages,
* instead of relying on JSR-303's default "ValidationMessages.properties" bundle
* in the classpath. This may refer to a Spring context's shared "messageSource" bean,
* or to some special MessageSource setup for validation purposes only.
* <p><b>NOTE:</b> This feature requires Hibernate Validator 4.3 or higher on the classpath.
* You may nevertheless use a different validation provider but Hibernate Validator's
* {@link ResourceBundleMessageInterpolator} | LocalValidatorFactoryBean |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java | {
"start": 2172,
"end": 3458
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestTimelineClientV2Impl.class);
private TestV2TimelineClient client;
private static final long TIME_TO_SLEEP = 150L;
private static final String EXCEPTION_MSG = "Exception in the content";
@BeforeEach
public void setup(TestInfo testInfo) {
conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
conf.setInt(YarnConfiguration.NUMBER_OF_ASYNC_ENTITIES_TO_MERGE, 3);
if (!testInfo.getDisplayName()
.contains("testRetryOnConnectionFailure")) {
client = createTimelineClient(conf);
}
}
@Test
void getTestInfo(TestInfo testInfo) {
System.out.println(testInfo.getDisplayName());
System.out.println(testInfo.getTestMethod());
System.out.println(testInfo.getTestClass());
System.out.println(testInfo.getTags());
}
private YarnConfiguration conf;
private TestV2TimelineClient createTimelineClient(YarnConfiguration config) {
ApplicationId id = ApplicationId.newInstance(0, 0);
TestV2TimelineClient tc = new TestV2TimelineClient(id);
tc.init(config);
tc.start();
return tc;
}
private | TestTimelineClientV2Impl |
java | google__dagger | hilt-android-testing/main/java/dagger/hilt/android/testing/OnComponentReadyRunner.java | {
"start": 1208,
"end": 3030
} | class ____ {
private final List<EntryPointListener<?>> listeners = new ArrayList<>();
private GeneratedComponentManager<?> componentManager;
private boolean componentHostSet = false;
/** Used by generated code, to notify listeners that the component has been created. */
public void setComponentManager(GeneratedComponentManager<?> componentManager) {
Preconditions.checkState(!componentHostSet, "Component host was already set.");
componentHostSet = true;
this.componentManager = componentManager;
for (EntryPointListener<?> listener : listeners) {
listener.deliverComponent(componentManager);
}
}
/** Must be called on the test thread, before the Statement is evaluated. */
public static <T> void addListener(
Context context, Class<T> entryPoint, OnComponentReadyListener<T> listener) {
Application application = Contexts.getApplication(context.getApplicationContext());
if (application instanceof TestApplicationComponentManagerHolder) {
TestApplicationComponentManagerHolder managerHolder =
(TestApplicationComponentManagerHolder) application;
OnComponentReadyRunnerHolder runnerHolder =
(OnComponentReadyRunnerHolder) managerHolder.componentManager();
runnerHolder.getOnComponentReadyRunner().addListenerInternal(entryPoint, listener);
}
}
private <T> void addListenerInternal(Class<T> entryPoint, OnComponentReadyListener<T> listener) {
if (componentHostSet) {
// If the componentHost was already set, just call through immediately
runListener(componentManager, entryPoint, listener);
} else {
listeners.add(EntryPointListener.create(entryPoint, listener));
}
}
public boolean isEmpty() {
return listeners.isEmpty();
}
@AutoValue
abstract static | OnComponentReadyRunner |
java | elastic__elasticsearch | x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeActionTests.java | {
"start": 2231,
"end": 9376
} | class ____ extends ESTestCase {
private ClusterService clusterService;
private PersistentTasksService persistentTasksService;
private Client client;
private TransportSetTransformUpgradeModeAction action;
@Before
public void setUp() throws Exception {
super.setUp();
clusterService = clusterService();
doAnswer(ans -> {
ClusterStateUpdateTask task = ans.getArgument(1);
task.clusterStateProcessed(ClusterState.EMPTY_STATE, ClusterState.EMPTY_STATE);
return null;
}).when(clusterService).submitUnbatchedStateUpdateTask(any(), any(ClusterStateUpdateTask.class));
when(clusterService.getClusterSettings()).thenReturn(ClusterSettings.createBuiltInClusterSettings());
PersistentTasksClusterService persistentTasksClusterService = new PersistentTasksClusterService(
Settings.EMPTY,
mock(),
clusterService,
mock()
);
persistentTasksService = mock();
client = mock();
ThreadPool threadPool = mock();
when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
when(client.threadPool()).thenReturn(threadPool);
action = new TransportSetTransformUpgradeModeAction(
mock(),
clusterService,
threadPool,
mock(),
persistentTasksClusterService,
persistentTasksService,
client
);
}
public void testUpgradeMode() {
assertTrue(action.upgradeMode(state(true)));
assertFalse(action.upgradeMode(state(false)));
}
private ClusterState state(boolean upgradeMode) {
return ClusterState.EMPTY_STATE.copyAndUpdate(
u -> u.metadata(
ClusterState.EMPTY_STATE.metadata()
.copyAndUpdate(
b -> b.putCustom(
TransformMetadata.TYPE,
TransformMetadata.getTransformMetadata(ClusterState.EMPTY_STATE).builder().upgradeMode(upgradeMode).build()
)
)
)
);
}
public void testCreateUpdatedState() {
var updatedState = action.createUpdatedState(
new SetUpgradeModeActionRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, true),
state(false)
);
assertTrue(TransformMetadata.upgradeMode(updatedState));
updatedState = action.createUpdatedState(
new SetUpgradeModeActionRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, false),
state(true)
);
assertFalse(TransformMetadata.upgradeMode(updatedState));
}
public void testUpgradeModeWithNoMetadata() throws InterruptedException {
upgradeModeSuccessfullyChanged(ClusterState.EMPTY_STATE, assertNoFailureListener(r -> {
assertThat(r, is(AcknowledgedResponse.TRUE));
verifyNoInteractions(persistentTasksService);
verify(client, never()).admin();
}));
}
public void testUpgradeModeWithNoTasks() throws InterruptedException {
upgradeModeSuccessfullyChanged(
ClusterState.EMPTY_STATE.copyAndUpdateMetadata(
m -> m.putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(1, Map.of()))
),
assertNoFailureListener(r -> {
assertThat(r, is(AcknowledgedResponse.TRUE));
verifyNoInteractions(persistentTasksService);
verify(client, never()).admin();
})
);
}
public void testUpgradeModeWithNoTransformTasks() throws InterruptedException {
upgradeModeSuccessfullyChanged(
ClusterState.EMPTY_STATE.copyAndUpdateMetadata(
m -> m.putCustom(
PersistentTasksCustomMetadata.TYPE,
new PersistentTasksCustomMetadata(1, Map.of("not a transform", mock()))
)
),
assertNoFailureListener(r -> {
assertThat(r, is(AcknowledgedResponse.TRUE));
verifyNoInteractions(persistentTasksService);
verify(client, never()).admin();
})
);
}
private void upgradeModeSuccessfullyChanged(ClusterState state, ActionListener<AcknowledgedResponse> listener)
throws InterruptedException {
upgradeModeSuccessfullyChanged(new SetUpgradeModeActionRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, true), state, listener);
}
private void upgradeModeSuccessfullyChanged(
SetUpgradeModeActionRequest request,
ClusterState state,
ActionListener<AcknowledgedResponse> listener
) throws InterruptedException {
CountDownLatch latch = new CountDownLatch(1);
action.upgradeModeSuccessfullyChanged(mock(), request, state, ActionListener.runAfter(listener, latch::countDown));
assertTrue("Failed to finish test after 10s", latch.await(10, TimeUnit.SECONDS));
}
public void testEnableUpgradeMode() throws InterruptedException {
doAnswer(ans -> {
ActionListener<ListTasksResponse> listener = ans.getArgument(2);
ListTasksResponse response = mock();
when(response.getNodeFailures()).thenReturn(List.of());
listener.onResponse(response);
return null;
}).when(client).execute(any(), any(), any());
when(client.admin()).thenReturn(new AdminClient(client));
upgradeModeSuccessfullyChanged(stateWithTransformTask(), assertNoFailureListener(r -> {
assertThat(r, is(AcknowledgedResponse.TRUE));
verify(clusterService).submitUnbatchedStateUpdateTask(
matches("unassign project .* persistent task \\[.*\\] from any node"),
any()
);
}));
}
private ClusterState stateWithTransformTask() {
PersistentTasksCustomMetadata.PersistentTask<?> task = mock();
when(task.getTaskName()).thenReturn(TransformField.TASK_NAME);
return ClusterState.EMPTY_STATE.copyAndUpdateMetadata(
m -> m.putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(1, Map.of("a transform", task)))
);
}
public void testDisableUpgradeMode() throws InterruptedException {
doAnswer(ans -> {
ActionListener<Boolean> listener = ans.getArgument(3);
listener.onResponse(true);
return null;
}).when(persistentTasksService).waitForPersistentTasksCondition(any(), any(), any(), any());
upgradeModeSuccessfullyChanged(
new SetUpgradeModeActionRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, false),
stateWithTransformTask(),
assertNoFailureListener(r -> {
assertThat(r, is(AcknowledgedResponse.TRUE));
verify(clusterService, never()).submitUnbatchedStateUpdateTask(eq("unassign persistent task from any node"), any());
})
);
}
}
| TransportSetTransformUpgradeModeActionTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/client/JobCancellationException.java | {
"start": 1015,
"end": 1273
} | class ____ extends JobExecutionException {
private static final long serialVersionUID = 2818087325120827526L;
public JobCancellationException(JobID jobID, String msg, Throwable cause) {
super(jobID, msg, cause);
}
}
| JobCancellationException |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/SharedCacheConfig.java | {
"start": 1220,
"end": 1330
} | class ____ parsing configuration parameters associated with the shared
* cache.
*/
@Private
@Unstable
public | for |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/util/EnglishEnums.java | {
"start": 938,
"end": 1232
} | class ____.</em>
*
* <p>
* Helps convert English Strings to English Enum values.
* </p>
* <p>
* Enum name arguments are converted internally to upper case with the {@linkplain java.util.Locale#ENGLISH ENGLISH} locale to
* avoid problems on the Turkish locale. Do not use with Turkish | private |
java | quarkusio__quarkus | extensions/jdbc/jdbc-postgresql/runtime/src/test/java/io/quarkus/jdbc/postgresql/runtime/PostgreSQLServiceBindingConverterTest.java | {
"start": 503,
"end": 3562
} | class ____ {
private final Path root = Paths.get("src/test/resources/bindings");
@Test
public void testConvertAll() {
List<ServiceBinding> serviceBindings = new ArrayList<>();
ServiceBinding binding = new ServiceBinding(root.resolve("test-name"));
serviceBindings.add(binding);
ServiceBindingConverter c = new PostgreSQLServiceBindingConverter();
Optional<ServiceBindingConfigSource> conntionProp = c.convert(serviceBindings);
String sslRootCertPath = root.resolve("test-name").resolve("root.crt").toString();
String expectedURL = "jdbc:postgresql://aws.crdb-cloud.com:26257/defaultdb?sslmode=verify-full&sslrootcert="
+ sslRootCertPath + "&options=--cluster%3Da-crdb-cluster-0101%20-c%20search_path%3Dkeyword";
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.jdbc.url")).isEqualTo(expectedURL);
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.password")).isEqualTo("\\");
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.username")).isEqualTo("remote-user");
}
@Test
public void testConvertNoOptions() {
List<ServiceBinding> serviceBindings = new ArrayList<>();
ServiceBinding binding = new ServiceBinding(root.resolve("no-options"));
serviceBindings.add(binding);
ServiceBindingConverter c = new PostgreSQLServiceBindingConverter();
Optional<ServiceBindingConfigSource> conntionProp = c.convert(serviceBindings);
String sslRootCertPath = root.resolve("no-options").resolve("root.crt").toString();
String expectedURL = "jdbc:postgresql://aws.crdb-cloud.com:26257/defaultdb?sslmode=verify-full&sslrootcert="
+ sslRootCertPath;
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.jdbc.url")).isEqualTo(expectedURL);
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.password")).isEqualTo("\\");
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.username")).isEqualTo("remote-user");
}
@Test
public void testConvertDisableSslMode() {
List<ServiceBinding> serviceBindings = new ArrayList<>();
ServiceBinding binding = new ServiceBinding(root.resolve("no-ssl"));
serviceBindings.add(binding);
ServiceBindingConverter c = new PostgreSQLServiceBindingConverter();
Optional<ServiceBindingConfigSource> conntionProp = c.convert(serviceBindings);
String expectedURL = "jdbc:postgresql://aws.crdb-cloud.com:26257/defaultdb?sslmode=disable&options=--cluster%3Da-crdb-cluster-0101";
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.jdbc.url")).isEqualTo(expectedURL);
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.password")).isEqualTo("pwd");
assertThat(conntionProp.get().getProperties().get("quarkus.datasource.username")).isEqualTo("remote-user");
}
}
| PostgreSQLServiceBindingConverterTest |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ADataBlocks.java | {
"start": 16580,
"end": 17993
} | class ____ extends BlockFactory {
private final DirectBufferPool bufferPool = new DirectBufferPool();
private final AtomicInteger buffersOutstanding = new AtomicInteger(0);
ByteBufferBlockFactory(StoreContext owner) {
super(owner);
}
@Override
ByteBufferBlock create(long index, long limit,
BlockOutputStreamStatistics statistics)
throws IOException {
checkArgument(limit > 0,
"Invalid block size: %d", limit);
return new ByteBufferBlock(index, limit, statistics);
}
private ByteBuffer requestBuffer(int limit) {
LOG.debug("Requesting buffer of size {}", limit);
buffersOutstanding.incrementAndGet();
return bufferPool.getBuffer(limit);
}
private void releaseBuffer(ByteBuffer buffer) {
LOG.debug("Releasing buffer");
bufferPool.returnBuffer(buffer);
buffersOutstanding.decrementAndGet();
}
/**
* Get count of outstanding buffers.
* @return the current buffer count
*/
public int getOutstandingBufferCount() {
return buffersOutstanding.get();
}
@Override
public String toString() {
return "ByteBufferBlockFactory{"
+ "buffersOutstanding=" + buffersOutstanding +
'}';
}
/**
* A DataBlock which requests a buffer from pool on creation; returns
* it when it is closed.
*/
| ByteBufferBlockFactory |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/autoproxy/benchmark/BenchmarkTests.java | {
"start": 6638,
"end": 7224
} | class ____ implements AfterReturningAdvice {
public int afterTakesInt;
@Override
public void afterReturning(Object returnValue, Method method, Object[] args, Object target) {
++afterTakesInt;
}
public static Advisor advisor() {
return new DefaultPointcutAdvisor(
new StaticMethodMatcherPointcut() {
@Override
public boolean matches(Method method, Class<?> targetClass) {
return method.getParameterCount() == 1 &&
method.getParameterTypes()[0].equals(Integer.class);
}
},
new TraceAfterReturningAdvice());
}
}
@Aspect
| TraceAfterReturningAdvice |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2677/Issue2677Mapper.java | {
"start": 338,
"end": 1111
} | interface ____ {
Issue2677Mapper INSTANCE = Mappers.getMapper( Issue2677Mapper.class );
@Mapping(target = "id", source = "value.id")
Output map(Wrapper<? extends Parent> in);
@Mapping(target = ".", source = "value")
Output mapImplicitly(Wrapper<? extends Parent> in);
@Mapping(target = "id", source = "value.id")
Output mapFromParent(Wrapper<Parent> in);
@Mapping(target = "id", source = "value.id")
Output mapFromChild(Wrapper<Child> in);
@Mapping( target = "value", source = "wrapperValue")
Wrapper<String> mapToWrapper(String wrapperValue, Wrapper<? super Parent> wrapper);
@Mapping(target = "id", source = "value.id")
Output mapWithPresenceCheck(Wrapper<? extends ParentWithPresenceCheck> in);
| Issue2677Mapper |
java | quarkusio__quarkus | extensions/oidc-client/deployment/src/test/java/io/quarkus/oidc/client/OidcClientUserPasswordCustomFilterTestCase.java | {
"start": 531,
"end": 2141
} | class ____ {
private static Class<?>[] testClasses = {
FrontendResource.class,
ProtectedResource.class,
ProtectedResourceService.class,
OidcClientRequestCustomFilter.class
};
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(testClasses)
.addAsResource("application-oidc-client-custom-filter.properties", "application.properties"));
@Test
public void testGetUserName() {
RestAssured.when().get("/frontend/user")
.then()
.statusCode(200)
.body(equalTo("bob"));
}
@Test
public void testGetUserOidcClientNameAndRefreshTokens() {
RestAssured.when().get("/frontend/user")
.then()
.statusCode(200)
.body(equalTo("bob"));
// Wait until the access token has expired
long expiredTokenTime = System.currentTimeMillis() + 5000;
await().atMost(10, TimeUnit.SECONDS)
.pollInterval(Duration.ofSeconds(3))
.until(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
return System.currentTimeMillis() > expiredTokenTime;
}
});
RestAssured.when().get("/frontend/user")
.then()
.statusCode(200)
.body(equalTo("bob"));
}
}
| OidcClientUserPasswordCustomFilterTestCase |
java | spring-projects__spring-boot | integration-test/spring-boot-actuator-integration-tests/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/AbstractWebEndpointIntegrationTests.java | {
"start": 23026,
"end": 23309
} | class ____ {
@Bean
VoidWriteResponseEndpoint voidWriteResponseEndpoint(EndpointDelegate delegate) {
return new VoidWriteResponseEndpoint(delegate);
}
}
@Configuration(proxyBeanMethods = false)
@Import(BaseConfiguration.class)
static | VoidWriteResponseEndpointConfiguration |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/util/collections/binary/AbstractBytesHashMap.java | {
"start": 9988,
"end": 12989
} | class ____ implements BytesMap.RecordArea<K, BinaryRowData> {
private final ArrayList<MemorySegment> segments = new ArrayList<>();
private final RandomAccessInputView inView;
private final SimpleCollectingOutputView outView;
RecordArea() {
this.outView = new SimpleCollectingOutputView(segments, memoryPool, segmentSize);
this.inView = new RandomAccessInputView(segments, segmentSize);
}
public void release() {
returnSegments(segments);
segments.clear();
}
public void reset() {
release();
// request a new memory segment from freeMemorySegments
// reset segmentNum and positionInSegment
outView.reset();
inView.setReadPosition(0);
}
// ----------------------- Append -----------------------
public int appendRecord(LookupInfo<K, BinaryRowData> lookupInfo, BinaryRowData value)
throws IOException {
final long oldLastPosition = outView.getCurrentOffset();
// serialize the key into the BytesHashMap record area
int skip = keySerializer.serializeToPages(lookupInfo.getKey(), outView);
long offset = oldLastPosition + skip;
// serialize the value into the BytesHashMap record area
valueSerializer.serializeToPages(value, outView);
if (offset > Integer.MAX_VALUE) {
LOG.warn(
"We can't handle key area with more than Integer.MAX_VALUE bytes,"
+ " because the pointer is a integer.");
throw new EOFException();
}
return (int) offset;
}
@Override
public long getSegmentsSize() {
return segments.size() * ((long) segmentSize);
}
// ----------------------- Read -----------------------
public void setReadPosition(int position) {
inView.setReadPosition(position);
}
public boolean readKeyAndEquals(K lookupKey) throws IOException {
reusedKey = keySerializer.mapFromPages(reusedKey, inView);
return lookupKey.equals(reusedKey);
}
/**
* @throws IOException when invalid memory address visited.
*/
void skipKey() throws IOException {
keySerializer.skipRecordFromPages(inView);
}
public BinaryRowData readValue(BinaryRowData reuse) throws IOException {
// depends on BinaryRowDataSerializer to check writing skip
// and to find the real start offset of the data
return valueSerializer.mapFromPages(reuse, inView);
}
// ----------------------- Iterator -----------------------
private KeyValueIterator<K, BinaryRowData> entryIterator(boolean requiresCopy) {
return new EntryIterator(requiresCopy);
}
private final | RecordArea |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/inlinedirtychecking/DirtyCheckPrivateUnMappedCollectionTest.java | {
"start": 3311,
"end": 3378
} | class ____ extends AbstractMeasurement {
}
public static | Measurement |
java | quarkusio__quarkus | independent-projects/qute/core/src/main/java/io/quarkus/qute/GetterAccessor.java | {
"start": 148,
"end": 732
} | class ____ implements ValueAccessor, AccessorCandidate {
private final Method method;
GetterAccessor(Method method) {
this.method = method;
}
@Override
public CompletionStage<Object> getValue(Object instance) {
try {
return CompletionStageSupport.toCompletionStage(method.invoke(instance));
} catch (Exception e) {
throw new IllegalStateException("Reflection invocation error", e);
}
}
@Override
public ValueAccessor getAccessor(EvalContext context) {
return this;
}
}
| GetterAccessor |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/annotations/InsertProvider.java | {
"start": 1221,
"end": 1541
} | class ____ {
* public static String insert() {
* return "INSERT INTO users (id, name) VALUES(#{id}, #{name})";
* }
* }
*
* }
* }</pre>
*
* @author Clinton Begin
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Repeatable(InsertProvider.List.class)
public @ | SqlProvider |
java | elastic__elasticsearch | x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java | {
"start": 1744,
"end": 41749
} | class ____ extends AbstractUpgradeTestCase {
public void testDataStreams() throws IOException {
if (CLUSTER_TYPE == ClusterType.OLD) {
String requestBody = """
{
"index_patterns": [ "logs-*" ],
"template": {
"mappings": {
"properties": {
"@timestamp": {
"type": "date"
}
}
}
},
"data_stream": {}
}""";
Request request = new Request("PUT", "/_index_template/1");
request.setJsonEntity(requestBody);
useIgnoreMultipleMatchingTemplatesWarningsHandler(request);
client().performRequest(request);
StringBuilder b = new StringBuilder();
for (int i = 0; i < 1000; i++) {
b.append(Strings.format("""
{"create":{"_index":"logs-foobar"}}
{"@timestamp":"2020-12-12","test":"value%s"}
""", i));
}
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "true");
bulk.addParameter("filter_path", "errors");
bulk.setJsonEntity(b.toString());
Response response = client().performRequest(bulk);
assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8));
} else if (CLUSTER_TYPE == ClusterType.MIXED) {
long nowMillis = System.currentTimeMillis();
Request rolloverRequest = new Request("POST", "/logs-foobar/_rollover");
client().performRequest(rolloverRequest);
Request index = new Request("POST", "/logs-foobar/_doc");
index.addParameter("refresh", "true");
index.addParameter("filter_path", "_index");
if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) {
// include legacy name and date-named indices with today +/-1 in case of clock skew
var expectedIndices = List.of(
"{\"_index\":\"" + DataStreamTestHelper.getLegacyDefaultBackingIndexName("logs-foobar", 2) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis + 86400000) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis - 86400000) + "\"}"
);
index.setJsonEntity("{\"@timestamp\":\"2020-12-12\",\"test\":\"value1000\"}");
Response response = client().performRequest(index);
assertThat(expectedIndices, Matchers.hasItem(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)));
} else {
// include legacy name and date-named indices with today +/-1 in case of clock skew
var expectedIndices = List.of(
"{\"_index\":\"" + DataStreamTestHelper.getLegacyDefaultBackingIndexName("logs-foobar", 3) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis + 86400000) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis - 86400000) + "\"}"
);
index.setJsonEntity("{\"@timestamp\":\"2020-12-12\",\"test\":\"value1001\"}");
Response response = client().performRequest(index);
assertThat(expectedIndices, Matchers.hasItem(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)));
}
}
final int expectedCount;
if (CLUSTER_TYPE.equals(ClusterType.OLD)) {
expectedCount = 1000;
} else if (CLUSTER_TYPE.equals(ClusterType.MIXED)) {
if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) {
expectedCount = 1001;
} else {
expectedCount = 1002;
}
} else if (CLUSTER_TYPE.equals(ClusterType.UPGRADED)) {
expectedCount = 1002;
} else {
throw new AssertionError("unexpected cluster type");
}
assertCount("logs-foobar", expectedCount);
}
public void testDataStreamValidationDoesNotBreakUpgrade() throws Exception {
if (CLUSTER_TYPE == ClusterType.OLD) {
String requestBody = """
{
"index_patterns": [ "logs-*" ],
"template": {
"mappings": {
"properties": {
"@timestamp": {
"type": "date"
}
}
}
},
"data_stream": {}
}""";
Request request = new Request("PUT", "/_index_template/1");
request.setJsonEntity(requestBody);
useIgnoreMultipleMatchingTemplatesWarningsHandler(request);
client().performRequest(request);
String b = """
{"create":{"_index":"logs-barbaz"}}
{"@timestamp":"2020-12-12","test":"value0"}
{"create":{"_index":"logs-barbaz-2021.01.13"}}
{"@timestamp":"2020-12-12","test":"value0"}
""";
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "true");
bulk.addParameter("filter_path", "errors");
bulk.setJsonEntity(b);
Response response = client().performRequest(bulk);
assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8));
Request rolloverRequest = new Request("POST", "/logs-barbaz-2021.01.13/_rollover");
client().performRequest(rolloverRequest);
} else {
if (CLUSTER_TYPE == ClusterType.MIXED) {
ensureHealth((request -> {
request.addParameter("timeout", "70s");
request.addParameter("wait_for_nodes", "3");
request.addParameter("wait_for_status", "yellow");
}));
} else if (CLUSTER_TYPE == ClusterType.UPGRADED) {
ensureHealth("logs-barbaz", (request -> {
request.addParameter("wait_for_nodes", "3");
request.addParameter("wait_for_status", "green");
request.addParameter("timeout", "70s");
request.addParameter("level", "shards");
}));
}
assertCount("logs-barbaz", 1);
assertCount("logs-barbaz-2021.01.13", 1);
}
}
public void testUpgradeDataStream() throws Exception {
/*
* This test tests upgrading a "normal" data stream (dataStreamName), and upgrading a data stream that was originally just an
* ordinary index that was converted to a data stream (dataStreamFromNonDataStreamIndices).
*/
String dataStreamName = "reindex_test_data_stream";
String dataStreamFromNonDataStreamIndices = "index_first_reindex_test_data_stream";
int numRollovers = randomIntBetween(0, 5);
boolean hasILMPolicy = randomBoolean();
boolean ilmEnabled = hasILMPolicy && randomBoolean();
if (ilmEnabled) {
startILM();
} else {
stopILM();
}
if (CLUSTER_TYPE == ClusterType.OLD) {
createAndRolloverDataStream(dataStreamName, numRollovers, hasILMPolicy, ilmEnabled);
createDataStreamFromNonDataStreamIndices(dataStreamFromNonDataStreamIndices);
} else if (CLUSTER_TYPE == ClusterType.UPGRADED) {
Map<String, Map<String, Object>> oldIndicesMetadata = getIndicesMetadata(dataStreamName);
String oldWriteIndex = getDataStreamBackingIndexNames(dataStreamName).getLast();
upgradeDataStream(dataStreamName, numRollovers, numRollovers + 1, 0, ilmEnabled);
cancelReindexTask(dataStreamName);
upgradeDataStream(dataStreamFromNonDataStreamIndices, 0, 1, 0, ilmEnabled);
cancelReindexTask(dataStreamFromNonDataStreamIndices);
Map<String, Map<String, Object>> upgradedIndicesMetadata = getIndicesMetadata(dataStreamName);
String newWriteIndex = getDataStreamBackingIndexNames(dataStreamName).getLast();
if (ilmEnabled) {
checkILMPhase(dataStreamName, newWriteIndex);
// Delete the data streams to avoid ILM continuously running cluster state tasks, see
// https://github.com/elastic/elasticsearch/issues/129097#issuecomment-3016122739
deleteDataStream(dataStreamName);
} else {
compareIndexMetadata(oldIndicesMetadata, oldWriteIndex, upgradedIndicesMetadata);
}
}
}
public void testMigrateDoesNotRestartOnUpgrade() throws Exception {
/*
* This test makes sure that if reindex is run and completed, then when the cluster is upgraded the task
* does not begin running again.
*/
String dataStreamName = "reindex_test_data_stream_upgrade_test";
int numRollovers = randomIntBetween(0, 5);
boolean hasILMPolicy = randomBoolean();
boolean ilmEnabled = hasILMPolicy && randomBoolean();
if (CLUSTER_TYPE == ClusterType.OLD) {
createAndRolloverDataStream(dataStreamName, numRollovers, hasILMPolicy, ilmEnabled);
upgradeDataStream(dataStreamName, numRollovers, numRollovers + 1, 0, ilmEnabled);
} else if (CLUSTER_TYPE == ClusterType.UPGRADED) {
makeSureNoUpgrade(dataStreamName);
cancelReindexTask(dataStreamName);
// Delete the data streams to avoid ILM continuously running cluster state tasks, see
// https://github.com/elastic/elasticsearch/issues/129097#issuecomment-3016122739
deleteDataStream(dataStreamName);
} else {
makeSureNoUpgrade(dataStreamName);
}
}
private void cancelReindexTask(String dataStreamName) throws IOException {
Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel");
String upgradeUser = "upgrade_user";
String upgradeUserPassword = "x-pack-test-password";
createRole("upgrade_role", dataStreamName);
createUser(upgradeUser, upgradeUserPassword, "upgrade_role");
try (RestClient upgradeUserClient = getClient(upgradeUser, upgradeUserPassword)) {
Response cancelResponse = upgradeUserClient.performRequest(cancelRequest);
assertOK(cancelResponse);
}
}
private void compareIndexMetadata(
Map<String, Map<String, Object>> oldIndicesMetadata,
String oldWriteIndex,
Map<String, Map<String, Object>> upgradedIndicesMetadata
) {
for (Map.Entry<String, Map<String, Object>> upgradedIndexEntry : upgradedIndicesMetadata.entrySet()) {
String upgradedIndexName = upgradedIndexEntry.getKey();
if (upgradedIndexName.startsWith(".migrated-")) {
String oldIndexName = "." + upgradedIndexName.substring(".migrated-".length());
Map<String, Object> oldIndexMetadata = oldIndicesMetadata.get(oldIndexName);
Map<String, Object> upgradedIndexMetadata = upgradedIndexEntry.getValue();
compareSettings(oldIndexMetadata, upgradedIndexMetadata);
compareMappings((Map<?, ?>) oldIndexMetadata.get("mappings"), (Map<?, ?>) upgradedIndexMetadata.get("mappings"));
assertThat("ILM states did not match", upgradedIndexMetadata.get("ilm"), equalTo(oldIndexMetadata.get("ilm")));
if (oldIndexName.equals(oldWriteIndex) == false) { // the old write index will have been rolled over by upgrade
assertThat(
"Rollover info did not match",
upgradedIndexMetadata.get("rollover_info"),
equalTo(oldIndexMetadata.get("rollover_info"))
);
}
assertThat(upgradedIndexMetadata.get("system"), equalTo(oldIndexMetadata.get("system")));
}
}
}
@SuppressWarnings("unchecked")
private void checkILMPhase(String dataStreamName, String writeIndex) throws Exception {
assertBusy(() -> {
Request request = new Request("GET", dataStreamName + "/_ilm/explain");
Response response = client().performRequest(request);
Map<String, Object> responseMap = XContentHelper.convertToMap(
JsonXContent.jsonXContent,
response.getEntity().getContent(),
false
);
Map<String, Object> indices = (Map<String, Object>) responseMap.get("indices");
for (var index : indices.keySet()) {
if (index.equals(writeIndex) == false) {
Map<String, Object> ilmInfo = (Map<String, Object>) indices.get(index);
assertThat("Index [" + index + "] has not moved to cold ILM phase, " + indices, ilmInfo.get("phase"), equalTo("cold"));
}
}
}, 30, TimeUnit.SECONDS);
}
private void startILM() throws IOException {
setILMInterval();
var request = new Request("POST", "/_ilm/start");
assertOK(client().performRequest(request));
}
private void stopILM() throws IOException {
var request = new Request("POST", "/_ilm/stop");
assertOK(client().performRequest(request));
}
private void setILMInterval() throws IOException {
Request request = new Request("PUT", "/_cluster/settings");
request.setJsonEntity("""
{ "persistent": {"indices.lifecycle.poll_interval": "1s"} }
""");
assertOK(client().performRequest(request));
}
@SuppressWarnings("unchecked")
long getCreationDate(Map<String, Object> indexMetadata) {
return Long.parseLong(
(String) ((Map<String, Map<String, Object>>) indexMetadata.get("settings")).get("index").get("creation_date")
);
}
private void compareSettings(Map<String, Object> oldIndexMetadata, Map<String, Object> upgradedIndexMetadata) {
Map<String, Object> oldIndexSettings = getIndexSettingsFromIndexMetadata(oldIndexMetadata);
Map<String, Object> upgradedIndexSettings = getIndexSettingsFromIndexMetadata(upgradedIndexMetadata);
final Set<String> SETTINGS_TO_CHECK = Set.of(
"lifecycle",
"mode",
"routing",
"hidden",
"number_of_shards",
"creation_date",
"number_of_replicas"
);
for (String setting : SETTINGS_TO_CHECK) {
assertThat(
"Unexpected value for setting " + setting,
upgradedIndexSettings.get(setting),
equalTo(oldIndexSettings.get(setting))
);
}
}
private void compareMappings(Map<?, ?> oldMappings, Map<?, ?> upgradedMappings) {
boolean ignoreSource = Version.fromString(UPGRADE_FROM_VERSION).before(Version.V_9_0_0);
if (ignoreSource) {
Map<?, ?> doc = (Map<?, ?>) oldMappings.get("_doc");
if (doc != null) {
Map<?, ?> sourceEntry = (Map<?, ?>) doc.get("_source");
if (sourceEntry != null && sourceEntry.isEmpty()) {
doc.remove("_source");
}
assert doc.containsKey("_source") == false;
}
}
assertThat("Mappings did not match", upgradedMappings, equalTo(oldMappings));
}
@SuppressWarnings("unchecked")
private Map<String, Object> getIndexSettingsFromIndexMetadata(Map<String, Object> indexMetadata) {
return (Map<String, Object>) ((Map<String, Object>) indexMetadata.get("settings")).get("index");
}
private void createAndRolloverDataStream(String dataStreamName, int numRollovers, boolean hasILMPolicy, boolean ilmEnabled)
throws IOException {
if (hasILMPolicy) {
createIlmPolicy();
}
// We want to create a data stream and roll it over several times so that we have several indices to upgrade
String template = """
{
"settings":{
"index": {
$ILM_SETTING
"mode": "standard"
}
},
$DSL_TEMPLATE
"mappings":{
"dynamic_templates": [
{
"labels": {
"path_match": "pod.labels.*",
"mapping": {
"type": "keyword",
"time_series_dimension": true
}
}
}
],
"properties": {
"@timestamp" : {
"type": "date"
},
"metricset": {
"type": "keyword"
},
"k8s": {
"properties": {
"pod": {
"properties": {
"name": {
"type": "keyword"
},
"network": {
"properties": {
"tx": {
"type": "long"
},
"rx": {
"type": "long"
}
}
}
}
}
}
}
}
}
}
""";
if (hasILMPolicy) {
template = template.replace("$ILM_SETTING", """
"lifecycle.name": "test-lifecycle-policy",
""");
template = template.replace("$DSL_TEMPLATE", "");
} else {
template = template.replace("$ILM_SETTING", "");
template = template.replace("$DSL_TEMPLATE", """
"lifecycle": {
"data_retention": "7d"
},
""");
}
final String indexTemplate = """
{
"index_patterns": ["$PATTERN"],
"template": $TEMPLATE,
"data_stream": {
}
}""";
var putIndexTemplateRequest = new Request(
"POST",
"/_index_template/reindex_test_data_stream_template" + randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT)
);
putIndexTemplateRequest.setJsonEntity(indexTemplate.replace("$TEMPLATE", template).replace("$PATTERN", dataStreamName));
assertOK(client().performRequest(putIndexTemplateRequest));
bulkLoadData(dataStreamName);
for (int i = 0; i < numRollovers; i++) {
String oldIndexName = rollover(dataStreamName);
if (ilmEnabled == false && randomBoolean()) {
closeIndex(oldIndexName);
}
bulkLoadData(dataStreamName);
}
}
private static void createIlmPolicy() throws IOException {
String ilmPolicy = """
{
"policy": {
"phases": {
"warm": {
"min_age": "1s",
"actions": {
"forcemerge": {
"max_num_segments": 1
}
}
},
"cold": {
"actions": {
"set_priority" : {
"priority": 50
}
}
}
}
}
}""";
Request putIlmPolicyRequest = new Request("PUT", "_ilm/policy/test-lifecycle-policy");
putIlmPolicyRequest.setJsonEntity(ilmPolicy);
assertOK(client().performRequest(putIlmPolicyRequest));
}
/*
* This returns a Map of index metadata for each index in the data stream, as retrieved from the cluster state.
*/
@SuppressWarnings("unchecked")
private Map<String, Map<String, Object>> getIndicesMetadata(String dataStreamName) throws IOException {
Request getClusterStateRequest = new Request("GET", "/_cluster/state/metadata/" + dataStreamName);
Response clusterStateResponse = client().performRequest(getClusterStateRequest);
Map<String, Object> clusterState = XContentHelper.convertToMap(
JsonXContent.jsonXContent,
clusterStateResponse.getEntity().getContent(),
false
);
return ((Map<String, Map<String, Map<String, Object>>>) clusterState.get("metadata")).get("indices");
}
private void createDataStreamFromNonDataStreamIndices(String dataStreamFromNonDataStreamIndices) throws IOException {
/*
* This method creates an index, creates an alias to that index, and then converts the aliased index into a data stream. This is
* similar to the path that many indices (including system indices) took in versions 7/8.
*/
// First, we create an ordinary index with no @timestamp mapping:
final String templateWithNoTimestamp = """
{
"mappings":{
"properties": {
"message": {
"type": "text"
}
}
}
}
""";
// Note that this is not a data stream template:
final String indexTemplate = """
{
"index_patterns": ["$PATTERN"],
"template": $TEMPLATE
}""";
var putIndexTemplateRequest = new Request("POST", "/_index_template/reindex_test_data_stream_index_template");
putIndexTemplateRequest.setJsonEntity(
indexTemplate.replace("$TEMPLATE", templateWithNoTimestamp).replace("$PATTERN", dataStreamFromNonDataStreamIndices + "-*")
);
String indexName = dataStreamFromNonDataStreamIndices + "-01";
if (minimumTransportVersion().before(TransportVersions.V_8_0_0)) {
/*
* It is not possible to create a 7.x index template with a type. And you can't create an empty index with a type. But you can
* create the index with a type by posting a document to an index with a type. We do that here so that we test that the type is
* removed when we reindex into 8.x.
*/
String typeName = "test-type";
Request createIndexRequest = new Request("POST", indexName + "/" + typeName);
createIndexRequest.setJsonEntity("""
{
"@timestamp": "2099-11-15T13:12:00",
"message": "GET /search HTTP/1.1 200 1070000",
"user": {
"id": "kimchy"
}
}""");
createIndexRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build());
assertOK(client().performRequest(createIndexRequest));
}
assertOK(client().performRequest(putIndexTemplateRequest));
bulkLoadDataMissingTimestamp(indexName);
/*
* Next, we will change the index's mapping to include a @timestamp field since we are going to convert it to a data stream. But
* first we have to flush the translog to disk because adding a @timestamp field will cause errors if it is done before the translog
* is flushed:
*/
assertOK(client().performRequest(new Request("POST", indexName + "/_flush")));
ensureHealth(indexName, (request -> {
request.addParameter("wait_for_nodes", "3");
request.addParameter("wait_for_status", "green");
request.addParameter("timeout", "70s");
request.addParameter("level", "shards");
}));
// Updating the mapping to include @timestamp:
Request updateIndexMappingRequest = new Request("PUT", indexName + "/_mapping");
updateIndexMappingRequest.setJsonEntity("""
{
"properties": {
"@timestamp" : {
"type": "date"
},
"message": {
"type": "text"
}
}
}""");
assertOK(client().performRequest(updateIndexMappingRequest));
// Creating an alias with the same name that the data stream will have:
Request createAliasRequest = new Request("POST", "/_aliases");
String aliasRequestBody = """
{
"actions": [
{
"add": {
"index": "$index",
"alias": "$alias"
}
}
]
}""";
createAliasRequest.setJsonEntity(
aliasRequestBody.replace("$index", indexName).replace("$alias", dataStreamFromNonDataStreamIndices)
);
assertOK(client().performRequest(createAliasRequest));
// This is now just an aliased index. We'll convert it into a data stream
final String templateWithTimestamp = """
{
"mappings":{
"properties": {
"@timestamp" : {
"type": "date"
},
"message": {
"type": "text"
}
}
}
}
""";
final String dataStreamTemplate = """
{
"index_patterns": ["$PATTERN"],
"template": $TEMPLATE,
"data_stream": {
}
}""";
var putDataStreamTemplateRequest = new Request("POST", "/_index_template/reindex_test_data_stream_data_stream_template");
putDataStreamTemplateRequest.setJsonEntity(
dataStreamTemplate.replace("$TEMPLATE", templateWithTimestamp).replace("$PATTERN", dataStreamFromNonDataStreamIndices)
);
assertOK(client().performRequest(putDataStreamTemplateRequest));
Request migrateToDataStreamRequest = new Request("POST", "/_data_stream/_migrate/" + dataStreamFromNonDataStreamIndices);
assertOK(client().performRequest(migrateToDataStreamRequest));
}
    /**
     * Kicks off a {@code _migration/reindex} upgrade of the given data stream as a restricted user and then
     * polls the {@code _status} endpoint until the migration reports {@code complete}, asserting the expected
     * index counts, success count and error count.
     *
     * @param dataStreamName          the data stream to upgrade
     * @param numRolloversOnOldCluster rollovers that were performed while still on the old cluster
     * @param expectedSuccessesCount  number of backing indices expected to be reindexed successfully
     * @param expectedErrorCount      number of backing indices expected to fail reindexing
     * @param ilmEnabled              when false, some rolled-over indices may be randomly closed to exercise that path
     */
    @SuppressWarnings("unchecked")
    private void upgradeDataStream(
        String dataStreamName,
        int numRolloversOnOldCluster,
        int expectedSuccessesCount,
        int expectedErrorCount,
        boolean ilmEnabled
    ) throws Exception {
        // Snapshot of the backing indices before any new-cluster rollovers; these are the indices the
        // migration is expected to replace (and delete, when there are no errors).
        List<String> indicesNeedingUpgrade = getDataStreamBackingIndexNames(dataStreamName);
        final int explicitRolloverOnNewClusterCount = randomIntBetween(0, 2);
        for (int i = 0; i < explicitRolloverOnNewClusterCount; i++) {
            String oldIndexName = rollover(dataStreamName);
            // Randomly close some rolled-over indices (only when ILM is off) to verify closed indices are handled.
            if (ilmEnabled == false && randomBoolean()) {
                closeIndex(oldIndexName);
            }
        }
        Request reindexRequest = new Request("POST", "/_migration/reindex");
        reindexRequest.setJsonEntity(Strings.format("""
            {
              "mode": "upgrade",
              "source": {
                "index": "%s"
              }
            }""", dataStreamName));
        // Run the migration as a dedicated user that only has "manage" on the data stream, so the test also
        // verifies the endpoint works without superuser privileges.
        String upgradeUser = "upgrade_user";
        String upgradeUserPassword = "x-pack-test-password";
        createRole("upgrade_role", dataStreamName);
        createUser(upgradeUser, upgradeUserPassword, "upgrade_role");
        try (RestClient upgradeUserClient = getClient(upgradeUser, upgradeUserPassword)) {
            Response reindexResponse = upgradeUserClient.performRequest(reindexRequest);
            assertOK(reindexResponse);
            assertBusy(() -> {
                Request statusRequest = new Request("GET", "_migration/reindex/" + dataStreamName + "/_status");
                Response statusResponse = upgradeUserClient.performRequest(statusRequest);
                Map<String, Object> statusResponseMap = XContentHelper.convertToMap(
                    JsonXContent.jsonXContent,
                    statusResponse.getEntity().getContent(),
                    false
                );
                // Flattened key=value rendering of the status body, used as the assertion message for debuggability.
                String statusResponseString = statusResponseMap.keySet()
                    .stream()
                    .map(key -> key + "=" + statusResponseMap.get(key))
                    .collect(Collectors.joining(", ", "{", "}"));
                assertOK(statusResponse);
                assertThat(statusResponseString, statusResponseMap.get("complete"), equalTo(true));
                // Every data stream starts with exactly one write index.
                final int originalWriteIndex = 1;
                if (isOriginalClusterSameMajorVersionAsCurrent() || CLUSTER_TYPE == ClusterType.OLD) {
                    assertThat(
                        statusResponseString,
                        statusResponseMap.get("total_indices_in_data_stream"),
                        equalTo(originalWriteIndex + numRolloversOnOldCluster + explicitRolloverOnNewClusterCount)
                    );
                    // If the original cluster was the same as this one, we don't want any indices reindexed:
                    assertThat(statusResponseString, statusResponseMap.get("total_indices_requiring_upgrade"), equalTo(0))
;
                    assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(0));
                } else {
                    // The number of rollovers that will have happened when we call reindex:
                    // reindex rolls the write index over itself, but only if we haven't already rolled it over explicitly.
                    final int rolloversPerformedByReindex = explicitRolloverOnNewClusterCount == 0 ? 1 : 0;
                    final int expectedTotalIndicesInDataStream = originalWriteIndex + numRolloversOnOldCluster
                        + explicitRolloverOnNewClusterCount + rolloversPerformedByReindex;
                    assertThat(
                        statusResponseString,
                        statusResponseMap.get("total_indices_in_data_stream"),
                        equalTo(expectedTotalIndicesInDataStream)
                    );
                    /*
                     * total_indices_requiring_upgrade is made up of: (the original write index) + numRolloversOnOldCluster. The number of
                     * rollovers on the upgraded cluster is irrelevant since those will not be reindexed.
                     */
                    assertThat(
                        statusResponseString,
                        statusResponseMap.get("total_indices_requiring_upgrade"),
                        equalTo(originalWriteIndex + numRolloversOnOldCluster)
                    );
                    assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(expectedSuccessesCount));
                    // We expect all the original indices to have been deleted
                    if (expectedErrorCount == 0) {
                        for (String oldIndex : indicesNeedingUpgrade) {
                            assertThat(statusResponseString, indexExists(oldIndex), equalTo(false));
                        }
                    }
                    assertThat(
                        statusResponseString,
                        getDataStreamBackingIndexNames(dataStreamName).size(),
                        equalTo(expectedTotalIndicesInDataStream)
                    );
                    assertThat(statusResponseString, ((List<Object>) statusResponseMap.get("errors")).size(), equalTo(expectedErrorCount));
                }
            }, 60, TimeUnit.SECONDS);
            // Verify it's possible to reindex again after a successful reindex
            reindexResponse = upgradeUserClient.performRequest(reindexRequest);
            assertOK(reindexResponse);
        }
    }
private void makeSureNoUpgrade(String dataStreamName) throws Exception {
String upgradeUser = "upgrade_user";
String upgradeUserPassword = "x-pack-test-password";
createRole("upgrade_role", dataStreamName);
createUser(upgradeUser, upgradeUserPassword, "upgrade_role");
try (RestClient upgradeUserClient = getClient(upgradeUser, upgradeUserPassword)) {
assertBusy(() -> {
try {
Request statusRequest = new Request("GET", "_migration/reindex/" + dataStreamName + "/_status");
Response statusResponse = upgradeUserClient.performRequest(statusRequest);
Map<String, Object> statusResponseMap = XContentHelper.convertToMap(
JsonXContent.jsonXContent,
statusResponse.getEntity().getContent(),
false
);
String statusResponseString = statusResponseMap.keySet()
.stream()
.map(key -> key + "=" + statusResponseMap.get(key))
.collect(Collectors.joining(", ", "{", "}"));
assertOK(statusResponse);
assertThat(statusResponseString, statusResponseMap.get("complete"), equalTo(true));
assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(0));
} catch (Exception e) {
fail(e);
}
}, 60, TimeUnit.SECONDS);
}
}
/*
* Similar to isOriginalClusterCurrent, but returns true if the major versions of the clusters are the same. So true
* for 8.6 and 8.17, but false for 7.17 and 8.18.
*/
private boolean isOriginalClusterSameMajorVersionAsCurrent() {
/*
* Since data stream reindex is specifically about upgrading a data stream from one major version to the next, it's ok to use the
* deprecated Version.fromString here
*/
return Version.fromString(UPGRADE_FROM_VERSION).major == Version.fromString(Build.current().version()).major;
}
private static void bulkLoadData(String dataStreamName) throws IOException {
final String bulk = """
{"create": {}}
{"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "cat", "network": {"tx": 2001818691, "rx": 802133794}}}}
{"create": {}}
{"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "hamster", "network": {"tx": 2005177954, "rx": 801479970}}}}
{"create": {}}
{"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "cow", "network": {"tx": 2006223737, "rx": 802337279}}}}
{"create": {}}
{"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "rat", "network": {"tx": 2012916202, "rx": 803685721}}}}
{"create": {}}
{"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "dog", "network": {"tx": 1434521831, "rx": 530575198}}}}
{"create": {}}
{"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "tiger", "network": {"tx": 1434577921, "rx": 530600088}}}}
{"create": {}}
{"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "lion", "network": {"tx": 1434587694, "rx": 530604797}}}}
{"create": {}}
{"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "elephant", "network": {"tx": 1434595272, "rx": 530605511}}}}
""";
var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk");
bulkRequest.setJsonEntity(bulk.replace("$now", formatInstant(Instant.now())));
var response = client().performRequest(bulkRequest);
assertOK(response);
}
    /*
     * This bulkloads data, where some documents have no @timestamp field and some do.
     * The first three documents omit @timestamp entirely; only the last one carries it. This is used to
     * exercise indices created before the data-stream @timestamp mapping requirement was in place.
     */
    private static void bulkLoadDataMissingTimestamp(String dataStreamName) throws IOException {
        final String bulk = """
            {"create": {}}
            {"metricset": "pod", "k8s": {"pod": {"name": "cat", "network": {"tx": 2001818691, "rx": 802133794}}}}
            {"create": {}}
            {"metricset": "pod", "k8s": {"pod": {"name": "hamster", "network": {"tx": 2005177954, "rx": 801479970}}}}
            {"create": {}}
            {"metricset": "pod", "k8s": {"pod": {"name": "cow", "network": {"tx": 2006223737, "rx": 802337279}}}}
            {"create": {}}
            {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "rat", "network": {"tx": 2012916202, "rx": 803685721}}}}
            """;
        // $now is replaced with the current time in strict_date_optional_time format.
        var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk");
        bulkRequest.setJsonEntity(bulk.replace("$now", formatInstant(Instant.now())));
        var response = client().performRequest(bulkRequest);
        assertOK(response);
    }
static String formatInstant(Instant instant) {
return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant);
}
private static String rollover(String dataStreamName) throws IOException {
Request rolloverRequest = new Request("POST", "/" + dataStreamName + "/_rollover");
Response rolloverResponse = client().performRequest(rolloverRequest);
assertOK(rolloverResponse);
String oldIndexName = (String) entityAsMap(rolloverResponse).get("old_index");
return oldIndexName;
}
private void createUser(String name, String password, String role) throws IOException {
Request request = new Request("PUT", "/_security/user/" + name);
request.setJsonEntity("{ \"password\": \"" + password + "\", \"roles\": [ \"" + role + "\"] }");
assertOK(adminClient().performRequest(request));
}
private void createRole(String name, String dataStream) throws IOException {
Request request = new Request("PUT", "/_security/role/" + name);
request.setJsonEntity("{ \"indices\": [ { \"names\" : [ \"" + dataStream + "\"], \"privileges\": [ \"manage\" ] } ] }");
assertOK(adminClient().performRequest(request));
}
private void deleteDataStream(String name) throws IOException {
client().performRequest(new Request("DELETE", "_data_stream/" + name));
}
private RestClient getClient(String user, String passwd) throws IOException {
RestClientBuilder builder = RestClient.builder(adminClient().getNodes().toArray(new Node[0]));
String token = basicAuthHeaderValue(user, new SecureString(passwd.toCharArray()));
configureClient(builder, Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build());
builder.setStrictDeprecationMode(true);
return builder.build();
}
}
| DataStreamsUpgradeIT |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/builder/CompareToBuilder.java | {
"start": 3463,
"end": 3634
} | class ____).</p>
*
* @see Comparable
* @see Object#equals(Object)
* @see Object#hashCode()
* @see EqualsBuilder
* @see HashCodeBuilder
* @since 1.0
*/
public | hierarchy |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/ProcessFunction.java | {
"start": 4452,
"end": 5335
} | class ____ {
/**
* Timestamp of the element currently being processed or timestamp of a firing timer.
*
* <p>This might be {@code null}, depending on the stream's watermark strategy.
*/
public abstract Long timestamp();
/** A {@link TimerService} for querying time and registering timers. */
public abstract TimerService timerService();
/**
* Emits a record to the side output identified by the {@link OutputTag}.
*
* @param outputTag the {@code OutputTag} that identifies the side output to emit to.
* @param value The record to emit.
*/
public abstract <X> void output(OutputTag<X> outputTag, X value);
}
/**
* Information available in an invocation of {@link #onTimer(long, OnTimerContext, Collector)}.
*/
public abstract | Context |
java | google__dagger | javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveProvidesParameterizedTypeTest.java | {
"start": 5562,
"end": 6359
} | interface ____ {",
" @Provides",
" static String provideInt(TransitiveType<String> transitiveType) {",
" return \"\";",
" }",
"",
" @Provides",
" static TransitiveType<String> provideTransitiveType() {",
" return new TransitiveType<>();",
" }",
"}");
GradleModule.create(projectDir, "library2")
.addBuildFile(
"plugins {",
" id 'java'",
" id 'java-library'",
"}",
"dependencies {",
" implementation 'javax.inject:javax.inject:1'",
"}")
.addSrcFile(
"TransitiveType.java",
"package library2;",
"",
"public | MyModule |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java | {
"start": 1026,
"end": 2013
} | class ____ {
/**
* Test is key-field-based partitioned works with empty key.
*/
@Test
public void testEmptyKey() throws Exception {
KeyFieldBasedPartitioner<Text, Text> kfbp =
new KeyFieldBasedPartitioner<Text, Text>();
JobConf conf = new JobConf();
conf.setInt("num.key.fields.for.partition", 10);
kfbp.configure(conf);
assertEquals(0, kfbp.getPartition(new Text(), new Text(), 10),
"Empty key should map to 0th partition");
}
@Test
public void testMultiConfigure() {
KeyFieldBasedPartitioner<Text, Text> kfbp =
new KeyFieldBasedPartitioner<Text, Text>();
JobConf conf = new JobConf();
conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k1,1");
kfbp.setConf(conf);
Text key = new Text("foo\tbar");
Text val = new Text("val");
int partNum = kfbp.getPartition(key, val, 4096);
kfbp.configure(conf);
assertEquals(partNum, kfbp.getPartition(key,val, 4096));
}
} | TestKeyFieldBasedPartitioner |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/CdsLoadBalancerProvider.java | {
"start": 3298,
"end": 4040
} | class ____ {
/**
* Name of cluster to query CDS for.
*/
final String name;
/**
* Whether this cluster was dynamically chosen, so the XdsDependencyManager may be unaware of
* it without an explicit cluster subscription.
*/
final boolean isDynamic;
CdsConfig(String name) {
this(name, false);
}
CdsConfig(String name, boolean isDynamic) {
checkArgument(name != null && !name.isEmpty(), "name is null or empty");
this.name = name;
this.isDynamic = isDynamic;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("name", name)
.add("isDynamic", isDynamic)
.toString();
}
}
}
| CdsConfig |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/writer/AbstractBeanDefinitionBuilder.java | {
"start": 46481,
"end": 49074
} | class ____ extends InternalBeanElement<FieldElement> implements BeanFieldElement {
private final FieldElement fieldElement;
private final boolean requiresReflection;
private ClassElement genericType;
private InternalBeanElementField(FieldElement element, boolean requiresReflection) {
super(element, MutableAnnotationMetadata.of(element.getAnnotationMetadata()));
this.fieldElement = element;
this.requiresReflection = requiresReflection;
}
public boolean isRequiresReflection() {
return requiresReflection;
}
@Override
public BeanFieldElement inject() {
if (!AbstractBeanDefinitionBuilder.this.injectedFields.contains(this)) {
AbstractBeanDefinitionBuilder.this.injectedFields.add(this);
}
return BeanFieldElement.super.inject();
}
@Override
public BeanFieldElement injectValue(String expression) {
if (!AbstractBeanDefinitionBuilder.this.injectedFields.contains(this)) {
AbstractBeanDefinitionBuilder.this.injectedFields.add(this);
}
return BeanFieldElement.super.injectValue(expression);
}
@Override
public ClassElement getDeclaringType() {
return fieldElement.getDeclaringType();
}
@Override
public ClassElement getOwningType() {
return AbstractBeanDefinitionBuilder.this.beanType;
}
@NonNull
@Override
public ClassElement getType() {
return fieldElement.getType();
}
@Override
public ClassElement getGenericField() {
if (genericType != null) {
return genericType;
} else {
return fieldElement.getGenericField();
}
}
@NonNull
@Override
public BeanFieldElement typeArguments(@NonNull ClassElement... types) {
final ClassElement genericType = fieldElement.getGenericField();
final Map<String, ClassElement> typeArguments = genericType.getTypeArguments();
final Map<String, ClassElement> resolved = resolveTypeArguments(typeArguments, types);
if (resolved != null) {
this.genericType = genericType.withTypeArguments(resolved).withAnnotationMetadata(getAnnotationMetadata());
}
return this;
}
}
/**
* Models a {@link BeanParameterElement}.
*/
private final | InternalBeanElementField |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ShowCreateModelOperation.java | {
"start": 1305,
"end": 2248
} | class ____ implements ShowOperation {
private final ObjectIdentifier modelIdentifier;
private final ResolvedCatalogModel model;
private final boolean isTemporary;
public ShowCreateModelOperation(
ObjectIdentifier sqlIdentifier, ResolvedCatalogModel model, boolean isTemporary) {
this.modelIdentifier = sqlIdentifier;
this.model = model;
this.isTemporary = isTemporary;
}
public ResolvedCatalogModel getModel() {
return model;
}
@Override
public String asSummaryString() {
return String.format("SHOW CREATE MODEL %s", modelIdentifier.asSummaryString());
}
@Override
public TableResultInternal execute(Context ctx) {
String resultRow =
ShowCreateUtil.buildShowCreateModelRow(model, modelIdentifier, isTemporary);
return buildStringArrayResult("result", new String[] {resultRow});
}
}
| ShowCreateModelOperation |
java | apache__camel | catalog/camel-report-maven-plugin/src/test/java/org/apache/camel/maven/htmlxlsx/model/RouteStatisticTest.java | {
"start": 1065,
"end": 1441
} | class ____ extends GetterAndSetterTest<RouteStatistic> {
@Override
protected RouteStatistic getInstance() {
return TestUtil.routeStatistic();
}
@Test
public void testToString() {
String toString = getInstance().toString();
assertNotNull(toString);
assertTrue(toString.contains("RouteStatistic"));
}
}
| RouteStatisticTest |
java | apache__flink | flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/history/HistoryServerTest.java | {
"start": 23353,
"end": 23711
} | class ____ implements AutoCloseable {
private final JsonGenerator gen;
JsonArray(JsonGenerator gen, String name) throws IOException {
this.gen = gen;
gen.writeArrayFieldStart(name);
}
@Override
public void close() throws IOException {
gen.writeEndArray();
}
}
}
| JsonArray |
java | apache__dubbo | dubbo-metadata/dubbo-metadata-api/src/test/java/org/apache/dubbo/metadata/AbstractServiceNameMappingTest.java | {
"start": 1408,
"end": 3611
} | class ____ {
private MockServiceNameMapping mapping = new MockServiceNameMapping(ApplicationModel.defaultModel());
private MockServiceNameMapping2 mapping2 = new MockServiceNameMapping2(ApplicationModel.defaultModel());
URL url = URL.valueOf("dubbo://127.0.0.1:21880/" + AbstractServiceNameMappingTest.class.getName());
@BeforeEach
public void setUp() throws Exception {}
@AfterEach
public void clearup() {
mapping.removeCachedMapping(ServiceNameMapping.buildMappingKey(url));
}
@Test
void testGetServices() {
url = url.addParameter(PROVIDED_BY, "app1,app2");
Set<String> services = mapping.getMapping(url);
Assertions.assertTrue(services.contains("app1"));
Assertions.assertTrue(services.contains("app2"));
// // remove mapping cache, check get() works.
// mapping.removeCachedMapping(ServiceNameMapping.buildMappingKey(url));
// services = mapping.initInterfaceAppMapping(url);
// Assertions.assertTrue(services.contains("remote-app1"));
// Assertions.assertTrue(services.contains("remote-app2"));
// Assertions.assertNotNull(mapping.getCachedMapping(url));
// Assertions.assertIterableEquals(mapping.getCachedMapping(url), services);
}
@Test
void testGetAndListener() {
URL registryURL = URL.valueOf("registry://127.0.0.1:7777/test");
registryURL = registryURL.addParameter(SUBSCRIBED_SERVICE_NAMES_KEY, "registry-app1");
Set<String> services = mapping2.getAndListen(registryURL, url, null);
Assertions.assertTrue(services.contains("registry-app1"));
// remove mapping cache, check get() works.
mapping2.removeCachedMapping(ServiceNameMapping.buildMappingKey(url));
mapping2.enabled = true;
services = mapping2.getAndListen(registryURL, url, new MappingListener() {
@Override
public void onEvent(MappingChangedEvent event) {}
@Override
public void stop() {}
});
Assertions.assertTrue(services.contains("remote-app3"));
}
private | AbstractServiceNameMappingTest |
java | apache__camel | components/camel-jetty/src/test/java/org/apache/camel/component/jetty/async/JettyAsyncTest.java | {
"start": 1111,
"end": 1871
} | class ____ extends BaseJettyTest {
@Test
public void testJettyAsync() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
String reply = template.requestBody("http://localhost:{{port}}/myservice", null, String.class);
assertEquals("Bye World", reply);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.addComponent("async", new MyAsyncComponent());
from("jetty:http://localhost:{{port}}/myservice").to("async:bye:world").to("mock:result");
}
};
}
}
| JettyAsyncTest |
java | apache__camel | components/camel-dynamic-router/src/main/java/org/apache/camel/component/dynamicrouter/filter/PrioritizedFilter.java | {
"start": 2600,
"end": 3407
} | class ____ {
/**
* Create this processor with all properties.
*
* @param id the identifier
* @param priority the priority of this processor
* @param predicate the rule expression
* @param endpoint the destination endpoint for matching exchanges
* @param statistics the object that holds routing statistics for this filter
*/
public PrioritizedFilter getInstance(
final String id,
final int priority,
final Predicate predicate,
final String endpoint,
final PrioritizedFilterStatistics statistics) {
return new PrioritizedFilter(id, priority, predicate, endpoint, statistics);
}
}
}
| PrioritizedFilterFactory |
java | spring-projects__spring-boot | module/spring-boot-data-redis/src/dockerTest/java/org/springframework/boot/data/redis/testcontainers/RedisStackContainerConnectionDetailsFactoryTests.java | {
"start": 1858,
"end": 2595
} | class ____ {
@Container
@ServiceConnection
static final RedisStackContainer redis = TestImage.container(RedisStackContainer.class);
@Autowired(required = false)
private DataRedisConnectionDetails connectionDetails;
@Autowired
private RedisConnectionFactory connectionFactory;
@Test
void connectionCanBeMadeToRedisContainer() {
assertThat(this.connectionDetails).isNotNull();
try (RedisConnection connection = this.connectionFactory.getConnection()) {
assertThat(connection.commands().echo("Hello, World".getBytes())).isEqualTo("Hello, World".getBytes());
}
}
@Configuration(proxyBeanMethods = false)
@ImportAutoConfiguration(DataRedisAutoConfiguration.class)
static | RedisStackContainerConnectionDetailsFactoryTests |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/scan/ScanBean.java | {
"start": 697,
"end": 870
} | class ____ {
private final String value;
public ScanBean(String value) {
this.value = value;
}
@Override
public String toString() {
return this.value;
}
}
| ScanBean |
java | google__dagger | dagger-runtime/main/java/dagger/Component.java | {
"start": 13668,
"end": 13769
} | interface ____ {
* MyWidget myWidget();
*
* {@literal @}Component.Builder
* | MyComponent |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java | {
"start": 22370,
"end": 23126
} | class ____ {
BlockIteratorState() {
lastSavedMs = iterStartMs = Time.now();
curFinalizedDir = null;
curFinalizedSubDir = null;
curEntry = null;
atEnd = false;
}
// The wall-clock ms since the epoch at which this iterator was last saved.
@JsonProperty
private long lastSavedMs;
// The wall-clock ms since the epoch at which this iterator was created.
@JsonProperty
private long iterStartMs;
@JsonProperty
private String curFinalizedDir;
@JsonProperty
private String curFinalizedSubDir;
@JsonProperty
private String curEntry;
@JsonProperty
private boolean atEnd;
}
/**
* A BlockIterator implementation for FsVolumeImpl.
*/
private | BlockIteratorState |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/identifier/CompositeIdGenerationTypeTest.java | {
"start": 5936,
"end": 6138
} | class ____ {
@GeneratedValue
private Long id;
@UuidGenerator
private String uuid;
public Long getId() {
return id;
}
public String getUuid() {
return uuid;
}
}
}
| MultipleEmbeddedPK |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/TestNMProtoUtils.java | {
"start": 1826,
"end": 4947
} | class ____ {
@Test
public void testConvertProtoToDeletionTask() throws Exception {
DeletionService deletionService = mock(DeletionService.class);
DeletionServiceDeleteTaskProto.Builder protoBuilder =
DeletionServiceDeleteTaskProto.newBuilder();
int id = 0;
protoBuilder.setId(id);
DeletionServiceDeleteTaskProto proto = protoBuilder.build();
DeletionTask deletionTask =
NMProtoUtils.convertProtoToDeletionTask(proto, deletionService);
assertEquals(DeletionTaskType.FILE, deletionTask.getDeletionTaskType());
assertEquals(id, deletionTask.getTaskId());
}
@Test
public void testConvertProtoToFileDeletionTask() throws Exception {
DeletionService deletionService = mock(DeletionService.class);
int id = 0;
String user = "user";
Path subdir = new Path("subdir");
Path basedir = new Path("basedir");
DeletionServiceDeleteTaskProto.Builder protoBuilder =
DeletionServiceDeleteTaskProto.newBuilder();
protoBuilder
.setId(id)
.setUser("user")
.setSubdir(subdir.getName())
.addBasedirs(basedir.getName());
DeletionServiceDeleteTaskProto proto = protoBuilder.build();
DeletionTask deletionTask =
NMProtoUtils.convertProtoToFileDeletionTask(proto, deletionService, id);
assertEquals(DeletionTaskType.FILE.name(),
deletionTask.getDeletionTaskType().name());
assertEquals(id, deletionTask.getTaskId());
assertEquals(subdir, ((FileDeletionTask) deletionTask).getSubDir());
assertEquals(basedir,
((FileDeletionTask) deletionTask).getBaseDirs().get(0));
}
@Test
public void testConvertProtoToDockerContainerDeletionTask() throws Exception {
DeletionService deletionService = mock(DeletionService.class);
int id = 0;
String user = "user";
String dockerContainerId = "container_e123_12321231_00001";
DeletionServiceDeleteTaskProto.Builder protoBuilder =
DeletionServiceDeleteTaskProto.newBuilder();
protoBuilder
.setId(id)
.setUser(user)
.setDockerContainerId(dockerContainerId);
DeletionServiceDeleteTaskProto proto = protoBuilder.build();
DeletionTask deletionTask =
NMProtoUtils.convertProtoToDockerContainerDeletionTask(proto,
deletionService, id);
assertEquals(DeletionTaskType.DOCKER_CONTAINER.name(),
deletionTask.getDeletionTaskType().name());
assertEquals(id, deletionTask.getTaskId());
assertEquals(dockerContainerId,
((DockerContainerDeletionTask) deletionTask).getContainerId());
}
@Test
public void testConvertProtoToDeletionTaskRecoveryInfo() throws Exception {
long delTime = System.currentTimeMillis();
List<Integer> successorTaskIds = Arrays.asList(1);
DeletionTask deletionTask = mock(DeletionTask.class);
DeletionTaskRecoveryInfo info =
new DeletionTaskRecoveryInfo(deletionTask, successorTaskIds, delTime);
assertEquals(deletionTask, info.getTask());
assertEquals(successorTaskIds, info.getSuccessorTaskIds());
assertEquals(delTime, info.getDeletionTimestamp());
}
} | TestNMProtoUtils |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.