language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/convert/ConvertingDeserializerTest.java | {
"start": 1604,
"end": 1860
} | class ____ {
@JsonDeserialize(converter=PointConverter.class)
public Point value;
protected PointWrapper() { }
protected PointWrapper(int x, int y) {
value = new Point(x, y);
}
}
static | PointWrapper |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/request/ElasticInferenceServiceDenseTextEmbeddingsRequestEntityTests.java | {
"start": 813,
"end": 6385
} | class ____ extends ESTestCase {
public void testToXContent_SingleInput_UnspecifiedUsageContext() throws IOException {
var entity = new ElasticInferenceServiceDenseTextEmbeddingsRequestEntity(
List.of("abc"),
"my-model-id",
ElasticInferenceServiceUsageContext.UNSPECIFIED,
null
);
String xContentString = xContentEntityToString(entity);
assertThat(xContentString, equalToIgnoringWhitespaceInJsonString("""
{
"input": ["abc"],
"model": "my-model-id"
}"""));
}
public void testToXContent_MultipleInputs_UnspecifiedUsageContext() throws IOException {
var entity = new ElasticInferenceServiceDenseTextEmbeddingsRequestEntity(
List.of("abc", "def"),
"my-model-id",
ElasticInferenceServiceUsageContext.UNSPECIFIED,
null
);
String xContentString = xContentEntityToString(entity);
assertThat(xContentString, equalToIgnoringWhitespaceInJsonString("""
{
"input": [
"abc",
"def"
],
"model": "my-model-id"
}
"""));
}
public void testToXContent_SingleInput_SearchUsageContext() throws IOException {
var entity = new ElasticInferenceServiceDenseTextEmbeddingsRequestEntity(
List.of("abc"),
"my-model-id",
ElasticInferenceServiceUsageContext.SEARCH,
null
);
String xContentString = xContentEntityToString(entity);
assertThat(xContentString, equalToIgnoringWhitespaceInJsonString("""
{
"input": ["abc"],
"model": "my-model-id",
"usage_context": "search"
}
"""));
}
public void testToXContent_SingleInput_IngestUsageContext() throws IOException {
var entity = new ElasticInferenceServiceDenseTextEmbeddingsRequestEntity(
List.of("abc"),
"my-model-id",
ElasticInferenceServiceUsageContext.INGEST,
null
);
String xContentString = xContentEntityToString(entity);
assertThat(xContentString, equalToIgnoringWhitespaceInJsonString("""
{
"input": ["abc"],
"model": "my-model-id",
"usage_context": "ingest"
}
"""));
}
public void testToXContent_SingleInput_DimensionsSpecified() throws IOException {
var entity = new ElasticInferenceServiceDenseTextEmbeddingsRequestEntity(
List.of("abc"),
"my-model-id",
ElasticInferenceServiceUsageContext.UNSPECIFIED,
100
);
String xContentString = xContentEntityToString(entity);
assertThat(xContentString, equalToIgnoringWhitespaceInJsonString("""
{
"input": ["abc"],
"model": "my-model-id",
"dimensions": 100
}
"""));
}
public void testToXContent_MultipleInputs_SearchUsageContext() throws IOException {
var entity = new ElasticInferenceServiceDenseTextEmbeddingsRequestEntity(
List.of("first input", "second input", "third input"),
"my-dense-model",
ElasticInferenceServiceUsageContext.SEARCH,
null
);
String xContentString = xContentEntityToString(entity);
assertThat(xContentString, equalToIgnoringWhitespaceInJsonString("""
{
"input": [
"first input",
"second input",
"third input"
],
"model": "my-dense-model",
"usage_context": "search"
}
"""));
}
public void testToXContent_MultipleInputs_IngestUsageContext() throws IOException {
var entity = new ElasticInferenceServiceDenseTextEmbeddingsRequestEntity(
List.of("document one", "document two"),
"embedding-model-v2",
ElasticInferenceServiceUsageContext.INGEST,
null
);
String xContentString = xContentEntityToString(entity);
assertThat(xContentString, equalToIgnoringWhitespaceInJsonString("""
{
"input": [
"document one",
"document two"
],
"model": "embedding-model-v2",
"usage_context": "ingest"
}
"""));
}
public void testToXContent_EmptyInput_UnspecifiedUsageContext() throws IOException {
var entity = new ElasticInferenceServiceDenseTextEmbeddingsRequestEntity(
List.of(""),
"my-model-id",
ElasticInferenceServiceUsageContext.UNSPECIFIED,
null
);
String xContentString = xContentEntityToString(entity);
assertThat(xContentString, equalToIgnoringWhitespaceInJsonString("""
{
"input": [""],
"model": "my-model-id"
}
"""));
}
private String xContentEntityToString(ElasticInferenceServiceDenseTextEmbeddingsRequestEntity entity) throws IOException {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
return Strings.toString(builder);
}
}
| ElasticInferenceServiceDenseTextEmbeddingsRequestEntityTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java | {
"start": 1487,
"end": 5126
} | class ____ implements RpcMultiplexer {
// Config keys
public static final String IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY =
"faircallqueue.multiplexer.weights";
public static final Logger LOG =
LoggerFactory.getLogger(WeightedRoundRobinMultiplexer.class);
private final int numQueues; // The number of queues under our provisioning
private final AtomicInteger currentQueueIndex; // Current queue we're serving
private final AtomicInteger requestsLeft; // Number of requests left for this queue
private int[] queueWeights; // The weights for each queue
public WeightedRoundRobinMultiplexer(int aNumQueues, String ns,
Configuration conf) {
if (aNumQueues <= 0) {
throw new IllegalArgumentException("Requested queues (" + aNumQueues +
") must be greater than zero.");
}
this.numQueues = aNumQueues;
this.queueWeights = conf.getInts(ns + "." +
IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY);
if (this.queueWeights.length == 0) {
this.queueWeights = getDefaultQueueWeights(this.numQueues);
} else if (this.queueWeights.length != this.numQueues) {
throw new IllegalArgumentException(ns + "." +
IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY + " must specify exactly " +
this.numQueues + " weights: one for each priority level.");
}
this.currentQueueIndex = new AtomicInteger(0);
this.requestsLeft = new AtomicInteger(this.queueWeights[0]);
LOG.info("WeightedRoundRobinMultiplexer is being used.");
}
/**
* Creates default weights for each queue. The weights are 2^N.
*/
private int[] getDefaultQueueWeights(int aNumQueues) {
int[] weights = new int[aNumQueues];
int weight = 1; // Start low
for(int i = aNumQueues - 1; i >= 0; i--) { // Start at lowest queue
weights[i] = weight;
weight *= 2; // Double every iteration
}
return weights;
}
/**
* Move to the next queue.
*/
private void moveToNextQueue() {
int thisIdx = this.currentQueueIndex.get();
// Wrap to fit in our bounds
int nextIdx = (thisIdx + 1) % this.numQueues;
// Set to next index: once this is called, requests will start being
// drawn from nextIdx, but requestsLeft will continue to decrement into
// the negatives
this.currentQueueIndex.set(nextIdx);
// Finally, reset requestsLeft. This will enable moveToNextQueue to be
// called again, for the new currentQueueIndex
this.requestsLeft.set(this.queueWeights[nextIdx]);
LOG.debug("Moving to next queue from queue index {} to index {}, " +
"number of requests left for current queue: {}.",
thisIdx, nextIdx, requestsLeft);
}
/**
* Advances the index, which will change the current index
* if called enough times.
*/
private void advanceIndex() {
// Since we did read, we should decrement
int requestsLeftVal = this.requestsLeft.decrementAndGet();
// Strict compare with zero (instead of inequality) so that if another
// thread decrements requestsLeft, only one thread will be responsible
// for advancing currentQueueIndex
if (requestsLeftVal == 0) {
// This is guaranteed to be called exactly once per currentQueueIndex
this.moveToNextQueue();
}
}
/**
* Gets the current index. Should be accompanied by a call to
* advanceIndex at some point.
*/
private int getCurrentIndex() {
return this.currentQueueIndex.get();
}
/**
* Use the mux by getting and advancing index.
*/
public int getAndAdvanceCurrentIndex() {
int idx = this.getCurrentIndex();
this.advanceIndex();
return idx;
}
}
| WeightedRoundRobinMultiplexer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/Table.java | {
"start": 804,
"end": 6695
} | class ____ {
private List<Cell> headers = new ArrayList<>();
private List<List<Cell>> rows = new ArrayList<>();
private Map<String, List<Cell>> map = new HashMap<>();
private Map<String, Cell> headerMap = new HashMap<>();
private List<Cell> currentCells;
private boolean inHeaders = false;
private boolean withTime = false;
public static final String EPOCH = "epoch";
public static final String TIMESTAMP = "timestamp";
public Table startHeaders() {
inHeaders = true;
currentCells = new ArrayList<>();
return this;
}
public Table startHeadersWithTimestamp() {
startHeaders();
this.withTime = true;
addCell("epoch", "alias:t,time;desc:seconds since 1970-01-01 00:00:00");
addCell("timestamp", "alias:ts,hms,hhmmss;desc:time in HH:MM:SS");
return this;
}
public Table endHeaders() {
if (currentCells == null || currentCells.isEmpty()) {
throw new IllegalStateException("no headers added...");
}
inHeaders = false;
headers = currentCells;
currentCells = null;
/* Create associative structure for columns that
* contain the same cells as the rows:
*
* header1 => [Cell, Cell, ...]
* header2 => [Cell, Cell, ...]
* header3 => [Cell, Cell, ...]
*
* Also populate map to look up headers by name.
*
*/
for (Cell header : headers) {
map.put(header.value.toString(), new ArrayList<Cell>());
headerMap.put(header.value.toString(), header);
}
return this;
}
private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
public Table startRow() {
if (headers.isEmpty()) {
throw new IllegalStateException("no headers added...");
}
currentCells = new ArrayList<>(headers.size());
if (withTime) {
long time = System.currentTimeMillis();
addCell(TimeUnit.SECONDS.convert(time, TimeUnit.MILLISECONDS));
addCell(FORMATTER.format(Instant.ofEpochMilli(time)));
}
return this;
}
public Table endRow(boolean check) {
if (currentCells == null) {
throw new IllegalStateException("no row started...");
}
if (check && (currentCells.size() != headers.size())) {
StringBuilder s = new StringBuilder();
s.append("mismatch on number of cells ");
s.append(currentCells.size());
s.append(" in a row compared to header ");
s.append(headers.size());
throw new IllegalStateException(s.toString());
}
rows.add(currentCells);
currentCells = null;
return this;
}
public Table endRow() {
endRow(true);
return this;
}
public Table addCell(Object value) {
return addCell(value, "");
}
public Table addCell(Object value, String attributes) {
if (currentCells == null) {
throw new IllegalStateException("no block started...");
}
if (inHeaders == false) {
if (currentCells.size() == headers.size()) {
throw new IllegalStateException("can't add more cells to a row than the header");
}
}
Map<String, String> mAttr;
if (attributes.length() == 0) {
if (inHeaders) {
mAttr = emptyMap();
} else {
// get the attributes of the header cell we are going to add to
mAttr = headers.get(currentCells.size()).attr;
}
} else {
mAttr = new HashMap<>();
if (inHeaders == false) {
// get the attributes of the header cell we are going to add
mAttr.putAll(headers.get(currentCells.size()).attr);
}
String[] sAttrs = attributes.split(";");
for (String sAttr : sAttrs) {
if (sAttr.length() == 0) {
continue;
}
int idx = sAttr.indexOf(':');
mAttr.put(sAttr.substring(0, idx), sAttr.substring(idx + 1));
}
}
Cell cell = new Cell(value, mAttr);
int cellIndex = currentCells.size();
currentCells.add(cell);
// If we're in a value row, also populate the named column.
if (inHeaders == false) {
String hdr = (String) headers.get(cellIndex).value;
map.get(hdr).add(cell);
}
return this;
}
public List<Cell> getHeaders() {
return this.headers;
}
public List<List<Cell>> getRows() {
return rows;
}
public Map<String, List<Cell>> getAsMap() {
return this.map;
}
public Map<String, Cell> getHeaderMap() {
return this.headerMap;
}
public Cell findHeaderByName(String header) {
for (Cell cell : headers) {
if (cell.value.toString().equals(header)) {
return cell;
}
}
return null;
}
public Map<String, String> getAliasMap() {
Map<String, String> headerAliasMap = new HashMap<>();
for (int i = 0; i < headers.size(); i++) {
Cell headerCell = headers.get(i);
String headerName = headerCell.value.toString();
if (headerCell.attr.containsKey("alias")) {
String[] aliases = Strings.splitStringByCommaToArray(headerCell.attr.get("alias"));
for (String alias : aliases) {
headerAliasMap.put(alias, headerName);
}
}
headerAliasMap.put(headerName, headerName);
}
return headerAliasMap;
}
public static | Table |
java | quarkusio__quarkus | integration-tests/hibernate-orm-panache/src/test/java/io/quarkus/it/panache/defaultpu/MockPersonRepository.java | {
"start": 437,
"end": 493
} | class ____ extends PersonRepository {
}
| MockPersonRepository |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/common/WindowTableFunctionTestPrograms.java | {
"start": 1507,
"end": 23385
} | class ____ {
static final Row[] BEFORE_DATA = {
Row.of("2020-04-15 08:00:05", new BigDecimal(4.00), "C"),
Row.of("2020-04-15 08:00:07", new BigDecimal(2.00), "A"),
Row.of("2020-04-15 08:00:09", new BigDecimal(5.00), "D"),
Row.of("2020-04-15 08:00:11", new BigDecimal(3.00), "B"),
Row.of("2020-04-15 08:00:13", new BigDecimal(1.00), "E"),
Row.of("2020-04-15 08:00:17", new BigDecimal(6.00), "F")
};
static final Row[] AFTER_DATA = {
Row.of("2020-04-15 08:00:21", new BigDecimal(4.00), "A"),
Row.of("2020-04-15 08:00:27", new BigDecimal(5.00), "C")
};
static final SourceTestStep SOURCE =
SourceTestStep.newBuilder("bid_t")
.addSchema(
"ts STRING",
"price DECIMAL(10,2)",
"item STRING",
"`bid_time` AS TO_TIMESTAMP(`ts`)",
"`proc_time` AS PROCTIME()",
"WATERMARK for `bid_time` AS `bid_time` - INTERVAL '1' SECOND")
.producedBeforeRestore(BEFORE_DATA)
.producedAfterRestore(AFTER_DATA)
.build();
static final String TUMBLE_TVF =
"TABLE(TUMBLE(TABLE bid_t, DESCRIPTOR(%s), INTERVAL '10' SECOND))";
static final String TUMBLE_TVF_OFFSET =
"TABLE(TUMBLE(TABLE bid_t, DESCRIPTOR(%s), INTERVAL '10' SECOND, INTERVAL '%s' SECOND))";
static final String HOP_TVF =
"TABLE(HOP(TABLE bid_t, DESCRIPTOR(%s), INTERVAL '5' SECOND, INTERVAL '10' SECOND))";
static final String CUMULATE_TVF =
"TABLE(CUMULATE(TABLE bid_t, DESCRIPTOR(%s), INTERVAL '5' SECOND, INTERVAL '10' SECOND))";
static final String[] SINK_TVF_SCHEMA = {
"bid_time TIMESTAMP(3)",
"price DECIMAL(10,2)",
"item STRING",
"window_start TIMESTAMP(3)",
"window_end TIMESTAMP(3)",
"window_time TIMESTAMP_LTZ"
};
static final String[] SINK_TVF_AGG_SCHEMA = {
"window_start TIMESTAMP(3)", "window_end TIMESTAMP(3)", "price DECIMAL(10,2)"
};
static final String[] SINK_TVF_AGG_PROC_TIME_SCHEMA = {"price DECIMAL(10,2)"};
static final String QUERY_TVF =
"INSERT INTO sink_t SELECT\n "
+ " bid_time,\n"
+ " price,\n"
+ " item,\n"
+ " window_start,\n"
+ " window_end,\n"
+ " window_time\n"
+ " FROM\n"
+ " %s";
static final String QUERY_TVF_AGG =
"INSERT INTO sink_t SELECT\n"
+ " window_start,\n"
+ " window_end,\n"
+ " SUM(price)\n"
+ " FROM\n"
+ " %s\n"
+ " GROUP BY window_start, window_end";
static final String QUERY_TVF_AGG_PROC_TIME =
"INSERT INTO sink_t SELECT\n"
+ " SUM(price)\n"
+ " FROM\n"
+ " %s\n"
+ " GROUP BY window_start, window_end";
public static final TableTestProgram WINDOW_TABLE_FUNCTION_TUMBLE_TVF =
TableTestProgram.of(
"window-table-function-tumble-tvf",
"validates window table function using tumble tvf windows")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_SCHEMA)
.consumedBeforeRestore(
"+I[2020-04-15T08:00:05, 4.00, C, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:07, 2.00, A, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:09, 5.00, D, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:11, 3.00, B, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]",
"+I[2020-04-15T08:00:13, 1.00, E, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]",
"+I[2020-04-15T08:00:17, 6.00, F, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]")
.consumedAfterRestore(
"+I[2020-04-15T08:00:21, 4.00, A, 2020-04-15T08:00:20, 2020-04-15T08:00:30, 2020-04-15T08:00:29.999Z]",
"+I[2020-04-15T08:00:27, 5.00, C, 2020-04-15T08:00:20, 2020-04-15T08:00:30, 2020-04-15T08:00:29.999Z]")
.build())
.setupConfig(TableConfigOptions.LOCAL_TIME_ZONE, "UTC")
.runSql(String.format(QUERY_TVF, String.format(TUMBLE_TVF, "bid_time")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_TUMBLE_TVF_POSITIVE_OFFSET =
TableTestProgram.of(
"window-table-function-tumble-tvf-positive-offset",
"validates window table function using tumble tvf windows with positive offset")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_SCHEMA)
.consumedBeforeRestore(
"+I[2020-04-15T08:00:05, 4.00, C, 2020-04-15T07:59:56, 2020-04-15T08:00:06, 2020-04-15T08:00:05.999Z]",
"+I[2020-04-15T08:00:07, 2.00, A, 2020-04-15T08:00:06, 2020-04-15T08:00:16, 2020-04-15T08:00:15.999Z]",
"+I[2020-04-15T08:00:09, 5.00, D, 2020-04-15T08:00:06, 2020-04-15T08:00:16, 2020-04-15T08:00:15.999Z]",
"+I[2020-04-15T08:00:11, 3.00, B, 2020-04-15T08:00:06, 2020-04-15T08:00:16, 2020-04-15T08:00:15.999Z]",
"+I[2020-04-15T08:00:13, 1.00, E, 2020-04-15T08:00:06, 2020-04-15T08:00:16, 2020-04-15T08:00:15.999Z]",
"+I[2020-04-15T08:00:17, 6.00, F, 2020-04-15T08:00:16, 2020-04-15T08:00:26, 2020-04-15T08:00:25.999Z]")
.consumedAfterRestore(
"+I[2020-04-15T08:00:21, 4.00, A, 2020-04-15T08:00:16, 2020-04-15T08:00:26, 2020-04-15T08:00:25.999Z]",
"+I[2020-04-15T08:00:27, 5.00, C, 2020-04-15T08:00:26, 2020-04-15T08:00:36, 2020-04-15T08:00:35.999Z]")
.build())
.setupConfig(TableConfigOptions.LOCAL_TIME_ZONE, "UTC")
.runSql(
String.format(
QUERY_TVF, String.format(TUMBLE_TVF_OFFSET, "bid_time", "6")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_TUMBLE_TVF_NEGATIVE_OFFSET =
TableTestProgram.of(
"window-table-function-tumble-tvf-negative-offset",
"validates window table function using tumble tvf windows with negative offset")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_SCHEMA)
.consumedBeforeRestore(
"+I[2020-04-15T08:00:05, 4.00, C, 2020-04-15T08:00:04, 2020-04-15T08:00:14, 2020-04-15T08:00:13.999Z]",
"+I[2020-04-15T08:00:07, 2.00, A, 2020-04-15T08:00:04, 2020-04-15T08:00:14, 2020-04-15T08:00:13.999Z]",
"+I[2020-04-15T08:00:09, 5.00, D, 2020-04-15T08:00:04, 2020-04-15T08:00:14, 2020-04-15T08:00:13.999Z]",
"+I[2020-04-15T08:00:11, 3.00, B, 2020-04-15T08:00:04, 2020-04-15T08:00:14, 2020-04-15T08:00:13.999Z]",
"+I[2020-04-15T08:00:13, 1.00, E, 2020-04-15T08:00:04, 2020-04-15T08:00:14, 2020-04-15T08:00:13.999Z]",
"+I[2020-04-15T08:00:17, 6.00, F, 2020-04-15T08:00:14, 2020-04-15T08:00:24, 2020-04-15T08:00:23.999Z]")
.consumedAfterRestore(
"+I[2020-04-15T08:00:21, 4.00, A, 2020-04-15T08:00:14, 2020-04-15T08:00:24, 2020-04-15T08:00:23.999Z]",
"+I[2020-04-15T08:00:27, 5.00, C, 2020-04-15T08:00:24, 2020-04-15T08:00:34, 2020-04-15T08:00:33.999Z]")
.build())
.setupConfig(TableConfigOptions.LOCAL_TIME_ZONE, "UTC")
.runSql(
String.format(
QUERY_TVF, String.format(TUMBLE_TVF_OFFSET, "bid_time", "-6")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_TUMBLE_TVF_AGG =
TableTestProgram.of(
"window-table-function-tumble-tvf-agg",
"validates window table function using tumble tvf windows with aggregation")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_AGG_SCHEMA)
.consumedBeforeRestore(
"+I[2020-04-15T08:00, 2020-04-15T08:00:10, 11.00]")
.consumedAfterRestore(
"+I[2020-04-15T08:00:10, 2020-04-15T08:00:20, 10.00]",
"+I[2020-04-15T08:00:20, 2020-04-15T08:00:30, 9.00]")
.build())
.runSql(String.format(QUERY_TVF_AGG, String.format(TUMBLE_TVF, "bid_time")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_TUMBLE_TVF_AGG_PROC_TIME =
TableTestProgram.of(
"window-table-function-tumble-tvf-agg-proc-time",
"validates window table function using tumble tvf windows with aggregation and processing time")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_AGG_PROC_TIME_SCHEMA)
.consumedBeforeRestore("+I[21.00]")
.consumedAfterRestore("+I[9.00]")
.build())
.runSql(
String.format(
QUERY_TVF_AGG_PROC_TIME,
String.format(TUMBLE_TVF, "proc_time")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_HOP_TVF =
TableTestProgram.of(
"window-table-function-hop-tvf",
"validates window table function using hop tvf windows")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_SCHEMA)
.consumedBeforeRestore(
"+I[2020-04-15T08:00:05, 4.00, C, 2020-04-15T08:00:05, 2020-04-15T08:00:15, 2020-04-15T08:00:14.999Z]",
"+I[2020-04-15T08:00:05, 4.00, C, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:07, 2.00, A, 2020-04-15T08:00:05, 2020-04-15T08:00:15, 2020-04-15T08:00:14.999Z]",
"+I[2020-04-15T08:00:07, 2.00, A, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:09, 5.00, D, 2020-04-15T08:00:05, 2020-04-15T08:00:15, 2020-04-15T08:00:14.999Z]",
"+I[2020-04-15T08:00:09, 5.00, D, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:11, 3.00, B, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]",
"+I[2020-04-15T08:00:11, 3.00, B, 2020-04-15T08:00:05, 2020-04-15T08:00:15, 2020-04-15T08:00:14.999Z]",
"+I[2020-04-15T08:00:13, 1.00, E, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]",
"+I[2020-04-15T08:00:13, 1.00, E, 2020-04-15T08:00:05, 2020-04-15T08:00:15, 2020-04-15T08:00:14.999Z]",
"+I[2020-04-15T08:00:17, 6.00, F, 2020-04-15T08:00:15, 2020-04-15T08:00:25, 2020-04-15T08:00:24.999Z]",
"+I[2020-04-15T08:00:17, 6.00, F, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]")
.consumedAfterRestore(
"+I[2020-04-15T08:00:21, 4.00, A, 2020-04-15T08:00:20, 2020-04-15T08:00:30, 2020-04-15T08:00:29.999Z]",
"+I[2020-04-15T08:00:21, 4.00, A, 2020-04-15T08:00:15, 2020-04-15T08:00:25, 2020-04-15T08:00:24.999Z]",
"+I[2020-04-15T08:00:27, 5.00, C, 2020-04-15T08:00:25, 2020-04-15T08:00:35, 2020-04-15T08:00:34.999Z]",
"+I[2020-04-15T08:00:27, 5.00, C, 2020-04-15T08:00:20, 2020-04-15T08:00:30, 2020-04-15T08:00:29.999Z]")
.build())
.setupConfig(TableConfigOptions.LOCAL_TIME_ZONE, "UTC")
.runSql(String.format(QUERY_TVF, String.format(HOP_TVF, "bid_time")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_HOP_TVF_AGG =
TableTestProgram.of(
"window-table-function-hop-tvf-agg",
"validates window table function using hop tvf windows with aggregation")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_AGG_SCHEMA)
.consumedBeforeRestore(
"+I[2020-04-15T08:00, 2020-04-15T08:00:10, 11.00]",
"+I[2020-04-15T08:00:05, 2020-04-15T08:00:15, 15.00]")
.consumedAfterRestore(
"+I[2020-04-15T08:00:10, 2020-04-15T08:00:20, 10.00]",
"+I[2020-04-15T08:00:15, 2020-04-15T08:00:25, 10.00]",
"+I[2020-04-15T08:00:20, 2020-04-15T08:00:30, 9.00]",
"+I[2020-04-15T08:00:25, 2020-04-15T08:00:35, 5.00]")
.build())
.runSql(String.format(QUERY_TVF_AGG, String.format(HOP_TVF, "bid_time")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_HOP_TVF_AGG_PROC_TIME =
TableTestProgram.of(
"window-table-function-hop-tvf-agg-proc-time",
"validates window table function using hop tvf windows with aggregation and processing time")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_AGG_PROC_TIME_SCHEMA)
.consumedBeforeRestore("+I[21.00]", "+I[21.00]")
.consumedAfterRestore("+I[9.00]")
.build())
.runSql(
String.format(
QUERY_TVF_AGG_PROC_TIME, String.format(HOP_TVF, "proc_time")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_CUMULATE_TVF =
TableTestProgram.of(
"window-table-function-cumulate-tvf",
"validates window table function using cumulate tvf windows")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_SCHEMA)
.consumedBeforeRestore(
"+I[2020-04-15T08:00:05, 4.00, C, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:07, 2.00, A, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:09, 5.00, D, 2020-04-15T08:00, 2020-04-15T08:00:10, 2020-04-15T08:00:09.999Z]",
"+I[2020-04-15T08:00:11, 3.00, B, 2020-04-15T08:00:10, 2020-04-15T08:00:15, 2020-04-15T08:00:14.999Z]",
"+I[2020-04-15T08:00:11, 3.00, B, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]",
"+I[2020-04-15T08:00:13, 1.00, E, 2020-04-15T08:00:10, 2020-04-15T08:00:15, 2020-04-15T08:00:14.999Z]",
"+I[2020-04-15T08:00:13, 1.00, E, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]",
"+I[2020-04-15T08:00:17, 6.00, F, 2020-04-15T08:00:10, 2020-04-15T08:00:20, 2020-04-15T08:00:19.999Z]")
.consumedAfterRestore(
"+I[2020-04-15T08:00:21, 4.00, A, 2020-04-15T08:00:20, 2020-04-15T08:00:25, 2020-04-15T08:00:24.999Z]",
"+I[2020-04-15T08:00:21, 4.00, A, 2020-04-15T08:00:20, 2020-04-15T08:00:30, 2020-04-15T08:00:29.999Z]",
"+I[2020-04-15T08:00:27, 5.00, C, 2020-04-15T08:00:20, 2020-04-15T08:00:30, 2020-04-15T08:00:29.999Z]")
.build())
.setupConfig(TableConfigOptions.LOCAL_TIME_ZONE, "UTC")
.runSql(String.format(QUERY_TVF, String.format(CUMULATE_TVF, "bid_time")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_CUMULATE_TVF_AGG =
TableTestProgram.of(
"window-table-function-cumulate-tvf-agg",
"validates window table function using cumulate tvf windows with aggregation")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_AGG_SCHEMA)
.consumedBeforeRestore(
"+I[2020-04-15T08:00, 2020-04-15T08:00:10, 11.00]",
"+I[2020-04-15T08:00:10, 2020-04-15T08:00:15, 4.00]")
.consumedAfterRestore(
"+I[2020-04-15T08:00:10, 2020-04-15T08:00:20, 10.00]",
"+I[2020-04-15T08:00:20, 2020-04-15T08:00:25, 4.00]",
"+I[2020-04-15T08:00:20, 2020-04-15T08:00:30, 9.00]")
.build())
.runSql(String.format(QUERY_TVF_AGG, String.format(CUMULATE_TVF, "bid_time")))
.build();
public static final TableTestProgram WINDOW_TABLE_FUNCTION_CUMULATE_TVF_AGG_PROC_TIME =
TableTestProgram.of(
"window-table-function-cumulate-tvf-agg-proc-time",
"validates window table function using cumulate tvf windows with aggregation")
.setupTableSource(SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(SINK_TVF_AGG_PROC_TIME_SCHEMA)
.consumedBeforeRestore("+I[21.00]", "+I[21.00]")
.consumedAfterRestore("+I[9.00]")
.build())
.runSql(
String.format(
QUERY_TVF_AGG_PROC_TIME,
String.format(CUMULATE_TVF, "proc_time")))
.build();
}
| WindowTableFunctionTestPrograms |
java | spring-projects__spring-security | oauth2/oauth2-resource-server/src/main/java/org/springframework/security/oauth2/server/resource/web/reactive/function/client/ServletBearerExchangeFilterFunction.java | {
"start": 2040,
"end": 3595
} | class ____ implements ExchangeFilterFunction {
static final String SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY = "org.springframework.security.SECURITY_CONTEXT_ATTRIBUTES";
@Override
public Mono<ClientResponse> filter(ClientRequest request, ExchangeFunction next) {
// @formatter:off
return oauth2Token().map((token) -> bearer(request, token))
.defaultIfEmpty(request)
.flatMap(next::exchange);
// @formatter:on
}
private Mono<OAuth2Token> oauth2Token() {
// @formatter:off
return Mono.deferContextual(Mono::just)
.cast(Context.class)
.flatMap(this::currentAuthentication)
.filter((authentication) -> authentication.getCredentials() instanceof OAuth2Token)
.map(Authentication::getCredentials)
.cast(OAuth2Token.class);
// @formatter:on
}
private Mono<Authentication> currentAuthentication(Context ctx) {
return Mono.justOrEmpty(getAttribute(ctx, Authentication.class));
}
private <T> T getAttribute(Context ctx, Class<T> clazz) {
// NOTE: SecurityReactorContextConfiguration.SecurityReactorContextSubscriber adds
// this key
if (!ctx.hasKey(SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY)) {
return null;
}
Map<Class<T>, T> attributes = ctx.get(SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY);
return attributes.get(clazz);
}
private ClientRequest bearer(ClientRequest request, OAuth2Token token) {
// @formatter:off
return ClientRequest.from(request)
.headers((headers) -> headers.setBearerAuth(token.getTokenValue()))
.build();
// @formatter:on
}
}
| ServletBearerExchangeFilterFunction |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DigitalOceanEndpointBuilderFactory.java | {
"start": 7842,
"end": 11531
} | interface ____
extends
EndpointProducerBuilder {
default DigitalOceanEndpointBuilder basic() {
return (DigitalOceanEndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedDigitalOceanEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedDigitalOceanEndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* To use a existing configured DigitalOceanClient as client.
*
* The option is a:
* <code>com.myjeeva.digitalocean.impl.DigitalOceanClient</code> type.
*
* Group: advanced
*
* @param digitalOceanClient the value to set
* @return the dsl builder
*/
default AdvancedDigitalOceanEndpointBuilder digitalOceanClient(com.myjeeva.digitalocean.impl.DigitalOceanClient digitalOceanClient) {
doSetProperty("digitalOceanClient", digitalOceanClient);
return this;
}
/**
* To use a existing configured DigitalOceanClient as client.
*
* The option will be converted to a
* <code>com.myjeeva.digitalocean.impl.DigitalOceanClient</code> type.
*
* Group: advanced
*
* @param digitalOceanClient the value to set
* @return the dsl builder
*/
default AdvancedDigitalOceanEndpointBuilder digitalOceanClient(String digitalOceanClient) {
doSetProperty("digitalOceanClient", digitalOceanClient);
return this;
}
}
public | AdvancedDigitalOceanEndpointBuilder |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/DefaultArtifactDeployer.java | {
"start": 1520,
"end": 2437
} | class ____ implements ArtifactDeployer {
@Override
public void deploy(@Nonnull ArtifactDeployerRequest request) {
requireNonNull(request, "request");
InternalSession session = InternalSession.from(request.getSession());
Collection<ProducedArtifact> artifacts = requireNonNull(request.getArtifacts(), "request.artifacts");
RemoteRepository repository = requireNonNull(request.getRepository(), "request.repository");
try {
DeployRequest deployRequest = new DeployRequest()
.setRepository(session.toRepository(repository))
.setArtifacts(session.toArtifacts(artifacts));
session.getRepositorySystem().deploy(session.getSession(), deployRequest);
} catch (DeploymentException e) {
throw new ArtifactDeployerException("Unable to deploy artifacts", e);
}
}
}
| DefaultArtifactDeployer |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/interceptor/SimpleTraceInterceptor.java | {
"start": 1292,
"end": 2754
} | class ____ extends AbstractTraceInterceptor {
/**
* Create a new SimpleTraceInterceptor with a static logger.
*/
public SimpleTraceInterceptor() {
}
/**
* Create a new SimpleTraceInterceptor with dynamic or static logger,
* according to the given flag.
* @param useDynamicLogger whether to use a dynamic logger or a static logger
* @see #setUseDynamicLogger
*/
public SimpleTraceInterceptor(boolean useDynamicLogger) {
setUseDynamicLogger(useDynamicLogger);
}
@Override
protected @Nullable Object invokeUnderTrace(MethodInvocation invocation, Log logger) throws Throwable {
String invocationDescription = getInvocationDescription(invocation);
writeToLog(logger, "Entering " + invocationDescription);
try {
Object rval = invocation.proceed();
writeToLog(logger, "Exiting " + invocationDescription);
return rval;
}
catch (Throwable ex) {
writeToLog(logger, "Exception thrown in " + invocationDescription, ex);
throw ex;
}
}
/**
* Return a description for the given method invocation.
* @param invocation the invocation to describe
* @return the description
*/
protected String getInvocationDescription(MethodInvocation invocation) {
Object target = invocation.getThis();
Assert.state(target != null, "Target must not be null");
String className = target.getClass().getName();
return "method '" + invocation.getMethod().getName() + "' of class [" + className + "]";
}
}
| SimpleTraceInterceptor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/orphan/Mail.java | {
"start": 142,
"end": 653
} | class ____ {
private Integer id;
private String alias;
private User user;
/*package*/ Mail() {
}
/*package*/ Mail(String alias, User user) {
this.alias = alias;
this.user = user;
}
public Integer getId() {
return id;
}
protected void setId(Integer id) {
this.id = id;
}
public String getAlias() {
return alias;
}
public void setAlias(String alias) {
this.alias = alias;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
}
| Mail |
java | apache__rocketmq | test/src/main/java/org/apache/rocketmq/test/client/mq/MQAsyncProducer.java | {
"start": 1140,
"end": 3034
} | class ____ {
private static Logger logger = LoggerFactory.getLogger(MQAsyncProducer.class);
private AbstractMQProducer producer = null;
private long msgNum;
private int intervalMills;
private Thread sendT;
private AtomicBoolean bPause = new AtomicBoolean(false);
public MQAsyncProducer(final AbstractMQProducer producer, final long msgNum,
final int intervalMills) {
this.producer = producer;
this.msgNum = msgNum;
this.intervalMills = intervalMills;
sendT = new Thread(new Runnable() {
public void run() {
for (int i = 0; i < msgNum; i++) {
if (!bPause.get()) {
producer.send();
TestUtil.waitForMonment(intervalMills);
} else {
while (true) {
if (bPause.get()) {
TestUtil.waitForMonment(10);
} else
break;
}
}
}
}
});
}
public void start() {
sendT.start();
}
public void waitSendAll(int waitMills) {
long startTime = System.currentTimeMillis();
while ((producer.getAllMsgBody().size() + producer.getSendErrorMsg().size()) < msgNum) {
if (System.currentTimeMillis() - startTime < waitMills) {
TestUtil.waitForMonment(200);
} else {
logger.error(String.format("time elapse:%s, but the message sending has not finished",
System.currentTimeMillis() - startTime));
break;
}
}
}
public void pauseProducer() {
bPause.set(true);
}
public void notifyProducer() {
bPause.set(false);
}
}
| MQAsyncProducer |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/config/utils/ContentUtils.java | {
"start": 835,
"end": 2758
} | class ____ {
private static final int SHOW_CONTENT_SIZE = 100;
/**
* Verify increment pub content.
*
* @param content content
* @throws IllegalArgumentException if content is not valid
*/
public static void verifyIncrementPubContent(String content) {
if (content == null || content.length() == 0) {
throw new IllegalArgumentException("publish/delete content can not be null");
}
for (int i = 0; i < content.length(); i++) {
char c = content.charAt(i);
if (c == '\r' || c == '\n') {
throw new IllegalArgumentException("publish/delete content can not contain return and linefeed");
}
if (c == Constants.WORD_SEPARATOR.charAt(0)) {
throw new IllegalArgumentException("publish/delete content can not contain(char)2");
}
}
}
public static String getContentIdentity(String content) {
int index = content.indexOf(WORD_SEPARATOR);
if (index == -1) {
throw new IllegalArgumentException("content does not contain separator");
}
return content.substring(0, index);
}
public static String getContent(String content) {
int index = content.indexOf(WORD_SEPARATOR);
if (index == -1) {
throw new IllegalArgumentException("content does not contain separator");
}
return content.substring(index + 1);
}
/**
* Truncate content.
*
* @param content content
* @return truncated content
*/
public static String truncateContent(String content) {
if (content == null) {
return "";
} else if (content.length() <= SHOW_CONTENT_SIZE) {
return content;
} else {
return content.substring(0, SHOW_CONTENT_SIZE) + "...";
}
}
}
| ContentUtils |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/ResourceManagerPartitionLifecycleTest.java | {
"start": 10951,
"end": 11125
} | interface ____ {
void accept(TestingTaskExecutorGatewayBuilder taskExecutorGatewayBuilder) throws Exception;
}
@FunctionalInterface
private | TaskExecutorSetup |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/exchange/codec/ExchangeCodec.java | {
"start": 2690,
"end": 23216
} | class ____ extends TelnetCodec {
// header length.
protected static final int HEADER_LENGTH = 16;
// magic header.
protected static final short MAGIC = (short) 0xdabb;
protected static final byte MAGIC_HIGH = Bytes.short2bytes(MAGIC)[0];
protected static final byte MAGIC_LOW = Bytes.short2bytes(MAGIC)[1];
// message flag.
protected static final byte FLAG_REQUEST = (byte) 0x80;
protected static final byte FLAG_TWOWAY = (byte) 0x40;
protected static final byte FLAG_EVENT = (byte) 0x20;
protected static final int SERIALIZATION_MASK = 0x1f;
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(ExchangeCodec.class);
public Short getMagicCode() {
return MAGIC;
}
@Override
public void encode(Channel channel, ChannelBuffer buffer, Object msg) throws IOException {
if (msg instanceof Request) {
encodeRequest(channel, buffer, (Request) msg);
} else if (msg instanceof Response) {
encodeResponse(channel, buffer, (Response) msg);
} else {
super.encode(channel, buffer, msg);
}
}
@Override
public Object decode(Channel channel, ChannelBuffer buffer) throws IOException {
int readable = buffer.readableBytes();
byte[] header = new byte[Math.min(readable, HEADER_LENGTH)];
buffer.readBytes(header);
return decode(channel, buffer, readable, header);
}
@Override
protected Object decode(Channel channel, ChannelBuffer buffer, int readable, byte[] header) throws IOException {
// check magic number.
if (readable > 0 && header[0] != MAGIC_HIGH || readable > 1 && header[1] != MAGIC_LOW) {
int length = header.length;
if (header.length < readable) {
header = Bytes.copyOf(header, readable);
buffer.readBytes(header, length, readable - length);
}
for (int i = 1; i < header.length - 1; i++) {
if (header[i] == MAGIC_HIGH && header[i + 1] == MAGIC_LOW) {
buffer.readerIndex(buffer.readerIndex() - header.length + i);
header = Bytes.copyOf(header, i);
break;
}
}
return super.decode(channel, buffer, readable, header);
}
// check length.
if (readable < HEADER_LENGTH) {
return DecodeResult.NEED_MORE_INPUT;
}
// get data length.
int len = Bytes.bytes2int(header, 12);
// When receiving response, how to exceed the length, then directly construct a response to the client.
// see more detail from https://github.com/apache/dubbo/issues/7021.
Object obj = finishRespWhenOverPayload(channel, len, header);
if (null != obj) {
return obj;
}
int tt = len + HEADER_LENGTH;
if (readable < tt) {
return DecodeResult.NEED_MORE_INPUT;
}
// limit input stream.
ChannelBufferInputStream is = new ChannelBufferInputStream(buffer, len);
try {
return decodeBody(channel, is, header);
} finally {
if (is.available() > 0) {
try {
if (logger.isWarnEnabled()) {
logger.warn(TRANSPORT_SKIP_UNUSED_STREAM, "", "", "Skip input stream " + is.available());
}
StreamUtils.skipUnusedStream(is);
} catch (IOException e) {
logger.warn(TRANSPORT_SKIP_UNUSED_STREAM, "", "", e.getMessage(), e);
}
}
}
}
protected Object decodeBody(Channel channel, InputStream is, byte[] header) throws IOException {
byte flag = header[2], proto = (byte) (flag & SERIALIZATION_MASK);
// get request id.
long id = Bytes.bytes2long(header, 4);
if ((flag & FLAG_REQUEST) == 0) {
// decode response.
Response res = new Response(id);
if ((flag & FLAG_EVENT) != 0) {
res.setEvent(true);
}
// get status.
byte status = header[3];
res.setStatus(status);
try {
if (status == Response.OK) {
Object data;
if (res.isEvent()) {
byte[] eventPayload = CodecSupport.getPayload(is);
if (CodecSupport.isHeartBeat(eventPayload, proto)) {
// heart beat response data is always null;
data = null;
} else {
data = decodeEventData(
channel,
CodecSupport.deserialize(
channel.getUrl(), new ByteArrayInputStream(eventPayload), proto),
eventPayload);
}
} else {
data = decodeResponseData(
channel,
CodecSupport.deserialize(channel.getUrl(), is, proto),
getRequestData(channel, res, id));
}
res.setResult(data);
} else {
res.setErrorMessage(CodecSupport.deserialize(channel.getUrl(), is, proto)
.readUTF());
}
} catch (Throwable t) {
res.setStatus(Response.CLIENT_ERROR);
res.setErrorMessage(StringUtils.toString(t));
}
return res;
} else {
// decode request.
Request req;
try {
Object data;
if ((flag & FLAG_EVENT) != 0) {
byte[] eventPayload = CodecSupport.getPayload(is);
if (CodecSupport.isHeartBeat(eventPayload, proto)) {
// heart beat response data is always null;
req = new HeartBeatRequest(id);
((HeartBeatRequest) req).setProto(proto);
data = null;
} else {
req = new Request(id);
data = decodeEventData(
channel,
CodecSupport.deserialize(
channel.getUrl(), new ByteArrayInputStream(eventPayload), proto),
eventPayload);
}
req.setEvent(true);
} else {
req = new Request(id);
data = decodeRequestData(channel, CodecSupport.deserialize(channel.getUrl(), is, proto));
}
req.setData(data);
} catch (Throwable t) {
// bad request
req = new Request(id);
req.setBroken(true);
req.setData(t);
}
req.setVersion(Version.getProtocolVersion());
req.setTwoWay((flag & FLAG_TWOWAY) != 0);
return req;
}
}
protected Object getRequestData(Channel channel, Response response, long id) {
DefaultFuture future = DefaultFuture.getFuture(id);
if (future != null) {
Request req = future.getRequest();
if (req != null) {
return req.getData();
}
}
logger.warn(
PROTOCOL_TIMEOUT_SERVER,
"",
"",
"The timeout response finally returned at "
+ (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date()))
+ ", response status is " + response.getStatus() + ", response id is " + response.getId()
+ (channel == null
? ""
: ", channel: " + channel.getLocalAddress() + " -> " + channel.getRemoteAddress())
+ ", please check provider side for detailed result.");
throw new IllegalArgumentException("Failed to find any request match the response, response id: " + id);
}
protected void encodeRequest(Channel channel, ChannelBuffer buffer, Request req) throws IOException {
Serialization serialization = getSerialization(channel, req);
// header.
byte[] header = new byte[HEADER_LENGTH];
// set magic number.
Bytes.short2bytes(MAGIC, header);
// set request and serialization flag.
header[2] = (byte) (FLAG_REQUEST | serialization.getContentTypeId());
if (req.isTwoWay()) {
header[2] |= FLAG_TWOWAY;
}
if (req.isEvent()) {
header[2] |= FLAG_EVENT;
}
// set request id.
Bytes.long2bytes(req.getId(), header, 4);
// encode request data.
int savedWriteIndex = buffer.writerIndex();
buffer.writerIndex(savedWriteIndex + HEADER_LENGTH);
ChannelBufferOutputStream bos = new ChannelBufferOutputStream(buffer);
if (req.isHeartbeat()) {
// heartbeat request data is always null
bos.write(CodecSupport.getNullBytesOf(serialization));
} else {
ObjectOutput out = serialization.serialize(channel.getUrl(), bos);
if (req.isEvent()) {
encodeEventData(channel, out, req.getData());
} else {
encodeRequestData(channel, out, req.getData(), req.getVersion());
}
out.flushBuffer();
if (out instanceof Cleanable) {
((Cleanable) out).cleanup();
}
}
bos.flush();
bos.close();
int len = bos.writtenBytes();
checkPayload(channel, req.getPayload(), len);
Bytes.int2bytes(len, header, 12);
// write
buffer.writerIndex(savedWriteIndex);
buffer.writeBytes(header); // write header.
buffer.writerIndex(savedWriteIndex + HEADER_LENGTH + len);
}
protected void encodeResponse(Channel channel, ChannelBuffer buffer, Response res) throws IOException {
int savedWriteIndex = buffer.writerIndex();
try {
Serialization serialization = getSerialization(channel, res);
// header.
byte[] header = new byte[HEADER_LENGTH];
// set magic number.
Bytes.short2bytes(MAGIC, header);
// set request and serialization flag.
header[2] = serialization.getContentTypeId();
if (res.isHeartbeat()) {
header[2] |= FLAG_EVENT;
}
// set response status.
byte status = res.getStatus();
header[3] = status;
// set request id.
Bytes.long2bytes(res.getId(), header, 4);
buffer.writerIndex(savedWriteIndex + HEADER_LENGTH);
int len;
try (ChannelBufferOutputStream bos = new ChannelBufferOutputStream(buffer)) {
// encode response data or error message.
if (status == Response.OK) {
if (res.isHeartbeat()) {
// heartbeat response data is always null
bos.write(CodecSupport.getNullBytesOf(serialization));
} else {
ObjectOutput out = serialization.serialize(channel.getUrl(), bos);
if (res.isEvent()) {
encodeEventData(channel, out, res.getResult());
} else {
encodeResponseData(channel, out, res.getResult(), res.getVersion());
}
out.flushBuffer();
if (out instanceof Cleanable) {
((Cleanable) out).cleanup();
}
}
} else {
ObjectOutput out = serialization.serialize(channel.getUrl(), bos);
out.writeUTF(res.getErrorMessage());
out.flushBuffer();
if (out instanceof Cleanable) {
((Cleanable) out).cleanup();
}
}
bos.flush();
len = bos.writtenBytes();
}
checkPayload(channel, len);
Bytes.int2bytes(len, header, 12);
// write
buffer.writerIndex(savedWriteIndex);
buffer.writeBytes(header); // write header.
buffer.writerIndex(savedWriteIndex + HEADER_LENGTH + len);
} catch (Throwable t) {
// clear buffer
buffer.writerIndex(savedWriteIndex);
// send error message to Consumer, otherwise, Consumer will wait till timeout.
if (!res.isEvent() && res.getStatus() != Response.BAD_RESPONSE) {
Response r = new Response(res.getId(), res.getVersion());
r.setStatus(Response.BAD_RESPONSE);
if (t instanceof ExceedPayloadLimitException) {
logger.warn(TRANSPORT_EXCEED_PAYLOAD_LIMIT, "", "", t.getMessage(), t);
try {
r.setErrorMessage(t.getMessage());
r.setStatus(Response.SERIALIZATION_ERROR);
channel.send(r);
return;
} catch (RemotingException e) {
logger.warn(
TRANSPORT_FAILED_RESPONSE,
"",
"",
"Failed to send bad_response info back: " + t.getMessage() + ", cause: "
+ e.getMessage(),
e);
}
} else {
// FIXME log error message in Codec and handle in caught() of IoHanndler?
logger.warn(
TRANSPORT_FAILED_RESPONSE,
"",
"",
"Fail to encode response: " + res + ", send bad_response info instead, cause: "
+ t.getMessage(),
t);
try {
r.setErrorMessage("Failed to send response: " + res + ", cause: " + StringUtils.toString(t));
channel.send(r);
return;
} catch (RemotingException e) {
logger.warn(
TRANSPORT_FAILED_RESPONSE,
"",
"",
"Failed to send bad_response info back: " + res + ", cause: " + e.getMessage(),
e);
}
}
}
// Rethrow exception
if (t instanceof IOException) {
throw (IOException) t;
} else if (t instanceof RuntimeException) {
throw (RuntimeException) t;
} else if (t instanceof Error) {
throw (Error) t;
} else {
throw new RuntimeException(t.getMessage(), t);
}
}
}
@Override
protected Object decodeData(ObjectInput in) throws IOException {
return decodeRequestData(in);
}
protected Object decodeRequestData(ObjectInput in) throws IOException {
try {
return in.readObject();
} catch (ClassNotFoundException e) {
throw new IOException(StringUtils.toString("Read object failed.", e));
}
}
protected Object decodeResponseData(ObjectInput in) throws IOException {
try {
return in.readObject();
} catch (ClassNotFoundException e) {
throw new IOException(StringUtils.toString("Read object failed.", e));
}
}
@Override
protected void encodeData(ObjectOutput out, Object data) throws IOException {
encodeRequestData(out, data);
}
private void encodeEventData(ObjectOutput out, Object data) throws IOException {
out.writeEvent((String) data);
}
@Deprecated
protected void encodeHeartbeatData(ObjectOutput out, Object data) throws IOException {
encodeEventData(out, data);
}
protected void encodeRequestData(ObjectOutput out, Object data) throws IOException {
out.writeObject(data);
}
protected void encodeResponseData(ObjectOutput out, Object data) throws IOException {
out.writeObject(data);
}
@Override
protected Object decodeData(Channel channel, ObjectInput in) throws IOException {
return decodeRequestData(channel, in);
}
protected Object decodeEventData(Channel channel, ObjectInput in, byte[] eventBytes) throws IOException {
try {
if (eventBytes != null) {
int dataLen = eventBytes.length;
int threshold = ConfigurationUtils.getSystemConfiguration(
channel.getUrl().getScopeModel())
.getInt("deserialization.event.size", 15);
if (dataLen > threshold) {
throw new IllegalArgumentException("Event data too long, actual size " + threshold + ", threshold "
+ threshold + " rejected for security consideration.");
}
}
return in.readEvent();
} catch (IOException | ClassNotFoundException e) {
throw new IOException(StringUtils.toString("Decode dubbo protocol event failed.", e));
}
}
protected Object decodeRequestData(Channel channel, ObjectInput in) throws IOException {
return decodeRequestData(in);
}
protected Object decodeResponseData(Channel channel, ObjectInput in) throws IOException {
return decodeResponseData(in);
}
protected Object decodeResponseData(Channel channel, ObjectInput in, Object requestData) throws IOException {
return decodeResponseData(channel, in);
}
@Override
protected void encodeData(Channel channel, ObjectOutput out, Object data) throws IOException {
encodeRequestData(channel, out, data);
}
private void encodeEventData(Channel channel, ObjectOutput out, Object data) throws IOException {
encodeEventData(out, data);
}
@Deprecated
protected void encodeHeartbeatData(Channel channel, ObjectOutput out, Object data) throws IOException {
encodeHeartbeatData(out, data);
}
protected void encodeRequestData(Channel channel, ObjectOutput out, Object data) throws IOException {
encodeRequestData(out, data);
}
protected void encodeResponseData(Channel channel, ObjectOutput out, Object data) throws IOException {
encodeResponseData(out, data);
}
protected void encodeRequestData(Channel channel, ObjectOutput out, Object data, String version)
throws IOException {
encodeRequestData(out, data);
}
protected void encodeResponseData(Channel channel, ObjectOutput out, Object data, String version)
throws IOException {
encodeResponseData(out, data);
}
private Object finishRespWhenOverPayload(Channel channel, long size, byte[] header) {
byte flag = header[2];
if ((flag & FLAG_REQUEST) == 0) {
int payload = getPayload(channel);
boolean overPayload = isOverPayload(payload, size);
if (overPayload) {
long reqId = Bytes.bytes2long(header, 4);
Response res = new Response(reqId);
if ((flag & FLAG_EVENT) != 0) {
res.setEvent(true);
}
res.setStatus(Response.CLIENT_ERROR);
String errorMsg =
"Data length too large: " + size + ", max payload: " + payload + ", channel: " + channel;
logger.error(TRANSPORT_EXCEED_PAYLOAD_LIMIT, "", "", errorMsg);
res.setErrorMessage(errorMsg);
return res;
}
}
return null;
}
}
| ExchangeCodec |
java | spring-projects__spring-boot | documentation/spring-boot-actuator-docs/src/test/java/org/springframework/boot/actuate/docs/health/HealthEndpointDocumentationTests.java | {
"start": 3295,
"end": 5239
} | class ____ extends MockMvcEndpointDocumentationTests {
private static final List<FieldDescriptor> componentFields = List.of(
fieldWithPath("status").description("Status of a specific part of the application"),
subsectionWithPath("details").description("Details of the health of a specific part of the application."));
@Test
void health() {
FieldDescriptor status = fieldWithPath("status").description("Overall status of the application.");
FieldDescriptor components = fieldWithPath("components").description("The components that make up the health.");
FieldDescriptor componentStatus = fieldWithPath("components.*.status")
.description("Status of a specific part of the application.");
FieldDescriptor nestedComponents = subsectionWithPath("components.*.components")
.description("The nested components that make up the health.")
.optional();
FieldDescriptor componentDetails = subsectionWithPath("components.*.details")
.description("Details of the health of a specific part of the application. "
+ "Presence is controlled by `management.endpoint.health.show-details`.")
.optional();
assertThat(this.mvc.get().uri("/actuator/health").accept(MediaType.APPLICATION_JSON)).hasStatusOk()
.apply(document("health",
responseFields(status, components, componentStatus, nestedComponents, componentDetails)));
}
@Test
void healthComponent() {
assertThat(this.mvc.get().uri("/actuator/health/db").accept(MediaType.APPLICATION_JSON)).hasStatusOk()
.apply(document("health/component", responseFields(componentFields)));
}
@Test
void healthComponentInstance() {
assertThat(this.mvc.get().uri("/actuator/health/broker/us1").accept(MediaType.APPLICATION_JSON)).hasStatusOk()
.apply(document("health/instance", responseFields(componentFields)));
}
@Configuration(proxyBeanMethods = false)
@ImportAutoConfiguration(DataSourceAutoConfiguration.class)
static | HealthEndpointDocumentationTests |
java | quarkusio__quarkus | extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/exporter/otlp/OtlpExporterProcessor.java | {
"start": 1954,
"end": 2313
} | class ____ {
private static final DotName METRIC_EXPORTER = DotName.createSimple(MetricExporter.class.getName());
private static final DotName LOG_RECORD_EXPORTER = DotName.createSimple(LogRecordExporter.class.getName());
private static final DotName OKHTTP_INTERCEPTOR = DotName.createSimple("okhttp3.Interceptor");
static | OtlpExporterProcessor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/refresh/RefreshAndCollections.java | {
"start": 6061,
"end": 6913
} | class ____ {
@Id
@Column(name = "SKU_ID")
private Long id;
private String name;
@OneToOne(cascade = { CascadeType.PERSIST, CascadeType.MERGE, CascadeType.REFRESH })
@JoinColumn(name = "DEFAULT_PRODUCT_ID")
protected Product defaultProduct;
public Sku() {
}
public Sku(Long id, String name, Product defaultProduct) {
this.id = id;
this.name = name;
this.defaultProduct = defaultProduct;
defaultProduct.setDefaultSku( this );
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Product getDefaultProduct() {
return defaultProduct;
}
public void setDefaultProduct(Product defaultProduct) {
this.defaultProduct = defaultProduct;
}
}
}
| Sku |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/benchmark/BenchmarkMain.java | {
"start": 186,
"end": 3606
} | class ____ {
public static void main(String[] args) throws Exception {
// 注意,byte[]在jackson中是使用base64编码的,不正确的。
BenchmarkExecutor executor = new BenchmarkExecutor();
executor.setExecuteCount(10);
// executor.getCodecList().add(new FastjsonManualCodec());
executor.getCodecList().add(new FastjsonCodec());
// executor.getCodecList().add(new FastjsonBeanToArrayCodec());
// executor.getCodecList().add(new FastjsonGenCodec());
// executor.getCodecList().add(new FastjsonBeanToArrayCodec());
// executor.getCodecList().add(new JacksonCodec());
// executor.getCodecList().add(new Jackson2Codec());
// executor.getCodecList().add(new Jackson2AfterBurnCodec());
//
// executor.getCodecList().add(new SimpleJsonCodec());
// executor.getCodecList().add(new JsonLibCodec());
// executor.getCodecList().add(new JsonSmartCodec());
executor.setLoopCount(1000 * 1000 * 1);
// executor.getCaseList().add(new TradeObjectParse());
// executor.getCaseList().add(new EishayDecodeBytes());
// executor.getCaseList().add(new EishayEncodeOutputStream());
// executor.getCaseList().add(new EishayEncodeToBytes());
executor.getCaseList().add(new EishayDecode()); // 1069
//JDK8_162 1094
//JDK9_01 1214
//JDK9_04 1252
//JDK10 1088
// executor.getCaseList().add(new EishayDecodeByClassName());
// executor.getCaseList().add(new EishayTreeDecode());
// executor.getCaseList().add(new EishayEncode());
// executor.getCaseList().add(new EishayEncodeManual());
// executor.getCaseList().add(new IntArray1000Decode());
// executor.getCaseList().add(new StringArray1000Decode());
// executor.getCaseList().add(new Map1000StringDecode());
// executor.getCaseList().add(new Entity100StringDecode());
// executor.getCaseList().add(new ListBoolean1000Encode());
// executor.getCaseList().add(new ArrayBoolean1000Encode());
// executor.getCaseList().add(new IntArray1000Decode());
// executor.getCaseList().add(new StringArray1000Decode());
// executor.getCaseList().add(new GroupEncode());
// executor.getCaseList().add(new CategoryEncode());
// executor.getCaseList().add(new GroupEncode());
// executor.getCaseList().add(new Map1Decode());
// executor.getCaseList().add(new Entity100IntDecode());
// executor.getCaseList().add(new Entity100StringDecode());
// executor.getCaseList().add(new Entity100IntEncode());
// executor.getCaseList().add(new ArrayByte1000Encode());
// executor.getCaseList().add(new ArrayInt1000Encode());
// executor.getCaseList().add(new ArrayLong1000Encode());
// executor.getCaseList().add(new ArrayString1000Encode());
// executor.getCaseList().add(new ArrayEmptyList1000Encode());
// executor.getCaseList().add(new ArrayEmptyMap1000Encode());
// executor.getCaseList().add(new ArrayObjectEmptyMap1000Encode());
// executor.getCaseList().add(new Map1000Encode());
executor.execute();
}
}
| BenchmarkMain |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/jta/OneToManyLazyJtaSessionClosedBeforeCommitTest.java | {
"start": 1464,
"end": 4121
} | class ____ {
private static final Integer PARENT_ID = 2;
private static final Integer ENTITY_ID = 1;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) throws Exception {
final var emf = scope.getEntityManagerFactory();
TestingJtaPlatformImpl.INSTANCE.getTransactionManager().begin();
var entityManager = emf.createEntityManager();
try {
SetRefIngEntity refIngEntity = new SetRefIngEntity( 3, "ingEntityRef" );
entityManager.persist( refIngEntity );
SetRefEdEntity edEntity = new SetRefEdEntity( PARENT_ID, "edEntity" );
edEntity.setRef( refIngEntity );
entityManager.persist( edEntity );
SetRefIngEntity ingEntity = new SetRefIngEntity( ENTITY_ID, "ingEntity" );
Set<SetRefIngEntity> sries = new HashSet<>();
sries.add( ingEntity );
ingEntity.setReference( edEntity );
edEntity.setReffering( sries );
entityManager.persist( ingEntity );
entityManager.flush();
}
finally {
TestingJtaPlatformImpl.tryCommit();
entityManager.close();
}
TestingJtaPlatformImpl.INSTANCE.getTransactionManager().begin();
entityManager = emf.createEntityManager();
try {
entityManager.unwrap( Session.class ).setHibernateFlushMode( FlushMode.MANUAL );
SetRefEdEntity edEntity = entityManager.find( SetRefEdEntity.class, PARENT_ID );
Set<SetRefIngEntity> reffering = edEntity.getReffering();
SetRefIngEntity ingEntity = reffering.iterator().next();
ingEntity.setReference( null );
reffering.remove( ingEntity );
entityManager.merge( ingEntity );
entityManager.flush();
// clear context in transaction
entityManager.clear();
entityManager.merge( edEntity );
entityManager.flush();
}
finally {
TestingJtaPlatformImpl.tryCommit();
entityManager.close();
}
}
@Test
public void testRevisionCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( entityManager -> {
assertEquals(
Arrays.asList( 1, 2 ),
AuditReaderFactory.get( entityManager ).getRevisions( SetRefIngEntity.class, ENTITY_ID )
);
assertEquals(
Arrays.asList( 1, 2 ),
AuditReaderFactory.get( entityManager ).getRevisions( SetRefEdEntity.class, PARENT_ID )
);
} );
}
@Test
public void testRevisionHistory(EntityManagerFactoryScope scope) {
scope.inEntityManager( entityManager -> {
assertEquals(
Arrays.asList( 1, 2 ),
AuditReaderFactory.get( entityManager ).getRevisions( SetRefIngEntity.class, ENTITY_ID )
);
assertEquals(
Arrays.asList( 1, 2 ),
AuditReaderFactory.get( entityManager ).getRevisions( SetRefEdEntity.class, PARENT_ID )
);
} );
}
}
| OneToManyLazyJtaSessionClosedBeforeCommitTest |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/server/reactive/ReactorHttpHandlerAdapter.java | {
"start": 1350,
"end": 2756
} | class ____ implements BiFunction<HttpServerRequest, HttpServerResponse, Mono<Void>> {
private static final Log logger = HttpLogging.forLogName(ReactorHttpHandlerAdapter.class);
private final HttpHandler httpHandler;
public ReactorHttpHandlerAdapter(HttpHandler httpHandler) {
Assert.notNull(httpHandler, "HttpHandler must not be null");
this.httpHandler = httpHandler;
}
@Override
public Mono<Void> apply(HttpServerRequest reactorRequest, HttpServerResponse reactorResponse) {
NettyDataBufferFactory bufferFactory = new NettyDataBufferFactory(reactorResponse.alloc());
try {
ReactorServerHttpRequest request = new ReactorServerHttpRequest(reactorRequest, bufferFactory);
ServerHttpResponse response = new ReactorServerHttpResponse(reactorResponse, bufferFactory);
if (request.getMethod() == HttpMethod.HEAD) {
response = new HttpHeadResponseDecorator(response);
}
return this.httpHandler.handle(request, response)
.doOnError(ex -> logger.trace(request.getLogPrefix() + "Failed to complete: " + ex.getMessage()))
.doOnSuccess(aVoid -> logger.trace(request.getLogPrefix() + "Handling completed"));
}
catch (URISyntaxException ex) {
if (logger.isDebugEnabled()) {
logger.debug("Failed to get request URI: " + ex.getMessage());
}
reactorResponse.status(HttpResponseStatus.BAD_REQUEST);
return Mono.empty();
}
}
}
| ReactorHttpHandlerAdapter |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/LogProcessor.java | {
"start": 1353,
"end": 5538
} | class ____ extends BaseProcessorSupport implements Traceable, IdAware, RouteIdAware {
private static final Logger LOG = LoggerFactory.getLogger(LogProcessor.class);
private String id;
private String routeId;
private final Expression expression;
private final String message;
private final CamelLogger logger;
private final MaskingFormatter formatter;
private final Set<LogListener> listeners;
public LogProcessor(Expression expression, CamelLogger logger, MaskingFormatter formatter, Set<LogListener> listeners) {
this.expression = expression;
this.message = null;
this.logger = logger;
this.formatter = formatter;
this.listeners = listeners;
}
public LogProcessor(String message, CamelLogger logger, MaskingFormatter formatter, Set<LogListener> listeners) {
this.expression = null;
this.message = message;
this.logger = logger;
this.formatter = formatter;
this.listeners = listeners;
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
if (logger.shouldLog()) {
try {
String msg;
if (expression != null) {
msg = expression.evaluate(exchange, String.class);
} else {
msg = message;
}
if (formatter != null) {
msg = formatter.format(msg);
}
if (listeners != null && !listeners.isEmpty()) {
msg = fireListeners(exchange, msg);
}
logger.doLog(msg);
if (listeners != null && !listeners.isEmpty()) {
closeListeners(exchange, msg);
}
} catch (Exception e) {
exchange.setException(e);
}
}
callback.done(true);
return true;
}
private String fireListeners(Exchange exchange, String message) {
for (LogListener listener : listeners) {
if (listener == null) {
continue;
}
try {
String output = listener.onLog(exchange, logger, message);
message = output != null ? output : message;
} catch (Exception t) {
LOG.warn("Ignoring an exception: {} thrown by: {} caused by: {}", t.getClass().getName(),
listener.getClass().getName(), t.getMessage());
if (LOG.isDebugEnabled()) {
LOG.debug("", t);
}
}
}
return message;
}
private void closeListeners(Exchange exchange, String message) {
for (LogListener listener : listeners) {
if (listener == null) {
continue;
}
try {
listener.afterLog(exchange, logger, message);
} catch (Exception t) {
LOG.warn("Ignoring an exception: {} thrown by: {} caused by: {}", t.getClass().getName(),
listener.getClass().getName(), t.getMessage());
if (LOG.isDebugEnabled()) {
LOG.debug("", t);
}
}
}
}
@Override
public String toString() {
return id;
}
@Override
public String getTraceLabel() {
if (expression != null) {
return "log[" + expression + "]";
} else {
return "log[" + message + "]";
}
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
@Override
public String getRouteId() {
return routeId;
}
@Override
public void setRouteId(String routeId) {
this.routeId = routeId;
}
public String getMessage() {
return message;
}
public Expression getExpression() {
return expression;
}
public CamelLogger getLogger() {
return logger;
}
public MaskingFormatter getLogFormatter() {
return formatter;
}
}
| LogProcessor |
java | apache__kafka | server/src/test/java/org/apache/kafka/server/metrics/ClientMetricsInstanceTest.java | {
"start": 1122,
"end": 4416
} | class ____ {
private ClientMetricsInstance clientInstance;
@BeforeEach
public void setUp() throws UnknownHostException {
Uuid uuid = Uuid.randomUuid();
ClientMetricsInstanceMetadata instanceMetadata = new ClientMetricsInstanceMetadata(uuid,
ClientMetricsTestUtils.requestContext());
clientInstance = new ClientMetricsInstance(uuid, instanceMetadata, 0, 0, null, ClientMetricsConfigs.INTERVAL_MS_DEFAULT);
}
@Test
public void testMaybeUpdateRequestTimestampValid() {
// First request should be accepted.
assertTrue(clientInstance.maybeUpdateGetRequestTimestamp(System.currentTimeMillis()));
assertTrue(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis()));
}
@Test
public void testMaybeUpdateGetRequestAfterElapsedTimeValid() {
assertTrue(clientInstance.maybeUpdateGetRequestTimestamp(System.currentTimeMillis() - ClientMetricsConfigs.INTERVAL_MS_DEFAULT));
// Second request should be accepted as time since last request is greater than the push interval.
assertTrue(clientInstance.maybeUpdateGetRequestTimestamp(System.currentTimeMillis()));
}
@Test
public void testMaybeUpdateGetRequestWithImmediateRetryFail() {
assertTrue(clientInstance.maybeUpdateGetRequestTimestamp(System.currentTimeMillis()));
// Second request should be rejected as time since last request is less than the push interval.
assertFalse(clientInstance.maybeUpdateGetRequestTimestamp(System.currentTimeMillis()));
}
@Test
public void testMaybeUpdatePushRequestAfterElapsedTimeValid() {
assertTrue(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis() - ClientMetricsConfigs.INTERVAL_MS_DEFAULT));
// Second request should be accepted as time since last request is greater than the push interval.
assertTrue(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis()));
}
@Test
public void testMaybeUpdateGetRequestWithImmediateRetryAfterPushFail() {
assertTrue(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis()));
// Next request after push should be rejected as time since last request is less than the push interval.
assertFalse(clientInstance.maybeUpdateGetRequestTimestamp(System.currentTimeMillis() + 1));
}
@Test
public void testMaybeUpdatePushRequestWithImmediateRetryFail() {
assertTrue(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis()));
// Second request should be rejected as time since last request is less than the push interval.
assertFalse(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis()));
}
@Test
public void testMaybeUpdatePushRequestWithImmediateRetryAfterGetValid() {
assertTrue(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis() - ClientMetricsConfigs.INTERVAL_MS_DEFAULT));
assertTrue(clientInstance.maybeUpdateGetRequestTimestamp(System.currentTimeMillis()));
// Next request after get should be accepted.
assertTrue(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis() + 1));
}
}
| ClientMetricsInstanceTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/atomic/longarray/AtomicLongArrayAssert_isNullOrEmpty_Test.java | {
"start": 965,
"end": 1555
} | class ____ extends AtomicLongArrayAssertBaseTest {
@Override
protected AtomicLongArrayAssert invoke_api_method() {
assertions.isNullOrEmpty();
return assertions;
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertEmpty(info(), internalArray());
}
@Override
@Test
public void should_return_this() {
// Disable this test because isNullOrEmpty is void
}
@Test
void should_pass_if_AtomicLongArray_is_null() {
AtomicLongArray array = null;
assertThat(array).isNullOrEmpty();
}
}
| AtomicLongArrayAssert_isNullOrEmpty_Test |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/typelevel/MockitoBeansByNameIntegrationTests.java | {
"start": 2890,
"end": 3163
} | class ____ {
@Bean
ExampleService s1() {
return () -> "prod 1";
}
@Bean
ExampleService s2() {
return () -> "prod 2";
}
@Bean
ExampleService s3() {
return () -> "prod 3";
}
@Bean
ExampleService s4() {
return () -> "prod 4";
}
}
}
| Config |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/BootstrapTestUtilsMergedConfigTests.java | {
"start": 20439,
"end": 20550
} | class ____ {
}
@ContextConfiguration(classes = AppleConfig.class)
@ActiveProfiles("apples")
static | AppleConfig |
java | grpc__grpc-java | googleapis/src/main/java/io/grpc/googleapis/GoogleCloudToProdNameResolver.java | {
"start": 12912,
"end": 13024
} | interface ____ {
HttpURLConnection createConnection(String url) throws IOException;
}
}
| HttpConnectionProvider |
java | apache__camel | core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/ManagedMulticastMBean.java | {
"start": 916,
"end": 1882
} | interface ____ extends ManagedProcessorMBean {
@ManagedAttribute(description = "If enabled then the aggregate method on AggregationStrategy can be called concurrently.")
Boolean isParallelAggregate();
@ManagedAttribute(description = "If enabled then sending messages to the multicasts occurs concurrently.")
Boolean isParallelProcessing();
@ManagedAttribute(description = "If enabled then Camel will process replies out-of-order, eg in the order they come back.")
Boolean isStreaming();
@ManagedAttribute(description = "Will now stop further processing if an exception or failure occurred during processing.")
Boolean isStopOnException();
@ManagedAttribute(description = "Shares the UnitOfWork with the parent and the resource exchange")
Boolean isShareUnitOfWork();
@ManagedAttribute(description = "The total timeout specified in millis, when using parallel processing.")
Long getTimeout();
}
| ManagedMulticastMBean |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMMemoryStateStoreService.java | {
"start": 2493,
"end": 4119
} | class ____ extends NMStateStoreService {
private Map<ApplicationId, ContainerManagerApplicationProto> apps;
private Map<ContainerId, RecoveredContainerState> containerStates;
private Map<TrackerKey, TrackerState> trackerStates;
private Map<Integer, DeletionServiceDeleteTaskProto> deleteTasks;
private RecoveredNMTokensState nmTokenState;
private RecoveredContainerTokensState containerTokenState;
private Map<ApplicationAttemptId, MasterKey> applicationMasterKeys;
private Map<ContainerId, Long> activeTokens;
private Map<ApplicationId, LogDeleterProto> logDeleterState;
private RecoveredAMRMProxyState amrmProxyState;
public NMMemoryStateStoreService() {
super(NMMemoryStateStoreService.class.getName());
}
@Override
protected void initStorage(Configuration conf) {
apps = new HashMap<ApplicationId, ContainerManagerApplicationProto>();
containerStates = new HashMap<ContainerId, RecoveredContainerState>();
nmTokenState = new RecoveredNMTokensState();
applicationMasterKeys = new HashMap<ApplicationAttemptId, MasterKey>();
containerTokenState = new RecoveredContainerTokensState();
activeTokens = new HashMap<ContainerId, Long>();
trackerStates = new HashMap<TrackerKey, TrackerState>();
deleteTasks = new HashMap<Integer, DeletionServiceDeleteTaskProto>();
logDeleterState = new HashMap<ApplicationId, LogDeleterProto>();
amrmProxyState = new RecoveredAMRMProxyState();
}
@Override
protected void startStorage() {
}
@Override
protected void closeStorage() {
}
// Recovery Iterator Implementation.
private | NMMemoryStateStoreService |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java | {
"start": 6082,
"end": 19353
} | class ____ generated. Edit {@code " + getClass().getSimpleName() + "} instead.");
builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL);
if (warnExceptions.isEmpty()) {
builder.superclass(ABSTRACT_MULTIVALUE_FUNCTION_EVALUATOR);
} else {
builder.superclass(ABSTRACT_NULLABLE_MULTIVALUE_FUNCTION_EVALUATOR);
builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL);
builder.addField(WARNINGS, "warnings", Modifier.PRIVATE);
}
builder.addField(EvaluatorImplementer.baseRamBytesUsed(implementation));
builder.addMethod(ctor());
builder.addMethod(name());
builder.addMethod(eval("evalNullable", true));
if (warnExceptions.isEmpty()) {
builder.addMethod(eval("evalNotNullable", false));
}
if (singleValueFunction != null) {
builder.addMethod(evalSingleValued("evalSingleValuedNullable", true));
if (warnExceptions.isEmpty()) {
builder.addMethod(evalSingleValued("evalSingleValuedNotNullable", false));
}
}
if (ascendingFunction != null) {
builder.addMethod(evalAscending("evalAscendingNullable", true));
builder.addMethod(evalAscending("evalAscendingNotNullable", false));
}
builder.addType(factory());
if (warnExceptions.isEmpty() == false) {
builder.addMethod(EvaluatorImplementer.warnings());
}
builder.addMethod(baseRamBytesUsed());
return builder.build();
}
private MethodSpec ctor() {
MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC);
if (warnExceptions.isEmpty() == false) {
builder.addParameter(SOURCE, "source");
}
builder.addParameter(EXPRESSION_EVALUATOR, "field");
builder.addParameter(DRIVER_CONTEXT, "driverContext");
builder.addStatement("super(driverContext, field)");
if (warnExceptions.isEmpty() == false) {
builder.addStatement("this.source = source");
}
return builder.build();
}
private MethodSpec name() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("name").addModifiers(Modifier.PUBLIC);
builder.addAnnotation(Override.class).returns(String.class);
builder.addStatement("return $S", declarationType.getSimpleName());
return builder.build();
}
private MethodSpec evalShell(
String name,
boolean override,
boolean nullable,
String javadoc,
Consumer<MethodSpec.Builder> preflight,
Consumer<MethodSpec.Builder> body
) {
MethodSpec.Builder builder = MethodSpec.methodBuilder(name);
builder.returns(BLOCK).addParameter(BLOCK, "fieldVal");
if (override) {
builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC);
} else {
builder.addModifiers(Modifier.PRIVATE);
}
builder.addJavadoc(javadoc);
TypeName blockType = blockType(fieldType);
preflight.accept(builder);
builder.addStatement("$T v = ($T) fieldVal", blockType, blockType);
builder.addStatement("int positionCount = v.getPositionCount()");
TypeName builderType;
if (nullable) {
builderType = builderType(blockType(resultType));
} else if (resultType.equals(BYTES_REF)) {
builderType = builderType(vectorType(resultType));
} else {
builderType = vectorFixedBuilderType(resultType);
}
builder.beginControlFlow(
"try ($T builder = driverContext.blockFactory().$L(positionCount))",
builderType,
Methods.buildFromFactory(builderType)
);
if (workType != null && false == workType.equals(fieldType) && workType.isPrimitive() == false) {
builder.addStatement("$T work = new $T()", workType, workType);
}
if (fieldType.equals(BYTES_REF)) {
if (fieldType.equals(workType)) {
builder.addStatement("$T firstScratch = new $T()", BYTES_REF, BYTES_REF);
builder.addStatement("$T nextScratch = new $T()", BYTES_REF, BYTES_REF);
} else {
builder.addStatement("$T valueScratch = new $T()", BYTES_REF, BYTES_REF);
}
}
builder.beginControlFlow("for (int p = 0; p < positionCount; p++)");
{
builder.addStatement("int valueCount = v.getValueCount(p)");
if (nullable) {
builder.beginControlFlow("if (valueCount == 0)");
builder.addStatement("builder.appendNull()");
builder.addStatement("continue");
builder.endControlFlow();
}
if (warnExceptions.isEmpty() == false) {
builder.beginControlFlow("try");
body.accept(builder);
String catchPattern = "catch (" + warnExceptions.stream().map(m -> "$T").collect(Collectors.joining(" | ")) + " e)";
builder.nextControlFlow(catchPattern, warnExceptions.stream().map(TypeName::get).toArray());
builder.addStatement("warnings().registerException(e)");
builder.addStatement("builder.appendNull()");
builder.endControlFlow();
} else {
body.accept(builder);
}
}
builder.endControlFlow();
builder.addStatement("return builder.build()$L", nullable ? "" : ".asBlock()");
builder.endControlFlow();
return builder.build();
}
private MethodSpec eval(String name, boolean nullable) {
String javadoc = "Evaluate blocks containing at least one multivalued field.";
return evalShell(name, true, nullable, javadoc, builder -> {
if (ascendingFunction == null) {
return;
}
builder.beginControlFlow("if (fieldVal.mvSortedAscending())");
builder.addStatement("return $L(fieldVal)", name.replace("eval", "evalAscending"));
builder.endControlFlow();
}, builder -> {
builder.addStatement("int first = v.getFirstValueIndex(p)");
if (singleValueFunction != null) {
builder.beginControlFlow("if (valueCount == 1)");
fetch(builder, "value", fieldType, "first", workType.equals(fieldType) ? "firstScratch" : "valueScratch");
singleValueFunction.call(builder);
writeResult(builder);
builder.addStatement("continue");
builder.endControlFlow();
}
builder.addStatement("int end = first + valueCount");
if (processFunction.getParameters().size() == 2) {
// process function evaluates pairwise
if (workType.equals(fieldType) || workType.isPrimitive()) {
fetch(builder, "value", workType, "first", "firstScratch");
builder.beginControlFlow("for (int i = first + 1; i < end; i++)");
{
if (fieldType.equals(BYTES_REF)) {
fetch(builder, "next", workType, "i", "nextScratch");
builder.addStatement("$T.$L(value, next)", declarationType, processFunction.getSimpleName());
} else {
fetch(builder, "next", fieldType, "i", "nextScratch");
builder.addStatement("value = $T.$L(value, next)", declarationType, processFunction.getSimpleName());
}
}
builder.endControlFlow();
if (finishFunction == null) {
builder.addStatement("$T result = value", resultType);
} else {
finishFunction.call(builder, "value");
}
} else {
builder.beginControlFlow("for (int i = first; i < end; i++)");
{
fetch(builder, "value", fieldType, "i", "valueScratch");
builder.addStatement("$T.$L(work, value)", declarationType, processFunction.getSimpleName());
}
builder.endControlFlow();
finishFunction.call(builder, "work");
}
} else {
// process function evaluates position at a time
String scratch = fieldType.equals(BYTES_REF) ? ", valueScratch" : "";
builder.addStatement(
"$T result = $T.$L(v, first, end$L)",
resultType,
declarationType,
processFunction.getSimpleName(),
scratch
);
}
writeResult(builder);
});
}
private MethodSpec evalSingleValued(String name, boolean nullable) {
String javadoc = "Evaluate blocks containing only single valued fields.";
return evalShell(name, true, nullable, javadoc, builder -> {}, builder -> {
builder.addStatement("assert valueCount == 1");
builder.addStatement("int first = v.getFirstValueIndex(p)");
fetch(builder, "value", fieldType, "first", workType.equals(fieldType) ? "firstScratch" : "valueScratch");
singleValueFunction.call(builder);
writeResult(builder);
});
}
private void fetch(MethodSpec.Builder builder, String into, TypeName intoType, String index, String scratchName) {
if (intoType.equals(BYTES_REF)) {
builder.addStatement("$T $L = v.getBytesRef($L, $L)", intoType, into, index, scratchName);
} else if (intoType.equals(fieldType) == false && intoType.isPrimitive()) {
builder.addStatement("$T $L = ($T) v.$L($L)", intoType, into, intoType, getMethod(fieldType), index);
} else {
builder.addStatement("$T $L = v.$L($L)", intoType, into, getMethod(fieldType), index);
}
}
private MethodSpec evalAscending(String name, boolean nullable) {
String javadoc = "Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order.";
return evalShell(name, false, nullable, javadoc, builder -> {}, builder -> {
builder.addStatement("int first = v.getFirstValueIndex(p)");
ascendingFunction.call(builder);
writeResult(builder);
});
}
private void writeResult(MethodSpec.Builder builder) {
if (fieldType.equals(BYTES_REF)) {
builder.addStatement("builder.appendBytesRef(result)");
} else {
builder.addStatement("builder.$L(result)", appendMethod(resultType));
}
}
private TypeSpec factory() {
TypeSpec.Builder builder = TypeSpec.classBuilder("Factory");
builder.addSuperinterface(EXPRESSION_EVALUATOR_FACTORY);
builder.addModifiers(Modifier.PUBLIC, Modifier.STATIC);
if (warnExceptions.isEmpty() == false) {
builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL);
}
builder.addField(EXPRESSION_EVALUATOR_FACTORY, "field", Modifier.PRIVATE, Modifier.FINAL);
builder.addMethod(factoryCtor());
builder.addMethod(factoryGet());
builder.addMethod(factoryToString());
return builder.build();
}
private MethodSpec factoryCtor() {
MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC);
if (warnExceptions.isEmpty() == false) {
builder.addParameter(SOURCE, "source");
}
builder.addParameter(EXPRESSION_EVALUATOR_FACTORY, "field");
if (warnExceptions.isEmpty() == false) {
builder.addStatement("this.source = source");
}
builder.addStatement("this.field = field");
return builder.build();
}
private MethodSpec factoryGet() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("get").addAnnotation(Override.class);
builder.addModifiers(Modifier.PUBLIC);
builder.addParameter(DRIVER_CONTEXT, "context");
builder.returns(implementation);
List<String> args = new ArrayList<>();
if (warnExceptions.isEmpty() == false) {
args.add("source");
}
args.add("field.get(context)");
args.add("context");
builder.addStatement("return new $T($L)", implementation, args.stream().collect(Collectors.joining(", ")));
return builder.build();
}
private MethodSpec factoryToString() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("toString").addAnnotation(Override.class);
builder.addModifiers(Modifier.PUBLIC);
builder.returns(String.class);
builder.addStatement("return $S + field + $S", declarationType.getSimpleName() + "[field=", "]");
return builder.build();
}
/**
* Function "finishing" the computation on a multivalued field. It converts {@link #workType} into {@link #resultType}.
*/
private static | is |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ConcatenatedExtractTest.java | {
"start": 1109,
"end": 3375
} | class ____ {
private String[] testStringArray1 = {"1", "2", "3"};
private int[] testIntArray1 = {1, 2, 3};
private String[] testStringArray2 = {"4", "5", "6"};
private int[] testIntArray2 = {4, 5, 6};
private String[] testStringArray3 = {"7", "8", "9"};
private int[] testIntArray3 = {7, 8, 9};
private Tuple2<String[], int[]>[] testTuple2Array;
private Tuple2<String[], int[]> testTuple2;
private Tuple2<Tuple2<String[], int[]>, Tuple2<String[], int[]>[]> testData;
@SuppressWarnings("unchecked")
@BeforeEach
void setupData() {
testTuple2Array = new Tuple2[2];
testTuple2Array[0] = new Tuple2<>(testStringArray1, testIntArray2);
testTuple2Array[1] = new Tuple2<>(testStringArray2, testIntArray1);
testTuple2 = new Tuple2<>(testStringArray3, testIntArray3);
testData = new Tuple2<>(testTuple2, testTuple2Array);
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
void test1() {
Extractor ext =
new ConcatenatedExtract(new FieldFromTuple(0), new FieldFromTuple(1))
.add(new FieldsFromArray(Integer.class, 2, 1, 0));
int[] expected = {testIntArray3[2], testIntArray3[1], testIntArray3[0]};
assertThat(((Integer[]) ext.extract(testData))[0]).isEqualTo(expected[0]);
assertThat(((Integer[]) ext.extract(testData))[1]).isEqualTo(expected[1]);
assertThat(((Integer[]) ext.extract(testData))[2]).isEqualTo(expected[2]);
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void test2() {
Extractor ext =
new ConcatenatedExtract(
new FieldFromTuple(1), // Tuple2<String[],int[]>[]
new FieldsFromArray(Tuple2.class, 1)) // Tuple2<String[],int[]>[]
.add(new FieldFromArray(0)) // Tuple2<String[],int[]>
.add(new ArrayFromTuple(0)) // Object[] (Containing String[])
.add(new FieldFromArray(0)) // String[]
.add(new FieldFromArray(1)); // String
String expected2 = testStringArray2[1];
assertThat(ext.extract(testData)).isEqualTo(expected2);
}
}
| ConcatenatedExtractTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/reactive/server/HttpHandlerConnectorTests.java | {
"start": 4883,
"end": 5452
} | class ____ implements HttpHandler {
private ServerHttpRequest savedRequest;
private final Function<ServerHttpResponse, Mono<Void>> responseMonoFunction;
public TestHttpHandler(Function<ServerHttpResponse, Mono<Void>> function) {
this.responseMonoFunction = function;
}
public ServerHttpRequest getSavedRequest() {
return this.savedRequest;
}
@Override
public Mono<Void> handle(ServerHttpRequest request, ServerHttpResponse response) {
this.savedRequest = request;
return this.responseMonoFunction.apply(response);
}
}
}
| TestHttpHandler |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/pattern/ThrowablePatternConverterTest.java | {
"start": 4395,
"end": 4551
} | class ____ extends AbstractPropertyTest {
PropertyTest() {
super("%ex", THROWING_METHOD);
}
}
abstract static | PropertyTest |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/throwables/Throwables_assertHasRootCauseMessage_Test.java | {
"start": 996,
"end": 3088
} | class ____ extends ThrowablesBaseTest {
@Test
void should_fail_if_actual_is_null() {
// WHEN
var error = expectAssertionError(() -> throwables.assertHasRootCauseMessage(INFO, null, "message"));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_root_cause_is_null() {
// GIVEN
Throwable throwable = new RuntimeException();
// WHEN
var assertionError = expectAssertionError(() -> throwables.assertHasRootCauseMessage(INFO, throwable, "message"));
// THEN
then(assertionError).hasMessage(shouldHaveRootCauseWithMessage(throwable, null, "message").create());
}
@Test
void should_fail_if_root_cause_has_no_message() {
// GIVEN
Throwable root = new RuntimeException();
Throwable throwable = new RuntimeException(root);
// WHEN
var assertionError = expectAssertionError(() -> throwables.assertHasRootCauseMessage(INFO, throwable, "message"));
// THEN
then(assertionError).hasMessage(shouldHaveRootCauseWithMessage(throwable, root, "message").create());
}
@Test
void should_fail_if_root_cause_message_is_different() {
// GIVEN
Throwable root = new RuntimeException("fail");
Throwable throwable = new RuntimeException(root);
// WHEN
var assertionError = expectAssertionError(() -> throwables.assertHasRootCauseMessage(INFO, throwable, "message"));
// THEN
then(assertionError).hasMessage(shouldHaveRootCauseWithMessage(throwable, root, "message").create());
}
@Test
void should_pass_if_throwable_has_root_cause_with_message_equal_to_expected() {
// GIVEN
Throwable throwable = new RuntimeException(new RuntimeException("expected message"));
// THEN
throwables.assertHasRootCauseMessage(INFO, throwable, "expected message");
}
@Test
void should_pass_if_actual_root_cause_has_no_message_and_expected_message_is_null() {
// GIVEN
Throwable throwable = new RuntimeException(new RuntimeException());
// THEN
throwables.assertHasRootCauseMessage(INFO, throwable, null);
}
}
| Throwables_assertHasRootCauseMessage_Test |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/cache/DefaultCacheAwareContextLoaderDelegate.java | {
"start": 13286,
"end": 13712
} | class ____ test class [%s]. \
This can occur if AOT processing has not taken place for the test suite. It \
can also occur if AOT processing failed for the test class, in which case you \
can consult the logs generated during AOT processing.""".formatted(testClass.getName()));
return new AotMergedContextConfiguration(testClass, contextInitializerClass, mergedConfig, this);
}
return mergedConfig;
}
}
| for |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AutoValueBoxedValuesTest.java | {
"start": 19600,
"end": 20169
} | class ____ {",
" public abstract long longId();",
" @SuppressWarnings(\"AutoValueBoxedValues\")",
" public abstract Long longIdSuppressWarnings();"),
linesWithoutBuilder(
" static Test create(long longId, Long longIdSuppressWarnings) {",
" return new AutoValue_Test(longId, longIdSuppressWarnings);",
" }"),
linesWithBuilder(
" @AutoValue.Builder",
" abstract static | Test |
java | google__dagger | javatests/dagger/functional/modules/ModuleIncludesCollectedFromModuleSuperclasses.java | {
"start": 1352,
"end": 1457
} | class ____ {
<T> Foo<T> createFoo() {
return new Foo<T>();
}
}
@Module
static | FooCreator |
java | google__guice | extensions/grapher/src/com/google/inject/grapher/TransitiveDependencyVisitor.java | {
"start": 1644,
"end": 3112
} | class ____
extends DefaultBindingTargetVisitor<Object, Collection<Key<?>>> {
private Collection<Key<?>> visitHasDependencies(HasDependencies hasDependencies) {
Set<Key<?>> dependencies = Sets.newHashSet();
for (Dependency<?> dependency : hasDependencies.getDependencies()) {
dependencies.add(dependency.getKey());
}
return dependencies;
}
@Override
public Collection<Key<?>> visit(ConstructorBinding<?> binding) {
return visitHasDependencies(binding);
}
@Override
public Collection<Key<?>> visit(ConvertedConstantBinding<?> binding) {
return visitHasDependencies(binding);
}
@Override
public Collection<Key<?>> visit(InstanceBinding<?> binding) {
return visitHasDependencies(binding);
}
@Override
public Collection<Key<?>> visit(LinkedKeyBinding<?> binding) {
return ImmutableSet.<Key<?>>of(binding.getLinkedKey());
}
@Override
public Collection<Key<?>> visit(ProviderBinding<?> binding) {
return ImmutableSet.<Key<?>>of(binding.getProvidedKey());
}
@Override
public Collection<Key<?>> visit(ProviderInstanceBinding<?> binding) {
return visitHasDependencies(binding);
}
@Override
public Collection<Key<?>> visit(ProviderKeyBinding<?> binding) {
return ImmutableSet.<Key<?>>of(binding.getProviderKey());
}
/** @since 4.0 */
@Override
public Collection<Key<?>> visitOther(Binding<?> binding) {
return ImmutableSet.of();
}
}
| TransitiveDependencyVisitor |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/test/java/org/apache/camel/component/salesforce/internal/client/AbstractClientBaseTest.java | {
"start": 2466,
"end": 8362
} | class ____ extends AbstractClientBase {
Client(final SalesforceSession session, final SalesforceLoginConfig loginConfig) throws SalesforceException {
super(null, session, mock(SalesforceHttpClient.class), loginConfig,
1 /* 1 second termination timeout */);
}
@Override
protected SalesforceException createRestException(final Response response, final InputStream responseContent) {
return null;
}
@Override
protected void setAccessToken(final Request request) {
}
}
SalesforceSession session = mock(SalesforceSession.class);
// having client as a field also tests that the same client instance can be
// stopped and started again
final Client client;
public AbstractClientBaseTest() throws SalesforceException {
client = new Client(session, new SalesforceLoginConfig());
when(session.getAccessToken()).thenReturn("token");
}
@BeforeEach
public void startClient() throws Exception {
client.start();
}
@Test
public void shouldDetermineHeadersForRequest() {
final CamelContext context = new DefaultCamelContext();
final Exchange exchange = new DefaultExchange(context);
final Message in = new DefaultMessage(context);
in.setHeader("sforce-auto-assign", "TRUE");
in.setHeader("SFORCE-CALL-OPTIONS", new String[] { "client=SampleCaseSensitiveToken/100", "defaultNamespace=battle" });
in.setHeader("Sforce-Limit-Info", singletonList("per-app-api-usage"));
in.setHeader("x-sfdc-packageversion-clientPackage", "1.0");
in.setHeader("Sforce-Query-Options", "batchSize=1000");
in.setHeader("Non-Related", "Header");
exchange.setIn(in);
final Map<String, List<String>> headers = AbstractClientBase.determineHeaders(exchange);
assertThat(headers).containsOnly(entry("sforce-auto-assign", singletonList("TRUE")),
entry("SFORCE-CALL-OPTIONS", asList("client=SampleCaseSensitiveToken/100", "defaultNamespace=battle")),
entry("Sforce-Limit-Info", singletonList("per-app-api-usage")),
entry("x-sfdc-packageversion-clientPackage", singletonList("1.0")),
entry("Sforce-Query-Options", singletonList("batchSize=1000")));
}
@Test
public void shouldDetermineHeadersFromResponse() {
final Response response = mock(Response.class);
final HttpFields.Mutable httpHeaders = HttpFields.build();
httpHeaders.add("Date", "Mon, 20 May 2013 22:21:46 GMT");
httpHeaders.add("Sforce-Limit-Info", "api-usage=18/5000");
httpHeaders.add("Last-Modified", "Mon, 20 May 2013 20:49:32 GMT");
httpHeaders.add("Content-Type", "application/json;charset=UTF-8");
httpHeaders.add("Transfer-Encoding", "chunked");
when(response.getHeaders()).thenReturn(httpHeaders);
final Map<String, String> headers = AbstractClientBase.determineHeadersFrom(response);
assertThat(headers).containsEntry("Sforce-Limit-Info", "api-usage=18/5000");
}
@Test
public void shouldNotHangIfRequestsHaveFinished() throws Exception {
final Request request = mock(Request.class);
final ArgumentCaptor<Response.CompleteListener> listener = ArgumentCaptor.forClass(Response.CompleteListener.class);
doNothing().when(request).send(listener.capture());
client.doHttpRequest(request, (response, headers, exception) -> {
});
final Result result = mock(Result.class);
final Response response = mock(Response.class);
when(result.getResponse()).thenReturn(response);
when(response.getHeaders()).thenReturn(HttpFields.build());
final HttpRequest salesforceRequest = mock(HttpRequest.class);
when(result.getRequest()).thenReturn(salesforceRequest);
final HttpConversation conversation = mock(HttpConversation.class);
when(salesforceRequest.getConversation()).thenReturn(conversation);
when(conversation.getAttribute(SalesforceSecurityHandler.AUTHENTICATION_REQUEST_ATTRIBUTE))
.thenReturn(salesforceRequest);
final ExecutorService executor = mock(ExecutorService.class);
when(client.httpClient.getWorkerPool()).thenReturn(executor);
// completes the request
listener.getValue().onComplete(result);
StopWatch watch = new StopWatch();
// should not wait
client.stop();
final long elapsed = watch.taken();
assertTrue(elapsed < 10);
}
@Test
public void shouldTimeoutWhenRequestsAreStillOngoing() throws Exception {
client.doHttpRequest(mock(Request.class), (response, headers, exception) -> {
});
// the request never completes
StopWatch watch = new StopWatch();
// will wait for 1 second
client.stop();
final long elapsed = watch.taken();
assertTrue(elapsed > 900 && elapsed < 1100);
}
@Test
public void shouldNotLoginWhenAccessTokenIsNullAndLazyLoginIsTrue() throws SalesforceException {
SalesforceLoginConfig loginConfig = new SalesforceLoginConfig();
loginConfig.setLazyLogin(true);
Client lazyClient = new Client(session, loginConfig);
when(session.getAccessToken()).thenReturn(null);
lazyClient.start();
verify(session, never()).login(null);
}
@Test
public void shouldLoginWhenAccessTokenIsNullAndLazyLoginIsFalse() throws SalesforceException {
SalesforceLoginConfig loginConfig = new SalesforceLoginConfig();
loginConfig.setLazyLogin(false);
Client eagerClient = new Client(session, loginConfig);
when(session.getAccessToken()).thenReturn(null);
eagerClient.start();
verify(session).login(null);
}
}
| Client |
java | quarkusio__quarkus | integration-tests/reactive-messaging-kafka/src/main/java/io/quarkus/it/kafka/KafkaReceivers.java | {
"start": 849,
"end": 1253
} | class ____ {
private final List<Person> people = new CopyOnWriteArrayList<>();
private final List<Fruit> fruits = new CopyOnWriteArrayList<>();
private final List<Record<Pet, Person>> pets = new CopyOnWriteArrayList<>();
private Map<String, String> dataWithMetadata = new ConcurrentHashMap<>();
private List<String> dataForKeyed = new CopyOnWriteArrayList<>();
static | KafkaReceivers |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_168_int.java | {
"start": 402,
"end": 5052
} | class ____ extends MysqlTest {
public void test_1() throws Exception {
String sql = "/*+engine=MPP*/ SELECT ceil(SMALLINT'123')";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT ceil(SMALLINT '123')", stmt.toString());
}
public void test_2() throws Exception {
String sql = "/*+engine=MPP*/ SELECT floor(SMALLINT'123')";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql, SQLParserFeature.PipesAsConcat);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT floor(SMALLINT '123')", stmt.toString());
}
public void test_tiny_1() throws Exception {
String sql = "/*+engine=MPP*/ SELECT ceil(TINYINT'123')";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT ceil(TINYINT '123')", stmt.toString());
}
public void test_tiny_2() throws Exception {
String sql = "/*+engine=MPP*/ SELECT floor(TINYINT'123')";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql, SQLParserFeature.PipesAsConcat);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT floor(TINYINT '123')", stmt.toString());
}
public void test_big_1() throws Exception {
String sql = "/*+engine=MPP*/ SELECT ceil(BIGINT'123')";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT ceil(BIGINT '123')", stmt.toString());
}
public void test_big_2() throws Exception {
String sql = "/*+engine=MPP*/ SELECT floor(BIGINT'123')";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql, SQLParserFeature.PipesAsConcat);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT floor(BIGINT '123')", stmt.toString());
}
public void test_real_1() throws Exception {
String sql = "/*+engine=MPP*/ SELECT floor(REAL '-123.0')";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql, SQLParserFeature.PipesAsConcat);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT floor(REAL '-123.0')", stmt.toString());
}
public void test_real_2() throws Exception {
String sql = "/*+engine=MPP*/ SELECT ceil(REAL '-123.0')";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql, SQLParserFeature.PipesAsConcat);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT ceil(REAL '-123.0')", stmt.toString());
}
public void test_double_3() throws Exception {
String sql = "/*+engine=MPP*/ SELECT floor(CAST(NULL as DOUBLE))";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql, SQLParserFeature.PipesAsConcat);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT floor(CAST(NULL AS DOUBLE))", stmt.toString());
}
public void test_double_4() throws Exception {
String sql = "/*+engine=MPP*/ SELECT floor(CAST(NULL as DECIMAL(25,5)))";
//
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql, SQLParserFeature.PipesAsConcat);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("/*+engine=MPP*/\n" + "SELECT floor(CAST(NULL AS DECIMAL(25, 5)))", stmt.toString());
}
}
| MySqlSelectTest_168_int |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/persistenceunit/TwoPersistenceUnits2LCDisabledEnabledTest.java | {
"start": 932,
"end": 2603
} | class ____ {
@Test
@JiraKey( value = "HHH-11516" )
@Jpa(
annotatedClasses = {TwoPersistenceUnits2LCDisabledEnabledTest.AnEntity.class},
integrationSettings = {
@Setting(name = CacheSettings.JAKARTA_SHARED_CACHE_MODE, value = "ENABLE_SELECTIVE"),
@Setting(name = AvailableSettings.USE_SECOND_LEVEL_CACHE, value = "true")
},
settingProviders = {
@SettingProvider(
settingName = AvailableSettings.LOADED_CLASSES,
provider = LoadedClassesSettingProvider.class
)
}
)
public void testEnabled(EntityManagerFactoryScope scope) {
final EntityPersister persister =
scope.getEntityManagerFactory().unwrap( SessionFactoryImplementor.class )
.getMappingMetamodel().getEntityDescriptor( AnEntity.class );
assertNotNull( persister.getCacheAccessStrategy() );
}
@Test
@JiraKey( value = "HHH-11516" )
@Jpa(
annotatedClasses = {TwoPersistenceUnits2LCDisabledEnabledTest.AnEntity.class},
integrationSettings = {
@Setting(name = CacheSettings.JAKARTA_SHARED_CACHE_MODE, value = "ENABLE_SELECTIVE"),
@Setting(name = AvailableSettings.USE_SECOND_LEVEL_CACHE, value = "false")
},
settingProviders = {
@SettingProvider(
settingName = AvailableSettings.LOADED_CLASSES,
provider = LoadedClassesSettingProvider.class
)
}
)
public void testDisabled(EntityManagerFactoryScope scope) {
final EntityPersister persister =
scope.getEntityManagerFactory().unwrap( SessionFactoryImplementor.class )
.getMappingMetamodel().getEntityDescriptor( AnEntity.class );
assertNull( persister.getCacheAccessStrategy() );
}
public static | TwoPersistenceUnits2LCDisabledEnabledTest |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/SalesforceSession.java | {
"start": 2724,
"end": 18334
} | class ____ extends ServiceSupport {
private static final String JWT_SIGNATURE_ALGORITHM = "SHA256withRSA";
private static final int JWT_CLAIM_WINDOW = 270; // 4.5 min
private static final String JWT_HEADER
= Base64.getUrlEncoder().encodeToString("{\"alg\":\"RS256\"}".getBytes(StandardCharsets.UTF_8));
private static final String OAUTH2_REVOKE_PATH = "/services/oauth2/revoke?token=";
private static final String OAUTH2_TOKEN_PATH = "/services/oauth2/token";
private static final Logger LOG = LoggerFactory.getLogger(SalesforceSession.class);
private final SalesforceHttpClient httpClient;
private final long timeout;
private final SalesforceLoginConfig config;
private final ObjectMapper objectMapper;
private final Set<SalesforceSessionListener> listeners;
private volatile String accessToken;
private volatile String instanceUrl;
private volatile String id;
private volatile String orgId;
private final CamelContext camelContext;
private final AtomicBoolean loggingIn = new AtomicBoolean();
private CountDownLatch latch = new CountDownLatch(1);
public SalesforceSession(CamelContext camelContext, SalesforceHttpClient httpClient, long timeout,
SalesforceLoginConfig config) {
this.camelContext = camelContext;
// validate parameters
ObjectHelper.notNull(httpClient, "httpClient");
ObjectHelper.notNull(config, "SalesforceLoginConfig");
config.validate();
this.httpClient = httpClient;
this.timeout = timeout;
this.config = config;
this.objectMapper = JsonUtils.createObjectMapper();
this.listeners = new CopyOnWriteArraySet<>();
}
public void attemptLoginUntilSuccessful(long backoffIncrement, long maxBackoff) {
// if another thread is logging in, we will just wait until it's successful
if (!loggingIn.compareAndSet(false, true)) {
LOG.debug("waiting on login from another thread");
// TODO: This is janky
try {
while (latch == null) {
Thread.sleep(100);
}
latch.await();
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
throw new RuntimeException("Failed to login.", ex);
}
LOG.debug("done waiting");
return;
}
LOG.debug("Attempting to login, no other threads logging in");
latch = new CountDownLatch(1);
long backoff = 0;
try {
for (;;) {
try {
if (isStoppingOrStopped()) {
return;
}
login(getAccessToken());
break;
} catch (SalesforceException e) {
backoff = backoff + backoffIncrement;
if (backoff > maxBackoff) {
backoff = maxBackoff;
}
LOG.warn(String.format("Salesforce login failed. Pausing for %d milliseconds", backoff), e);
try {
Thread.sleep(backoff);
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
throw new RuntimeException("Failed to login.", ex);
}
}
}
} finally {
loggingIn.set(false);
latch.countDown();
}
}
public String login(String oldToken) throws SalesforceException {
lock.lock();
try {
// check if we need a new session
// this way there's always a single valid session
if (accessToken == null || accessToken.equals(oldToken)) {
// try revoking the old access token before creating a new one
accessToken = oldToken;
if (accessToken != null) {
try {
logout();
} catch (SalesforceException e) {
LOG.warn("Error revoking old access token: {}", e.getMessage(), e);
}
accessToken = null;
}
// login to Salesforce and get session id
final Request loginPost = getLoginRequest(null);
try {
final ContentResponse loginResponse = loginPost.send();
parseLoginResponse(loginResponse, loginResponse.getContentAsString());
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new SalesforceException("Login error: interrupted", e);
} catch (TimeoutException e) {
throw new SalesforceException("Login request timeout: " + e.getMessage(), e);
} catch (ExecutionException e) {
throw new SalesforceException("Unexpected login error: " + e.getCause().getMessage(), e.getCause());
}
}
return accessToken;
} finally {
lock.unlock();
}
}
/**
* Creates login request, allows SalesforceSecurityHandler to create a login request for a failed authentication
* conversation
*
* @return login POST request.
*/
public Request getLoginRequest(HttpConversation conversation) {
final String loginUrl = (instanceUrl == null ? config.getLoginUrl() : instanceUrl) + OAUTH2_TOKEN_PATH;
LOG.info("Login at Salesforce loginUrl: {}", loginUrl);
final Fields fields = new Fields(true);
fields.put("client_id", config.getClientId());
fields.put("format", "json");
final AuthenticationType type = config.getType();
switch (type) {
case USERNAME_PASSWORD:
fields.put("client_secret", config.getClientSecret());
fields.put("grant_type", "password");
fields.put("username", config.getUserName());
fields.put("password", config.getPassword());
break;
case REFRESH_TOKEN:
fields.put("client_secret", config.getClientSecret());
fields.put("grant_type", "refresh_token");
fields.put("refresh_token", config.getRefreshToken());
break;
case JWT:
fields.put("grant_type", "urn:ietf:params:oauth:grant-type:jwt-bearer");
fields.put("assertion", generateJwtAssertion());
break;
case CLIENT_CREDENTIALS:
fields.put("grant_type", "client_credentials");
fields.put("client_secret", config.getClientSecret());
break;
default:
throw new IllegalArgumentException("Unsupported login configuration type: " + type);
}
final Request post;
if (conversation == null) {
post = httpClient.POST(loginUrl);
} else {
post = httpClient.newHttpRequest(conversation, URI.create(loginUrl)).method(HttpMethod.POST);
}
return post.body(new FormRequestContent(fields)).timeout(timeout, TimeUnit.MILLISECONDS);
}
String generateJwtAssertion() {
final long utcPlusWindow = Clock.systemUTC().millis() / 1000 + JWT_CLAIM_WINDOW;
final String audience = config.getJwtAudience() != null ? config.getJwtAudience() : config.getLoginUrl();
final StringBuilder claim = new StringBuilder().append("{\"iss\":\"").append(config.getClientId())
.append("\",\"sub\":\"").append(config.getUserName())
.append("\",\"aud\":\"").append(audience).append("\",\"exp\":\"").append(utcPlusWindow)
.append("\"}");
final StringBuilder token = new StringBuilder(JWT_HEADER).append('.')
.append(Base64.getUrlEncoder().encodeToString(claim.toString().getBytes(StandardCharsets.UTF_8)));
final KeyStoreParameters keyStoreParameters = config.getKeystore();
keyStoreParameters.setCamelContext(camelContext);
try {
final KeyStore keystore = keyStoreParameters.createKeyStore();
final Enumeration<String> aliases = keystore.aliases();
String alias = null;
while (aliases.hasMoreElements()) {
String tmp = aliases.nextElement();
if (keystore.isKeyEntry(tmp)) {
if (alias == null) {
alias = tmp;
} else {
throw new IllegalArgumentException(
"The given keystore `" + keyStoreParameters.getResource()
+ "` contains more than one key entry, expecting only one");
}
}
}
PrivateKey key = (PrivateKey) keystore.getKey(alias, keyStoreParameters.getPassword().toCharArray());
Signature signature = Signature.getInstance(JWT_SIGNATURE_ALGORITHM);
signature.initSign(key);
signature.update(token.toString().getBytes(StandardCharsets.UTF_8));
byte[] signed = signature.sign();
token.append('.').append(Base64.getUrlEncoder().encodeToString(signed));
// Clean the private key from memory
try {
key.destroy();
} catch (javax.security.auth.DestroyFailedException ex) {
LOG.debug("Error destroying private key: {}", ex.getMessage());
}
} catch (IOException | GeneralSecurityException e) {
throw new IllegalStateException(e);
}
return token.toString();
}
/**
* Parses login response, allows SalesforceSecurityHandler to parse a login request for a failed authentication
* conversation.
*/
public void parseLoginResponse(ContentResponse loginResponse, String responseContent)
throws SalesforceException {
lock.lock();
try {
final int responseStatus = loginResponse.getStatus();
switch (responseStatus) {
case HttpStatus.OK_200:
// parse the response to get token
LoginToken token = objectMapper.readValue(responseContent, LoginToken.class);
// don't log token or instance URL for security reasons
LOG.info("Login successful");
accessToken = token.getAccessToken();
instanceUrl = Optional.ofNullable(config.getInstanceUrl()).orElse(token.getInstanceUrl());
id = token.getId();
orgId = id.substring(id.indexOf("id/") + 3, id.indexOf("id/") + 21);
// strip trailing '/'
int lastChar = instanceUrl.length() - 1;
if (instanceUrl.charAt(lastChar) == '/') {
instanceUrl = instanceUrl.substring(0, lastChar);
}
// notify all session listeners
for (SalesforceSessionListener listener : listeners) {
try {
listener.onLogin(accessToken, instanceUrl);
} catch (Exception t) {
LOG.warn("Unexpected error from listener {}: {}", listener, t.getMessage());
}
}
break;
case HttpStatus.BAD_REQUEST_400:
// parse the response to get error
final LoginError error = objectMapper.readValue(responseContent, LoginError.class);
final String errorCode = error.getError();
final String msg = String.format("Login error code:[%s] description:[%s]", error.getError(),
error.getErrorDescription());
final List<RestError> errors = new ArrayList<>();
errors.add(new RestError(errorCode, msg));
throw new SalesforceException(errors, HttpStatus.BAD_REQUEST_400);
default:
throw new SalesforceException(
String.format("Login error status:[%s] reason:[%s]", responseStatus, loginResponse.getReason()),
responseStatus);
}
} catch (IOException e) {
String msg = "Login error: response parse exception " + e.getMessage();
throw new SalesforceException(msg, e);
} finally {
lock.unlock();
}
}
public void logout() throws SalesforceException {
lock.lock();
try {
if (accessToken == null) {
return;
}
try {
String logoutUrl
= (instanceUrl == null ? config.getLoginUrl() : instanceUrl) + OAUTH2_REVOKE_PATH + accessToken;
final Request logoutGet = httpClient.newRequest(logoutUrl).timeout(timeout, TimeUnit.MILLISECONDS);
final ContentResponse logoutResponse = logoutGet.send();
final int statusCode = logoutResponse.getStatus();
if (statusCode == HttpStatus.OK_200) {
LOG.debug("Logout successful");
} else {
LOG.debug("Failed to revoke OAuth token. This is expected if the token is invalid or already expired");
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new SalesforceException("Interrupted while logging out", e);
} catch (ExecutionException e) {
final Throwable ex = e.getCause();
throw new SalesforceException("Unexpected logout exception: " + ex.getMessage(), ex);
} catch (TimeoutException e) {
throw new SalesforceException("Logout request TIMEOUT!", e);
} finally {
// reset session
accessToken = null;
instanceUrl = null;
// notify all session listeners about logout
for (SalesforceSessionListener listener : listeners) {
try {
listener.onLogout();
} catch (Exception t) {
LOG.warn("Unexpected error from listener {}: {}", listener, t.getMessage());
}
}
}
} finally {
lock.unlock();
}
}
public String getAccessToken() {
return accessToken;
}
public String getInstanceUrl() {
return instanceUrl;
}
public String getId() {
return id;
}
public String getOrgId() {
return orgId;
}
public boolean addListener(SalesforceSessionListener listener) {
return listeners.add(listener);
}
public boolean removeListener(SalesforceSessionListener listener) {
return listeners.remove(listener);
}
@Override
public void doStart() throws Exception {
// auto-login at start if needed
login(accessToken);
}
@Override
public void doStop() throws Exception {
// logout
logout();
}
public long getTimeout() {
return timeout;
}
public | SalesforceSession |
java | netty__netty | codec-base/src/main/java/io/netty/handler/codec/MessageToByteEncoder.java | {
"start": 1306,
"end": 1657
} | class ____ extends {@link MessageToByteEncoder}<{@link Integer}> {
* {@code @Override}
* public void encode({@link ChannelHandlerContext} ctx, {@link Integer} msg, {@link ByteBuf} out)
* throws {@link Exception} {
* out.writeInt(msg);
* }
* }
* </pre>
*/
public abstract | IntegerEncoder |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/maptypehandler/MapTypeHandlerTest.java | {
"start": 1211,
"end": 2542
} | class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
// create an SqlSessionFactory
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/maptypehandler/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/maptypehandler/CreateDB.sql");
}
@Test
void shouldGetAUserFromAnnotation() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
User user = mapper.getUser(1, "User1");
Assertions.assertEquals("User1", user.getName());
}
}
@Test
void shouldNotUseMapTypeHandlerEvenIfTheParamIsAMap() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Map<String, Object> params = new HashMap<>();
params.put("id", 1);
params.put("name", "User1");
User user = mapper.getUserXML(params);
assertThat(user).extracting(User::getId, User::getName).containsExactly(1, "User1");
}
}
}
| MapTypeHandlerTest |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/tags/form/SimpleFloatEditor.java | {
"start": 764,
"end": 1011
} | class ____ extends PropertyEditorSupport {
@Override
public void setAsText(String text) throws IllegalArgumentException {
setValue(Float.valueOf(text));
}
@Override
public String getAsText() {
return getValue() + "f";
}
}
| SimpleFloatEditor |
java | apache__kafka | server/src/test/java/org/apache/kafka/server/config/MinInSyncReplicasConfigTest.java | {
"start": 1131,
"end": 1483
} | class ____ {
@ClusterTest(serverProperties = {
@ClusterConfigProperty(key = TopicConfig.MIN_IN_SYNC_REPLICAS_CONFIG, value = "5")
})
public void testDefaultKafkaConfig(ClusterInstance cluster) {
assertEquals(5, cluster.brokers().get(0).logManager().initialDefaultConfig().minInSyncReplicas);
}
}
| MinInSyncReplicasConfigTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java | {
"start": 2745,
"end": 8326
} | class ____ {
protected final static String SERVER_PRINCIPAL_KEY =
"test.ipc.server.principal";
protected final static String CLIENT_PRINCIPAL_KEY =
"test.ipc.client.principal";
protected final static String ADDRESS = "0.0.0.0";
protected final static int PORT = 0;
protected static InetSocketAddress addr;
protected static Configuration conf;
protected void setupConf() {
conf = new Configuration();
// Set RPC engine to protobuf RPC engine
RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine2.class);
UserGroupInformation.setConfiguration(conf);
}
protected static RPC.Builder newServerBuilder(
Configuration serverConf) throws IOException {
// Create server side implementation
PBServerImpl serverImpl = new PBServerImpl();
BlockingService service = TestRpcServiceProtos.TestProtobufRpcProto
.newReflectiveBlockingService(serverImpl);
// Get RPC server for server side implementation
RPC.Builder builder = new RPC.Builder(serverConf)
.setProtocol(TestRpcService.class)
.setInstance(service).setBindAddress(ADDRESS).setPort(PORT);
return builder;
}
protected static RPC.Server setupTestServer(Configuration serverConf,
int numHandlers) throws IOException {
return setupTestServer(serverConf, numHandlers, null);
}
protected static RPC.Server setupTestServer(Configuration serverConf,
int numHandlers,
SecretManager<?> serverSm)
throws IOException {
RPC.Builder builder = newServerBuilder(serverConf);
if (numHandlers > 0) {
builder.setNumHandlers(numHandlers);
}
if (serverSm != null) {
builder.setSecretManager(serverSm);
}
return setupTestServer(builder);
}
protected static RPC.Server setupTestServer(
RPC.Builder builder) throws IOException {
RPC.Server server = builder.build();
server.start();
addr = NetUtils.getConnectAddress(server);
return server;
}
protected static TestRpcService getClient(InetSocketAddress serverAddr, Configuration clientConf)
throws ServiceException {
return getClient(serverAddr, clientConf, null);
}
protected static TestRpcService getClient(InetSocketAddress serverAddr,
Configuration clientConf, RetryPolicy connectionRetryPolicy) throws ServiceException {
return getClient(serverAddr, clientConf, connectionRetryPolicy, null);
}
protected static TestRpcService getClient(InetSocketAddress serverAddr,
Configuration clientConf, final RetryPolicy connectionRetryPolicy,
AtomicBoolean fallbackToSimpleAuth)
throws ServiceException {
try {
return RPC.getProtocolProxy(
TestRpcService.class,
0,
serverAddr,
UserGroupInformation.getCurrentUser(),
clientConf,
NetUtils.getDefaultSocketFactory(clientConf),
RPC.getRpcTimeout(clientConf),
connectionRetryPolicy, fallbackToSimpleAuth).getProxy();
} catch (IOException e) {
throw new ServiceException(e);
}
}
/**
* Try to obtain a proxy of TestRpcService with an index.
* @param serverAddr input server address
* @param clientConf input client configuration
* @param retryPolicy input retryPolicy
* @param index input index
* @return one proxy of TestRpcService
*/
protected static TestRpcService getMultipleClientWithIndex(InetSocketAddress serverAddr,
Configuration clientConf, RetryPolicy retryPolicy, int index)
throws ServiceException, IOException {
MockConnectionId connectionId = new MockConnectionId(serverAddr,
TestRpcService.class, UserGroupInformation.getCurrentUser(),
RPC.getRpcTimeout(clientConf), retryPolicy, clientConf, index);
return getClient(connectionId, clientConf);
}
/**
* Obtain a TestRpcService Proxy by a connectionId.
* @param connId input connectionId
* @param clientConf input configuration
* @return a TestRpcService Proxy
* @throws ServiceException a ServiceException
*/
protected static TestRpcService getClient(ConnectionId connId,
Configuration clientConf) throws ServiceException {
try {
return RPC.getProtocolProxy(
TestRpcService.class,
0,
connId,
clientConf,
NetUtils.getDefaultSocketFactory(clientConf),
null).getProxy();
} catch (IOException e) {
throw new ServiceException(e);
}
}
protected static void stop(Server server, TestRpcService... proxies) {
if (proxies != null) {
for (TestRpcService proxy : proxies) {
if (proxy != null) {
try {
RPC.stopProxy(proxy);
} catch (Exception ignored) {}
}
}
}
if (server != null) {
try {
server.stop();
} catch (Exception ignored) {}
}
}
/**
* Count the number of threads that have a stack frame containing
* the given string
*/
protected static int countThreads(String search) {
ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
int count = 0;
ThreadInfo[] infos = threadBean.getThreadInfo(threadBean.getAllThreadIds(), 20);
for (ThreadInfo info : infos) {
if (info == null) continue;
for (StackTraceElement elem : info.getStackTrace()) {
if (elem.getClassName().contains(search)) {
count++;
break;
}
}
}
return count;
}
public static | TestRpcBase |
java | FasterXML__jackson-databind | src/test/java/perf/ManualReadWithTypeResolution.java | {
"start": 343,
"end": 4640
} | class ____
{
private final String _desc1, _desc2;
private final byte[] _input;
private final Class<?> _inputType;
private final TypeReference<?> _inputTypeRef;
private final ObjectMapper _mapper;
private final int REPS;
protected int hash;
// wait for 3 seconds
protected long startMeasure = System.currentTimeMillis() + 3000L;
protected int roundsDone = 0;
private double[] timeMsecs;
public static void main(String[] args) throws Exception {
new ManualReadWithTypeResolution().doTest();
}
private ManualReadWithTypeResolution() throws IOException {
_desc1 = "Raw type";
_desc2 = "Generic type";
_mapper = new JsonMapper();
_input = "[\"value\",\"123\"]".getBytes("UTF-8");
_inputType = List.class;
_inputTypeRef = new TypeReference<List<String>>() { };
/*
_input = "{\"id\":124}".getBytes("UTF-8");
_inputType = Map.class;
_inputTypeRef = new TypeReference<Map<String,Object>>() { };
*/
REPS = (int) ((double) (15 * 1000 * 1000) / (double) _input.length);
}
// When comparing to simple streaming parsing, uncomment:
private void doTest() throws Exception
{
System.out.printf("Read %d bytes to bind; will do %d repetitions\n",
_input.length, REPS);
System.out.print("Warming up");
int i = 0;
final int TYPES = 2;
timeMsecs = new double[TYPES];
while (true) {
Thread.sleep(100L);
final int type = (i++ % TYPES);
String msg;
double msesc;
switch (type) {
case 0:
msesc = testDeser(REPS, _input, _mapper, _inputType);
msg = _desc1;
break;
case 1:
msesc = testDeser(REPS, _input, _mapper, _inputTypeRef);
msg = _desc2;
break;
default:
throw new Error();
}
updateStats(type, (i % 17) == 0, msg, msesc);
}
}
protected final double testDeser(int reps, byte[] json, ObjectMapper mapper, Class<?> type)
throws IOException
{
long start = System.nanoTime();
Object result = null;
while (--reps >= 0) {
result = mapper.readValue(json, type);
}
hash = result.hashCode();
return _msecsFromNanos(System.nanoTime() - start);
}
protected final double testDeser(int reps, byte[] json, ObjectMapper mapper, TypeReference<?> type)
throws IOException
{
long start = System.nanoTime();
Object result = null;
while (--reps >= 0) {
result = mapper.readValue(json, type);
}
hash = result.hashCode();
return _msecsFromNanos(System.nanoTime() - start);
}
private void updateStats(int type, boolean doGc, String msg, double msecs)
throws Exception
{
final boolean lf = (type == (timeMsecs.length - 1));
if (startMeasure == 0L) { // skip first N seconds
timeMsecs[type] += msecs;
} else {
if (lf) {
if (System.currentTimeMillis() >= startMeasure) {
startMeasure = 0L;
System.out.println(" complete!");
} else {
System.out.print(".");
}
}
return;
}
System.out.printf("Test '%s' [hash: 0x%s] -> %.1f msecs\n", msg, Integer.toHexString(hash), msecs);
if (lf) {
++roundsDone;
if ((roundsDone % 3) == 0 ) {
double den = (double) roundsDone;
System.out.printf("Averages after %d rounds (%s/%s): %.1f / %.1f msecs\n",
(int) den, _desc1, _desc2,
timeMsecs[0] / den, timeMsecs[1] / den);
}
System.out.println();
}
if (doGc) {
System.out.println("[GC]");
Thread.sleep(100L);
System.gc();
Thread.sleep(100L);
}
}
protected final double _msecsFromNanos(long nanos) {
return (nanos / 1000000.0);
}
}
| ManualReadWithTypeResolution |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging-amqp/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/amqp/SecuredAmqpBroker.java | {
"start": 399,
"end": 1643
} | class ____ {
private static EmbeddedActiveMQ server;
private SecuredAmqpBroker() {
// avoid direct instantiation.
}
public static void start() {
try {
server = new EmbeddedActiveMQ();
server.setSecurityManager(new ActiveMQSecurityManager() {
@Override
public boolean validateUser(String username, String password) {
return username.equalsIgnoreCase("artemis") && password.equalsIgnoreCase("artemis");
}
@Override
public boolean validateUserAndRole(String username, String password, Set<Role> set, CheckType checkType) {
return username.equalsIgnoreCase("artemis") && password.equalsIgnoreCase("artemis");
}
});
server.start();
await().until(() -> server.getActiveMQServer().isStarted());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static void stop() {
try {
if (server != null) {
server.stop();
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
| SecuredAmqpBroker |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java | {
"start": 19026,
"end": 19370
} | class ____ extends RuntimeException {
private static final long serialVersionUID = 1L;
public CountersExceededException(String msg) {
super(msg);
}
// Only allows chaining of related exceptions
public CountersExceededException(CountersExceededException cause) {
super(cause);
}
}
}
| CountersExceededException |
java | apache__kafka | storage/src/main/java/org/apache/kafka/storage/internals/log/LazyIndex.java | {
"start": 2055,
"end": 2362
} | interface ____ extends Closeable {
File file();
void updateParentDir(File file);
void renameTo(File file) throws IOException;
boolean deleteIfExists() throws IOException;
void close() throws IOException;
void closeHandler();
}
private static | IndexWrapper |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java | {
"start": 3932,
"end": 4963
} | class ____ implements AggregatorState {
private final GroupingState internalState;
private SingleState(BigArrays bigArrays, int limit, boolean ascending) {
this.internalState = new GroupingState(bigArrays, limit, ascending);
}
public void add(long value) {
internalState.add(0, value);
}
@Override
public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
try (var intValues = driverContext.blockFactory().newConstantIntVector(0, 1)) {
internalState.toIntermediate(blocks, offset, intValues, driverContext);
}
}
Block toBlock(BlockFactory blockFactory) {
try (var intValues = blockFactory.newConstantIntVector(0, 1)) {
return internalState.toBlock(blockFactory, intValues);
}
}
@Override
public void close() {
Releasables.closeExpectNoException(internalState);
}
}
}
| SingleState |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ColumnDefaultValue.java | {
"start": 1744,
"end": 3320
} | class ____ extends DefaultValue {
private final Literal<?> value;
public ColumnDefaultValue(String sql, Literal<?> value) {
this(sql, null /* no expression */, value);
}
public ColumnDefaultValue(Expression expr, Literal<?> value) {
this(null /* no sql */, expr, value);
}
public ColumnDefaultValue(String sql, Expression expr, Literal<?> value) {
super(sql, expr);
this.value = value;
}
/**
* Returns the default value literal. This is the literal value corresponding to
* {@link #getSql()}. For example if the SQL is "current_date()", this literal value
* will be the evaluated current_date() at the time the column was added/altered.
* Spark always sets this value when passing ColumnDefaultValue to createTable/alterTable,
* but {@link Table#columns()} may not do so as some data sources have its own system to do
* column default value back-fill.
*/
@Nullable
public Literal<?> getValue() {
return value;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ColumnDefaultValue that)) return false;
return Objects.equals(getSql(), that.getSql()) &&
Objects.equals(getExpression(), that.getExpression()) &&
value.equals(that.value);
}
@Override
public int hashCode() {
return Objects.hash(getSql(), getExpression(), value);
}
@Override
public String toString() {
return String.format(
"ColumnDefaultValue{sql=%s, expression=%s, value=%s}",
getSql(), getExpression(), value);
}
}
| ColumnDefaultValue |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/reflection/TypeParameterResolverTest.java | {
"start": 20718,
"end": 20768
} | interface ____ extends ParentIface<AA> {
}
| IfaceA |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/reservedstate/ReservedStateHandler.java | {
"start": 1097,
"end": 1488
} | interface ____ declares
* the basic contract for implementing cluster state update handlers that result in a cluster state that is effectively immutable
* by the REST handlers. The only way the reserved cluster state can be updated is through the 'operator mode' actions, e.g. updating
* the file settings.
* </p>
*
* @param <T> The type used to represent the state update
*/
public | class |
java | netty__netty | codec-socks/src/test/java/io/netty/handler/codec/socks/SocksCommonTestUtils.java | {
"start": 797,
"end": 868
} | class ____ {
/**
* A constructor to stop this | SocksCommonTestUtils |
java | apache__camel | components/camel-aws/camel-aws2-eventbridge/src/test/java/org/apache/camel/component/aws2/eventbridge/localstack/EventbridgeRemoveTargetsIT.java | {
"start": 1497,
"end": 3926
} | class ____ extends Aws2EventbridgeBase {
@EndpointInject
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
@Test
public void sendIn() throws Exception {
result.expectedMessageCount(1);
template.send("direct:evs", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(EventbridgeConstants.RULE_NAME, "firstrule");
}
});
template.send("direct:evs-targets", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(EventbridgeConstants.RULE_NAME, "firstrule");
Target target = Target.builder().id("sqs-queue").arn("arn:aws:sqs:eu-west-1:780410022472:camel-connector-test")
.build();
List<Target> targets = new ArrayList<Target>();
targets.add(target);
exchange.getIn().setHeader(EventbridgeConstants.TARGETS, targets);
}
});
template.send("direct:evs-remove-targets", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(EventbridgeConstants.RULE_NAME, "firstrule");
List<String> targets = new ArrayList<String>();
targets.add("sqs-queue");
exchange.getIn().setHeader(EventbridgeConstants.TARGETS_IDS, targets);
}
});
MockEndpoint.assertIsSatisfied(context);
assertEquals(1, result.getExchanges().size());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String awsEndpoint
= "aws2-eventbridge://default?operation=putRule&eventPatternFile=file:src/test/resources/eventpattern.json";
String target = "aws2-eventbridge://default?operation=putTargets";
String removeTargets = "aws2-eventbridge://default?operation=removeTargets";
from("direct:evs").to(awsEndpoint);
from("direct:evs-targets").to(target);
from("direct:evs-remove-targets").to(removeTargets).to("mock:result");
}
};
}
}
| EventbridgeRemoveTargetsIT |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/InjectBeanFromTestConfigTest.java | {
"start": 152,
"end": 575
} | class ____ extends QuarkusGradleWrapperTestBase {
@Test
public void testBasicMultiModuleBuild() throws Exception {
final File projectDir = getProjectDir("inject-bean-from-test-config");
BuildResult build = runGradleWrapper(projectDir, "clean", ":application:test");
assertThat(BuildResult.isSuccessful(build.getTasks().get(":application:test"))).isTrue();
}
}
| InjectBeanFromTestConfigTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/shard/IndexShard.java | {
"start": 217381,
"end": 232606
} | class ____ implements ReferenceManager.RefreshListener {
private final MeanMetric refreshMetric;
private long currentRefreshStartTime;
private Thread callingThread = null;
private RefreshMetricUpdater(MeanMetric refreshMetric) {
this.refreshMetric = refreshMetric;
}
@Override
public void beforeRefresh() {
if (Assertions.ENABLED) {
assert callingThread == null
: "beforeRefresh was called by " + callingThread.getName() + " without a corresponding call to afterRefresh";
callingThread = Thread.currentThread();
}
currentRefreshStartTime = System.nanoTime();
}
@Override
public void afterRefresh(boolean didRefresh) {
if (Assertions.ENABLED) {
assert callingThread != null : "afterRefresh called but not beforeRefresh";
assert callingThread == Thread.currentThread()
: "beforeRefreshed called by a different thread. current ["
+ Thread.currentThread().getName()
+ "], thread that called beforeRefresh ["
+ callingThread.getName()
+ "]";
callingThread = null;
}
refreshMetric.inc(System.nanoTime() - currentRefreshStartTime);
}
}
/**
* Reset the current engine to a new one.
*
* Calls {@link Engine#prepareForEngineReset()} on the current engine, then closes it, and loads a new engine without
* doing any translog recovery.
*
* In general, resetting the engine should be done with care, to consider any in-progress operations and listeners.
* At the moment, this is implemented in serverless for a special case that ensures the engine is prepared for reset.
* Reseting the engine can prevent non-blocking engine refreshes (see {@link Engine#maybeRefresh(String, ActionListener)} to be
* immediately executed, so it is expected that the new engine instance provides refreshed readers (if supported) after the reset.
*
* @param postResetNewEngineConsumer A consumer that will be called with the newly created engine after the reset
* is complete, allowing for post-reset operations on the new engine instance.
* The provided engine reference should not be retained by the consumer.
* @throws Exception if reset could not be completed or previous engine could not be closed
*/
public void resetEngine(Consumer<Engine> postResetNewEngineConsumer) throws Exception {
assert Thread.holdsLock(mutex) == false : "resetting engine under mutex";
assert waitForEngineOrClosedShardListeners.isDone();
assert assertNoEngineResetLock();
Engine previousEngine = null;
Exception primaryException = null;
try {
synchronized (engineMutex) {
verifyNotClosed();
try {
engineResetLock.writeLock().lock();
try {
var engine = getCurrentEngine(false);
engine.prepareForEngineReset();
var newEngine = createEngine(newEngineConfig(replicationTracker));
getAndSetCurrentEngine(newEngine);
onNewEngine(newEngine);
postResetNewEngineConsumer.accept(newEngine);
previousEngine = engine;
} finally {
if (previousEngine != null) {
// Downgrade to read lock for closing the engine
engineResetLock.readLock().lock();
}
engineResetLock.writeLock().unlock();
}
} catch (Exception e) {
// we want to fail the shard in the case prepareForEngineReset throws
failShard("unable to reset engine", e);
throw e;
}
}
onSettingsChanged();
} catch (Exception e) {
primaryException = e;
throw e;
} finally {
if (previousEngine != null) {
assert engineResetLock.isReadLockedByCurrentThread();
try {
IOUtils.close(previousEngine);
} catch (Exception e) {
failShard("unable to close previous engine after reset", e);
if (primaryException != null) {
primaryException.addSuppressed(e);
} else {
throw e;
}
} finally {
engineResetLock.readLock().unlock();
}
}
}
}
/**
* Rollback the current engine to the safe commit, then replay local translog up to the global checkpoint.
*/
void rollbackEngineToGlobalCheckpoint() throws IOException {
assert Thread.holdsLock(mutex) == false : "resetting engine under mutex";
assert assertNoEngineResetLock();
assert getActiveOperationsCount() == OPERATIONS_BLOCKED
: "engine rollback without blocking operations; active operations are [" + getActiveOperationsCount() + ']';
sync(); // persist the global checkpoint to disk
final SeqNoStats seqNoStats = seqNoStats();
final TranslogStats translogStats = translogStats();
// flush to make sure the latest commit, which will be opened by the read-only engine, includes all operations.
flush(new FlushRequest().waitIfOngoing(true));
SetOnce<Engine> newEngineReference = new SetOnce<>();
final long globalCheckpoint = getLastKnownGlobalCheckpoint();
assert globalCheckpoint == getLastSyncedGlobalCheckpoint();
synchronized (engineMutex) {
verifyNotClosed();
// we must create both new read-only engine and new read-write engine under engineMutex to ensure snapshotStoreMetadata,
// acquireXXXCommit and close works.
final Engine readOnlyEngine = new ReadOnlyEngine(
newEngineConfig(replicationTracker),
seqNoStats,
translogStats,
false,
Function.identity(),
true,
false
) {
@Override
public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) {
synchronized (engineMutex) {
if (newEngineReference.get() == null) {
throw new AlreadyClosedException("engine was closed");
}
// ignore flushFirst since we flushed above and we do not want to interfere with ongoing translog replay
return newEngineReference.get().acquireLastIndexCommit(false);
}
}
@Override
public IndexCommitRef acquireSafeIndexCommit() {
synchronized (engineMutex) {
if (newEngineReference.get() == null) {
throw new AlreadyClosedException("engine was closed");
}
return newEngineReference.get().acquireSafeIndexCommit();
}
}
@Override
public void close() throws IOException {
Engine newEngine;
synchronized (engineMutex) {
newEngine = newEngineReference.get();
if (newEngine == getEngineOrNull()) {
// we successfully installed the new engine so do not close it.
newEngine = null;
}
}
IOUtils.close(super::close, newEngine);
}
};
IOUtils.close(getAndSetCurrentEngine(readOnlyEngine));
newEngineReference.set(engineFactory.newReadWriteEngine(newEngineConfig(replicationTracker)));
onNewEngine(newEngineReference.get());
}
final Engine.TranslogRecoveryRunner translogRunner = (engine, snapshot) -> runTranslogRecovery(
engine,
snapshot,
Engine.Operation.Origin.LOCAL_RESET,
() -> {
// TODO: add a dedicate recovery stats for the reset translog
}
);
newEngineReference.get().recoverFromTranslog(translogRunner, globalCheckpoint);
newEngineReference.get().refresh("reset_engine");
synchronized (engineMutex) {
verifyNotClosed();
IOUtils.close(getAndSetCurrentEngine(newEngineReference.get()));
// We set active because we are now writing operations to the engine; this way,
// if we go idle after some time and become inactive, we still give sync'd flush a chance to run.
active.set(true);
}
// time elapses after the engine is created above (pulling the config settings) until we set the engine reference, during
// which settings changes could possibly have happened, so here we forcefully push any config changes to the new engine.
onSettingsChanged();
}
/**
* Returns the maximum sequence number of either update or delete operations have been processed in this shard
* or the sequence number from {@link #advanceMaxSeqNoOfUpdatesOrDeletes(long)}. An index request is considered
* as an update operation if it overwrites the existing documents in Lucene index with the same document id.
* <p>
* The primary captures this value after executes a replication request, then transfers it to a replica before
* executing that replication request on a replica.
*/
public long getMaxSeqNoOfUpdatesOrDeletes() {
return getEngine().getMaxSeqNoOfUpdatesOrDeletes();
}
/**
* A replica calls this method to advance the max_seq_no_of_updates marker of its engine to at least the max_seq_no_of_updates
* value (piggybacked in a replication request) that it receives from its primary before executing that replication request.
* The receiving value is at least as high as the max_seq_no_of_updates on the primary was when any of the operations of that
* replication request were processed on it.
* <p>
* A replica shard also calls this method to bootstrap the max_seq_no_of_updates marker with the value that it received from
* the primary in peer-recovery, before it replays remote translog operations from the primary. The receiving value is at least
* as high as the max_seq_no_of_updates on the primary was when any of these operations were processed on it.
* <p>
* These transfers guarantee that every index/delete operation when executing on a replica engine will observe this marker a value
* which is at least the value of the max_seq_no_of_updates marker on the primary after that operation was executed on the primary.
*
* @see #acquireReplicaOperationPermit(long, long, long, ActionListener, Executor)
* @see RecoveryTarget#indexTranslogOperations(List, int, long, long, RetentionLeases, long, ActionListener)
*/
public void advanceMaxSeqNoOfUpdatesOrDeletes(long seqNo) {
getEngine().advanceMaxSeqNoOfUpdatesOrDeletes(seqNo);
}
/**
* Performs the pre-closing checks on the {@link IndexShard}.
*
* @throws IllegalStateException if the sanity checks failed
*/
public void verifyShardBeforeIndexClosing() throws IllegalStateException {
getEngine().verifyEngineBeforeIndexClosing();
}
RetentionLeaseSyncer getRetentionLeaseSyncer() {
return retentionLeaseSyncer;
}
public long getRelativeTimeInNanos() {
return relativeTimeInNanosSupplier.getAsLong();
}
@Override
public String toString() {
return "IndexShard(shardRouting=" + shardRouting + ")";
}
/**
* @deprecated use {@link #waitForPrimaryTermAndGeneration(long, long, ActionListener)} instead.
*/
@Deprecated
public void waitForSegmentGeneration(long segmentGeneration, ActionListener<Long> listener) {
waitForPrimaryTermAndGeneration(getOperationPrimaryTerm(), segmentGeneration, listener);
}
private void checkAndCallWaitForEngineOrClosedShardListeners() {
if (getEngineOrNull() != null || state == IndexShardState.CLOSED) {
waitForEngineOrClosedShardListeners.onResponse(null);
}
}
/**
* Registers a listener for an event when the shard opens the engine or is the shard is closed
*/
public void waitForEngineOrClosedShard(ActionListener<Void> listener) {
waitForEngineOrClosedShardListeners.addListener(listener);
}
/**
* Registers a listener for an event when the shard advances to the provided primary term and segment generation.
* Completes the listener with a {@link IndexShardClosedException} if the shard is closed.
*/
public void waitForPrimaryTermAndGeneration(long primaryTerm, long segmentGeneration, ActionListener<Long> listener) {
waitForEngineOrClosedShard(listener.delegateFailureAndWrap((l, ignored) -> {
if (state == IndexShardState.CLOSED) {
l.onFailure(new IndexShardClosedException(shardId));
} else {
getEngine().addPrimaryTermAndGenerationListener(primaryTerm, segmentGeneration, l);
}
}));
}
/**
* Ensures that the shard is ready to perform mutable operations.
* This method is particularly useful when the shard initializes its internal
* {@link org.elasticsearch.index.engine.Engine} lazily, as it may take some time before becoming mutable.
*
* The provided listener will be notified once the shard is ready for mutating operations.
*
* @param listener the listener to be notified when the shard is mutable
*/
public void ensureMutable(ActionListener<Void> listener, boolean permitAcquired) {
indexEventListener.beforeIndexShardMutableOperation(this, permitAcquired, listener);
}
// package-private for tests
EngineResetLock getEngineResetLock() {
return engineResetLock;
}
private boolean assertNoEngineResetLock() {
assert engineResetLock.isReadLockedByCurrentThread() == false
: "Expected current thread ["
+ Thread.currentThread()
+ "] to not hold an engine read lock (lock ordering should be: engineMutex -> engineResetLock -> mutex)";
assert engineResetLock.isWriteLockedByCurrentThread() == false
: "Expected current thread ["
+ Thread.currentThread()
+ "] to not hold the engine write lock (lock ordering should be: engineMutex -> engineResetLock -> mutex)";
return true;
}
}
| RefreshMetricUpdater |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/IndexModuleTests.java | {
"start": 42347,
"end": 42621
} | class ____ implements IndexModule.DirectoryWrapper {
@Override
public Directory wrap(Directory delegate, ShardRouting shardRouting) {
return new WrappedDirectory(delegate, shardRouting);
}
}
private static final | TestDirectoryWrapper |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/env/EnvironmentEndpoint.java | {
"start": 12489,
"end": 13248
} | class ____ {
private final @Nullable Object value;
private final @Nullable String origin;
private final String @Nullable [] originParents;
private PropertyValueDescriptor(@Nullable Object value, @Nullable Origin origin) {
this.value = value;
this.origin = (origin != null) ? origin.toString() : null;
List<Origin> originParents = Origin.parentsFrom(origin);
this.originParents = originParents.isEmpty() ? null
: originParents.stream().map(Object::toString).toArray(String[]::new);
}
public @Nullable Object getValue() {
return this.value;
}
public @Nullable String getOrigin() {
return this.origin;
}
public String @Nullable [] getOriginParents() {
return this.originParents;
}
}
}
| PropertyValueDescriptor |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/validation/annotation/ValidationAnnotationUtils.java | {
"start": 1023,
"end": 2710
} | class ____ {
private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0];
/**
* Determine any validation hints by the given annotation.
* <p>This implementation checks for Spring's
* {@link org.springframework.validation.annotation.Validated},
* {@code @jakarta.validation.Valid}, and custom annotations whose
* name starts with "Valid" which may optionally declare validation
* hints through the "value" attribute.
* @param ann the annotation (potentially a validation annotation)
* @return the validation hints to apply (possibly an empty array),
* or {@code null} if this annotation does not trigger any validation
*/
public static Object @Nullable [] determineValidationHints(Annotation ann) {
// Direct presence of @Validated ?
if (ann instanceof Validated validated) {
return validated.value();
}
// Direct presence of @Valid ?
Class<? extends Annotation> annotationType = ann.annotationType();
if ("jakarta.validation.Valid".equals(annotationType.getName())) {
return EMPTY_OBJECT_ARRAY;
}
// Meta presence of @Validated ?
Validated validatedAnn = AnnotationUtils.getAnnotation(ann, Validated.class);
if (validatedAnn != null) {
return validatedAnn.value();
}
// Custom validation annotation ?
if (annotationType.getSimpleName().startsWith("Valid")) {
return convertValidationHints(AnnotationUtils.getValue(ann));
}
// No validation triggered
return null;
}
private static Object[] convertValidationHints(@Nullable Object hints) {
if (hints == null) {
return EMPTY_OBJECT_ARRAY;
}
return (hints instanceof Object[] objectHints ? objectHints : new Object[] {hints});
}
}
| ValidationAnnotationUtils |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/aggregate/DB2AggregateSupport.java | {
"start": 33139,
"end": 33939
} | class ____ extends AggregateJsonWriteExpression
implements WriteExpressionRenderer {
private final String path;
RootJsonWriteExpression(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
this.path = aggregateColumn.getSelectionExpression();
initializeSubExpressions( aggregateColumn, columns );
}
@Override
public void render(
SqlAppender sqlAppender,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression aggregateColumnWriteExpression,
String qualifier) {
final String basePath;
if ( qualifier == null || qualifier.isBlank() ) {
basePath = path;
}
else {
basePath = qualifier + "." + path;
}
append( sqlAppender, basePath, translator, aggregateColumnWriteExpression );
}
}
private static | RootJsonWriteExpression |
java | apache__camel | components/camel-thrift/src/test/java/org/apache/camel/component/thrift/generated/Calculator.java | {
"start": 144948,
"end": 145239
} | class ____ implements org.apache.thrift.scheme.SchemeFactory {
@Override
public calculate_resultStandardScheme getScheme() {
return new calculate_resultStandardScheme();
}
}
private static | calculate_resultStandardSchemeFactory |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/execution/librarycache/BlobLibraryCacheManagerTest.java | {
"start": 2872,
"end": 3150
} | class ____: {0}")
public static List<Boolean> useSystemClassLoader() {
return Arrays.asList(true, false);
}
@Parameterized.Parameter public boolean wrapsSystemClassLoader;
/**
* Tests that the {@link BlobLibraryCacheManager} cleans up after the | loader |
java | quarkusio__quarkus | extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/KnownOidcProvidersTest.java | {
"start": 710,
"end": 33712
} | class ____ {
@Test
public void testAcceptGitHubProperties() throws Exception {
OidcTenantConfig tenant = new OidcTenantConfig();
tenant.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
OidcTenantConfig config = OidcUtils.mergeTenantConfig(tenant, KnownOidcProviders.provider(Provider.GITHUB));
assertEquals(OidcUtils.DEFAULT_TENANT_ID, config.getTenantId().get());
assertEquals(ApplicationType.WEB_APP, config.getApplicationType().get());
assertFalse(config.isDiscoveryEnabled().get());
assertEquals("https://github.com/login/oauth", config.getAuthServerUrl().get());
assertEquals("authorize", config.getAuthorizationPath().get());
assertEquals("access_token", config.getTokenPath().get());
assertEquals("https://api.github.com/user", config.getUserInfoPath().get());
assertFalse(config.authentication.idTokenRequired.get());
assertTrue(config.authentication.userInfoRequired.get());
assertTrue(config.token.verifyAccessTokenWithUserInfo.get());
assertEquals(List.of("user:email"), config.authentication.scopes.get());
assertEquals("name", config.getToken().getPrincipalClaim().get());
}
@Test
public void testOverrideGitHubProperties() throws Exception {
OidcTenantConfig tenant = new OidcTenantConfig();
tenant.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
tenant.setApplicationType(ApplicationType.HYBRID);
tenant.setDiscoveryEnabled(true);
tenant.setAuthServerUrl("http://localhost/wiremock");
tenant.setAuthorizationPath("authorization");
tenant.setTokenPath("tokens");
tenant.setUserInfoPath("userinfo");
tenant.authentication.setIdTokenRequired(true);
tenant.authentication.setUserInfoRequired(false);
tenant.token.setVerifyAccessTokenWithUserInfo(false);
tenant.authentication.setScopes(List.of("write"));
tenant.token.setPrincipalClaim("firstname");
OidcTenantConfig config = OidcUtils.mergeTenantConfig(tenant, KnownOidcProviders.provider(Provider.GITHUB));
assertEquals(OidcUtils.DEFAULT_TENANT_ID, config.getTenantId().get());
assertEquals(ApplicationType.HYBRID, config.getApplicationType().get());
assertTrue(config.isDiscoveryEnabled().get());
assertEquals("http://localhost/wiremock", config.getAuthServerUrl().get());
assertEquals("authorization", config.getAuthorizationPath().get());
assertEquals("tokens", config.getTokenPath().get());
assertEquals("userinfo", config.getUserInfoPath().get());
assertTrue(config.authentication.idTokenRequired.get());
assertFalse(config.authentication.userInfoRequired.get());
assertFalse(config.token.verifyAccessTokenWithUserInfo.get());
assertEquals(List.of("write"), config.authentication.scopes.get());
assertEquals("firstname", config.getToken().getPrincipalClaim().get());
}
@Test
public void testAcceptTwitterProperties() throws Exception {
OidcTenantConfig tenant = new OidcTenantConfig();
tenant.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
OidcTenantConfig config = OidcUtils.mergeTenantConfig(tenant, KnownOidcProviders.provider(Provider.TWITTER));
assertEquals(OidcUtils.DEFAULT_TENANT_ID, config.getTenantId().get());
assertEquals(ApplicationType.WEB_APP, config.getApplicationType().get());
assertFalse(config.isDiscoveryEnabled().get());
assertEquals("https://api.twitter.com/2/oauth2", config.getAuthServerUrl().get());
assertEquals("https://twitter.com/i/oauth2/authorize", config.getAuthorizationPath().get());
assertEquals("token", config.getTokenPath().get());
assertEquals("https://api.twitter.com/2/users/me", config.getUserInfoPath().get());
assertFalse(config.authentication.idTokenRequired.get());
assertTrue(config.authentication.userInfoRequired.get());
assertFalse(config.authentication.addOpenidScope.get());
assertEquals(List.of("offline.access", "tweet.read", "users.read"), config.authentication.scopes.get());
assertTrue(config.authentication.pkceRequired.get());
}
@Test
public void testOverrideTwitterProperties() throws Exception {
OidcTenantConfig tenant = new OidcTenantConfig();
tenant.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
tenant.setApplicationType(ApplicationType.HYBRID);
tenant.setDiscoveryEnabled(true);
tenant.setAuthServerUrl("http://localhost/wiremock");
tenant.setAuthorizationPath("authorization");
tenant.setTokenPath("tokens");
tenant.setUserInfoPath("userinfo");
tenant.authentication.setIdTokenRequired(true);
tenant.authentication.setUserInfoRequired(false);
tenant.authentication.setAddOpenidScope(true);
tenant.authentication.setPkceRequired(false);
tenant.authentication.setScopes(List.of("write"));
OidcTenantConfig config = OidcUtils.mergeTenantConfig(tenant, KnownOidcProviders.provider(Provider.TWITTER));
assertEquals(OidcUtils.DEFAULT_TENANT_ID, config.getTenantId().get());
assertEquals(ApplicationType.HYBRID, config.getApplicationType().get());
assertTrue(config.isDiscoveryEnabled().get());
assertEquals("http://localhost/wiremock", config.getAuthServerUrl().get());
assertEquals("authorization", config.getAuthorizationPath().get());
assertEquals("tokens", config.getTokenPath().get());
assertEquals("userinfo", config.getUserInfoPath().get());
assertTrue(config.authentication.idTokenRequired.get());
assertFalse(config.authentication.userInfoRequired.get());
assertEquals(List.of("write"), config.authentication.scopes.get());
assertTrue(config.authentication.addOpenidScope.get());
assertFalse(config.authentication.pkceRequired.get());
}
@Test
public void testAcceptMastodonProperties() throws Exception {
OidcTenantConfig tenant = new OidcTenantConfig();
tenant.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
OidcTenantConfig config = OidcUtils.mergeTenantConfig(tenant, KnownOidcProviders.provider(Provider.MASTODON));
assertEquals(OidcUtils.DEFAULT_TENANT_ID, config.getTenantId().get());
assertEquals(ApplicationType.WEB_APP, config.getApplicationType().get());
assertFalse(config.isDiscoveryEnabled().get());
assertEquals("https://mastodon.social", config.getAuthServerUrl().get());
assertEquals("/oauth/authorize", config.getAuthorizationPath().get());
assertEquals("/oauth/token", config.getTokenPath().get());
assertEquals("/api/v1/accounts/verify_credentials", config.getUserInfoPath().get());
assertFalse(config.authentication.idTokenRequired.get());
assertTrue(config.authentication.userInfoRequired.get());
assertFalse(config.authentication.addOpenidScope.get());
assertEquals(List.of("read"), config.authentication.scopes.get());
}
@Test
public void testOverrideMastodonProperties() throws Exception {
OidcTenantConfig tenant = new OidcTenantConfig();
tenant.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
tenant.setApplicationType(ApplicationType.HYBRID);
tenant.setDiscoveryEnabled(true);
tenant.setAuthServerUrl("http://localhost/wiremock");
tenant.setAuthorizationPath("authorization");
tenant.setTokenPath("tokens");
tenant.setUserInfoPath("userinfo");
tenant.authentication.setIdTokenRequired(true);
tenant.authentication.setUserInfoRequired(false);
tenant.authentication.setAddOpenidScope(true);
tenant.authentication.setScopes(List.of("write"));
OidcTenantConfig config = OidcUtils.mergeTenantConfig(tenant, KnownOidcProviders.provider(Provider.MASTODON));
assertEquals(OidcUtils.DEFAULT_TENANT_ID, config.getTenantId().get());
assertEquals(ApplicationType.HYBRID, config.getApplicationType().get());
assertTrue(config.isDiscoveryEnabled().get());
assertEquals("http://localhost/wiremock", config.getAuthServerUrl().get());
assertEquals("authorization", config.getAuthorizationPath().get());
assertEquals("tokens", config.getTokenPath().get());
assertEquals("userinfo", config.getUserInfoPath().get());
assertTrue(config.authentication.idTokenRequired.get());
assertFalse(config.authentication.userInfoRequired.get());
assertEquals(List.of("write"), config.authentication.scopes.get());
assertTrue(config.authentication.addOpenidScope.get());
}
@Test
public void testAcceptXProperties() throws Exception {
    // With only the tenant id configured, all X (Twitter) provider defaults
    // must flow through the merge untouched.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.X));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertFalse(resolved.isDiscoveryEnabled().get());
    assertEquals("https://api.twitter.com/2/oauth2", resolved.getAuthServerUrl().get());
    assertEquals("https://twitter.com/i/oauth2/authorize", resolved.getAuthorizationPath().get());
    assertEquals("token", resolved.getTokenPath().get());
    assertEquals("https://api.twitter.com/2/users/me", resolved.getUserInfoPath().get());
    assertFalse(resolved.authentication.idTokenRequired.get());
    assertTrue(resolved.authentication.userInfoRequired.get());
    assertFalse(resolved.authentication.addOpenidScope.get());
    assertEquals(List.of("offline.access", "tweet.read", "users.read"), resolved.authentication.scopes.get());
    assertTrue(resolved.authentication.pkceRequired.get());
}
@Test
public void testOverrideXProperties() throws Exception {
    // User-level settings must win over every X (Twitter) provider default,
    // including the PKCE requirement.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setDiscoveryEnabled(true);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.setAuthorizationPath("authorization");
    custom.setTokenPath("tokens");
    custom.setUserInfoPath("userinfo");
    custom.authentication.setIdTokenRequired(true);
    custom.authentication.setUserInfoRequired(false);
    custom.authentication.setAddOpenidScope(true);
    custom.authentication.setPkceRequired(false);
    custom.authentication.setScopes(List.of("write"));

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.X));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertTrue(merged.isDiscoveryEnabled().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertEquals("authorization", merged.getAuthorizationPath().get());
    assertEquals("tokens", merged.getTokenPath().get());
    assertEquals("userinfo", merged.getUserInfoPath().get());
    assertTrue(merged.authentication.idTokenRequired.get());
    assertFalse(merged.authentication.userInfoRequired.get());
    assertEquals(List.of("write"), merged.authentication.scopes.get());
    assertTrue(merged.authentication.addOpenidScope.get());
    assertFalse(merged.authentication.pkceRequired.get());
}
@Test
public void testAcceptFacebookProperties() throws Exception {
    // A bare tenant must pick up all Facebook provider defaults from the merge.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.FACEBOOK));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertFalse(resolved.isDiscoveryEnabled().get());
    assertEquals("https://www.facebook.com", resolved.getAuthServerUrl().get());
    assertEquals("https://facebook.com/dialog/oauth/", resolved.getAuthorizationPath().get());
    assertEquals("https://www.facebook.com/.well-known/oauth/openid/jwks/", resolved.getJwksPath().get());
    assertEquals("https://graph.facebook.com/v12.0/oauth/access_token", resolved.getTokenPath().get());
    assertEquals(List.of("email", "public_profile"), resolved.authentication.scopes.get());
    assertTrue(resolved.authentication.forceRedirectHttpsScheme.get());
}
@Test
public void testOverrideFacebookProperties() throws Exception {
    // Explicit user settings must shadow the Facebook provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setDiscoveryEnabled(true);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.setAuthorizationPath("authorization");
    custom.setJwksPath("jwks");
    custom.setTokenPath("tokens");
    custom.authentication.setScopes(List.of("write"));
    custom.authentication.setForceRedirectHttpsScheme(false);

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.FACEBOOK));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertTrue(merged.isDiscoveryEnabled().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertEquals("authorization", merged.getAuthorizationPath().get());
    assertFalse(merged.getAuthentication().isForceRedirectHttpsScheme().get());
    assertEquals("jwks", merged.getJwksPath().get());
    assertEquals("tokens", merged.getTokenPath().get());
    assertEquals(List.of("write"), merged.authentication.scopes.get());
}
@Test
public void testAcceptGoogleProperties() throws Exception {
    // A bare tenant must inherit the Google provider defaults.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.GOOGLE));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertEquals("https://accounts.google.com", resolved.getAuthServerUrl().get());
    assertEquals("name", resolved.getToken().getPrincipalClaim().get());
    assertEquals(List.of("openid", "email", "profile"), resolved.authentication.scopes.get());
    assertTrue(resolved.token.verifyAccessTokenWithUserInfo.get());
}
@Test
public void testOverrideGoogleProperties() throws Exception {
    // Explicit user settings must shadow the Google provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.authentication.setScopes(List.of("write"));
    custom.token.setPrincipalClaim("firstname");
    custom.token.setVerifyAccessTokenWithUserInfo(false);

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.GOOGLE));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertEquals("firstname", merged.getToken().getPrincipalClaim().get());
    assertEquals(List.of("write"), merged.authentication.scopes.get());
    assertFalse(merged.token.verifyAccessTokenWithUserInfo.get());
}
@Test
public void testAcceptMicrosoftProperties() throws Exception {
    // A bare tenant must inherit the Microsoft provider defaults, including
    // the permissive "any" issuer used for the multi-tenant common endpoint.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.MICROSOFT));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertEquals("https://login.microsoftonline.com/common/v2.0", resolved.getAuthServerUrl().get());
    assertEquals(List.of("openid", "email", "profile"), resolved.authentication.scopes.get());
    assertEquals("any", resolved.getToken().getIssuer().get());
}
@Test
public void testOverrideMicrosoftProperties() throws Exception {
    // Explicit user settings must shadow the Microsoft provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.getToken().setIssuer("http://localhost/wiremock");
    custom.authentication.setScopes(List.of("write"));
    custom.authentication.setForceRedirectHttpsScheme(false);

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.MICROSOFT));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertEquals(List.of("write"), merged.authentication.scopes.get());
    assertEquals("http://localhost/wiremock", merged.getToken().getIssuer().get());
    assertFalse(merged.authentication.forceRedirectHttpsScheme.get());
}
@Test
public void testAcceptAppleProperties() throws Exception {
    // A bare tenant must inherit the Apple provider defaults (form_post
    // response mode and a post_jwt client secret signed with ES256).
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.APPLE));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertEquals("https://appleid.apple.com/", resolved.getAuthServerUrl().get());
    assertEquals(List.of("openid", "email", "name"), resolved.authentication.scopes.get());
    assertEquals(ResponseMode.FORM_POST, resolved.authentication.responseMode.get());
    assertEquals(Method.POST_JWT, resolved.credentials.clientSecret.method.get());
    assertEquals("https://appleid.apple.com/", resolved.credentials.jwt.audience.get());
    assertEquals(SignatureAlgorithm.ES256.getAlgorithm(), resolved.credentials.jwt.signatureAlgorithm.get());
    assertTrue(resolved.authentication.forceRedirectHttpsScheme.get());
}
@Test
public void testOverrideAppleProperties() throws Exception {
    // Explicit user settings must shadow the Apple provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.authentication.setScopes(List.of("write"));
    custom.authentication.setResponseMode(ResponseMode.QUERY);
    custom.credentials.clientSecret.setMethod(Method.POST);
    custom.credentials.jwt.setAudience("http://localhost/audience");
    custom.credentials.jwt.setSignatureAlgorithm(SignatureAlgorithm.ES256.getAlgorithm());

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.APPLE));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertEquals(List.of("write"), merged.authentication.scopes.get());
    assertEquals(ResponseMode.QUERY, merged.authentication.responseMode.get());
    assertEquals(Method.POST, merged.credentials.clientSecret.method.get());
    assertEquals("http://localhost/audience", merged.credentials.jwt.audience.get());
    assertEquals(SignatureAlgorithm.ES256.getAlgorithm(), merged.credentials.jwt.signatureAlgorithm.get());
}
@Test
public void testAcceptSpotifyProperties() {
    // A bare tenant must inherit the Spotify provider defaults.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.SPOTIFY));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertEquals("https://accounts.spotify.com", resolved.getAuthServerUrl().get());
    assertEquals(List.of("user-read-private", "user-read-email"), resolved.authentication.scopes.get());
    assertTrue(resolved.token.verifyAccessTokenWithUserInfo.get());
    assertEquals("display_name", resolved.getToken().getPrincipalClaim().get());
}
@Test
public void testOverrideSpotifyProperties() {
    // Explicit user settings must shadow the Spotify provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.getToken().setIssuer("http://localhost/wiremock");
    custom.authentication.setScopes(List.of("write"));
    custom.authentication.setForceRedirectHttpsScheme(false);
    custom.token.setPrincipalClaim("firstname");
    custom.token.setVerifyAccessTokenWithUserInfo(false);

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.SPOTIFY));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertEquals(List.of("write"), merged.authentication.scopes.get());
    assertEquals("http://localhost/wiremock", merged.getToken().getIssuer().get());
    assertFalse(merged.authentication.forceRedirectHttpsScheme.get());
    assertEquals("firstname", merged.getToken().getPrincipalClaim().get());
    assertFalse(merged.token.verifyAccessTokenWithUserInfo.get());
}
@Test
public void testAcceptStravaProperties() {
    // A bare tenant must inherit the Strava provider defaults, including the
    // non-standard comma scope separator and query client-secret method.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.STRAVA));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertFalse(resolved.discoveryEnabled.get());
    assertEquals("https://www.strava.com/oauth", resolved.getAuthServerUrl().get());
    assertEquals("authorize", resolved.getAuthorizationPath().get());
    assertEquals("token", resolved.getTokenPath().get());
    assertEquals("https://www.strava.com/api/v3/athlete", resolved.getUserInfoPath().get());
    assertEquals(List.of("activity:read"), resolved.authentication.scopes.get());
    assertTrue(resolved.token.verifyAccessTokenWithUserInfo.get());
    assertFalse(resolved.getAuthentication().idTokenRequired.get());
    assertEquals(Method.QUERY, resolved.credentials.clientSecret.method.get());
    assertEquals("/strava", resolved.authentication.redirectPath.get());
    assertEquals(",", resolved.authentication.scopeSeparator.get());
}
@Test
public void testOverrideStravaProperties() {
    // Explicit user settings must shadow the Strava provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.setAuthorizationPath("authorizations");
    custom.setTokenPath("tokens");
    custom.setUserInfoPath("users");
    custom.authentication.setScopes(List.of("write"));
    custom.token.setVerifyAccessTokenWithUserInfo(false);
    custom.credentials.clientSecret.setMethod(Method.BASIC);
    custom.authentication.setRedirectPath("/fitness-app");
    custom.authentication.setScopeSeparator(" ");

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.STRAVA));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertEquals("authorizations", merged.getAuthorizationPath().get());
    assertEquals("tokens", merged.getTokenPath().get());
    assertEquals("users", merged.getUserInfoPath().get());
    assertEquals(List.of("write"), merged.authentication.scopes.get());
    assertFalse(merged.token.verifyAccessTokenWithUserInfo.get());
    assertEquals(Method.BASIC, merged.credentials.clientSecret.method.get());
    assertEquals("/fitness-app", merged.authentication.redirectPath.get());
    assertEquals(" ", merged.authentication.scopeSeparator.get());
}
@Test
public void testAcceptTwitchProperties() throws Exception {
    // A bare tenant must inherit the Twitch provider defaults.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.TWITCH));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertEquals("https://id.twitch.tv/oauth2", resolved.getAuthServerUrl().get());
    assertEquals(Method.POST, resolved.credentials.clientSecret.method.get());
    assertTrue(resolved.authentication.forceRedirectHttpsScheme.get());
}
@Test
public void testOverrideTwitchProperties() throws Exception {
    // Explicit user settings must shadow the Twitch provider defaults
    // (client-secret method POST and forced HTTPS redirect).
    OidcTenantConfig tenant = new OidcTenantConfig();
    tenant.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    tenant.setApplicationType(ApplicationType.HYBRID);
    tenant.setAuthServerUrl("http://localhost/wiremock");
    tenant.credentials.clientSecret.setMethod(Method.BASIC);
    tenant.authentication.setForceRedirectHttpsScheme(false);
    // Bug fix: this test previously merged with Provider.FACEBOOK (a copy-paste
    // slip from testOverrideFacebookProperties), so it never exercised the
    // Twitch provider. All asserted values are user overrides, so the
    // expectations stay valid with the correct provider.
    OidcTenantConfig config = OidcUtils.mergeTenantConfig(tenant, KnownOidcProviders.provider(Provider.TWITCH));
    assertEquals(OidcUtils.DEFAULT_TENANT_ID, config.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, config.getApplicationType().get());
    assertEquals("http://localhost/wiremock", config.getAuthServerUrl().get());
    assertFalse(config.getAuthentication().isForceRedirectHttpsScheme().get());
    assertEquals(Method.BASIC, config.credentials.clientSecret.method.get());
}
@Test
public void testAcceptDiscordProperties() throws Exception {
    // A bare tenant must inherit the Discord provider defaults
    // (discovery disabled, explicit endpoint paths, no ID token).
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.DISCORD));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertFalse(resolved.discoveryEnabled.get());
    assertEquals("https://discord.com/api/oauth2", resolved.getAuthServerUrl().get());
    assertEquals("authorize", resolved.getAuthorizationPath().get());
    assertEquals("token", resolved.getTokenPath().get());
    assertEquals("keys", resolved.getJwksPath().get());
    assertEquals("https://discord.com/api/users/@me", resolved.getUserInfoPath().get());
    assertEquals(List.of("identify", "email"), resolved.authentication.scopes.get());
    assertFalse(resolved.getAuthentication().idTokenRequired.get());
}
@Test
public void testOverrideDiscordProperties() throws Exception {
    // Explicit user settings must shadow the Discord provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.credentials.clientSecret.setMethod(Method.BASIC);
    custom.authentication.setForceRedirectHttpsScheme(false);

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.DISCORD));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertFalse(merged.getAuthentication().isForceRedirectHttpsScheme().get());
    assertEquals(Method.BASIC, merged.credentials.clientSecret.method.get());
}
@Test
public void testAcceptLinkedInProperties() throws Exception {
    // A bare tenant must inherit the LinkedIn provider defaults.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.LINKEDIN));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals("https://www.linkedin.com/oauth", resolved.getAuthServerUrl().get());
    assertEquals(List.of("email", "profile"), resolved.authentication.scopes.get());
}
@Test
public void testOverrideLinkedInProperties() throws Exception {
    // Explicit user settings must shadow the LinkedIn provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId(OidcUtils.DEFAULT_TENANT_ID);
    custom.setApplicationType(ApplicationType.HYBRID);
    custom.setAuthServerUrl("http://localhost/wiremock");
    custom.credentials.clientSecret.setMethod(Method.BASIC);
    custom.authentication.setForceRedirectHttpsScheme(false);

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.LINKEDIN));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, merged.getTenantId().get());
    assertEquals(ApplicationType.HYBRID, merged.getApplicationType().get());
    assertEquals("http://localhost/wiremock", merged.getAuthServerUrl().get());
    assertFalse(merged.getAuthentication().isForceRedirectHttpsScheme().get());
    assertEquals(Method.BASIC, merged.credentials.clientSecret.method.get());
}
@Test
public void testAcceptSlackProperties() {
    // A bare tenant must inherit the Slack provider defaults. Optional-valued
    // flags are read with orElse so absent values keep their documented default.
    OidcTenantConfig minimal = new OidcTenantConfig();
    minimal.setTenantId(OidcUtils.DEFAULT_TENANT_ID);

    OidcTenantConfig resolved = OidcUtils.mergeTenantConfig(minimal, KnownOidcProviders.provider(Provider.SLACK));

    assertEquals(OidcUtils.DEFAULT_TENANT_ID, resolved.getTenantId().get());
    assertEquals(ApplicationType.WEB_APP, resolved.getApplicationType().get());
    assertTrue(resolved.isDiscoveryEnabled().orElse(true));
    assertEquals("https://slack.com", resolved.getAuthServerUrl().get());
    assertEquals("name", resolved.token.principalClaim.get());
    assertTrue(resolved.authentication.forceRedirectHttpsScheme.orElse(false));
    assertEquals(List.of("profile", "email"), resolved.authentication.scopes.get());
}
@Test
public void testOverrideSlackProperties() {
    // Explicit user settings (including a custom tenant id) must shadow the
    // Slack provider defaults.
    OidcTenantConfig custom = new OidcTenantConfig();
    custom.setTenantId("PattiSmith");
    custom.setApplicationType(ApplicationType.SERVICE);
    custom.setDiscoveryEnabled(false);
    custom.setAuthServerUrl("https://private-slack.com");
    custom.getToken().setPrincipalClaim("I you my own principal");
    custom.getAuthentication().setForceRedirectHttpsScheme(false);
    custom.getAuthentication().setScopes(List.of("profile"));

    OidcTenantConfig merged = OidcUtils.mergeTenantConfig(custom, KnownOidcProviders.provider(Provider.SLACK));

    assertEquals("PattiSmith", merged.getTenantId().get());
    assertEquals(ApplicationType.SERVICE, merged.getApplicationType().get());
    assertFalse(merged.isDiscoveryEnabled().orElse(true));
    assertEquals("https://private-slack.com", merged.getAuthServerUrl().get());
    assertEquals("I you my own principal", merged.token.principalClaim.get());
    assertFalse(merged.authentication.forceRedirectHttpsScheme.orElse(false));
    assertEquals(List.of("profile"), merged.authentication.scopes.get());
}
}
| KnownOidcProvidersTest |
java | processing__processing4 | java/test/processing/mode/java/JavaRuntimePathFactoryTest.java | {
"start": 1024,
"end": 2124
} | class ____ {
private RuntimePathBuilder.RuntimePathFactoryStrategy factory;
private JavaMode testMode;
private List<ImportStatement> testImports;
private Sketch testSketch;
private List<String> classpath;
@Before
public void setUp() throws Exception {
RuntimePathBuilder builder = new RuntimePathBuilder();
factory = builder::buildJavaRuntimePath;
testMode = RuntimePathFactoryTestUtil.createTestJavaMode();
testImports = RuntimePathFactoryTestUtil.createTestImports();
testSketch = RuntimePathFactoryTestUtil.createTestSketch();
classpath = factory.buildClasspath(testMode, testImports, testSketch);
}
@Test
public void testBuildClasspathSize() {
assertEquals(RuntimePathBuilder.STANDARD_MODULES.length, classpath.size());
}
@Test
public void testBuildClasspathValues() {
boolean foundTarget = false;
for (String entry : classpath) {
boolean justFound = entry.contains("java.base.jmod") && entry.contains("jmods");
foundTarget = foundTarget || justFound;
}
assertTrue(foundTarget);
}
} | JavaRuntimePathFactoryTest |
java | google__guice | core/test/com/google/inject/ScopesTest.java | {
"start": 18704,
"end": 18787
} | class ____ {}
@ImplementedBy(Implementation.class)
static | SingletonAndCustomScoped |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NonApiTypeTest.java | {
"start": 3619,
"end": 4174
} | class ____ {
// BUG: Diagnostic contains: java.util.Optional
public Optional<String> middleName() {
return Optional.of("alfred");
}
// BUG: Diagnostic contains: java.util.Optional
public void setMiddleName(Optional<String> middleName) {}
}
""")
.doTest();
}
@Test
public void jdkOptionals() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.Optional;
public | Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoNotMockCheckerTest.java | {
"start": 2483,
"end": 2620
} | class ____ {}",
"",
" @MetaDoNotMock @Retention(RetentionPolicy.RUNTIME)",
" @ | MetaDoNotMockObject |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractStatus.java | {
"start": 923,
"end": 3757
} | enum ____ {
UNASSIGNED,
RUNNING,
PAUSED,
FAILED,
DESTROYED, // Never visible to users; destroyed Connector and Task instances are not shown
RESTARTING,
STOPPED, // Only ever visible to users for Connector instances; never for Task instances
}
private final T id;
private final State state;
private final String trace;
private final String workerId;
private final String version;
private final int generation;
public AbstractStatus(T id,
State state,
String workerId,
int generation,
String trace,
String version) {
this.id = id;
this.state = state;
this.workerId = workerId;
this.generation = generation;
this.trace = trace;
this.version = version;
}
public AbstractStatus(T id,
State state,
String workerId,
int generation,
String trace) {
this(id, state, workerId, generation, trace, null);
}
public T id() {
return id;
}
public State state() {
return state;
}
public String trace() {
return trace;
}
public String workerId() {
return workerId;
}
public int generation() {
return generation;
}
public String version() {
return version;
}
@Override
public String toString() {
return "Status{" +
"id=" + id +
", state=" + state +
", workerId='" + workerId + '\'' +
", version='" + version + '\'' +
", generation=" + generation +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AbstractStatus<?> that = (AbstractStatus<?>) o;
return generation == that.generation
&& Objects.equals(id, that.id)
&& state == that.state
&& Objects.equals(trace, that.trace)
&& Objects.equals(workerId, that.workerId)
&& Objects.equals(version, that.version);
}
@Override
public int hashCode() {
int result = id != null ? id.hashCode() : 0;
result = 31 * result + (state != null ? state.hashCode() : 0);
result = 31 * result + (trace != null ? trace.hashCode() : 0);
result = 31 * result + (workerId != null ? workerId.hashCode() : 0);
result = 31 * result + (version != null ? version.hashCode() : 0);
result = 31 * result + generation;
return result;
}
}
| State |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MllpEndpointBuilderFactory.java | {
"start": 67102,
"end": 75024
} | interface ____
extends
AdvancedMllpEndpointConsumerBuilder,
AdvancedMllpEndpointProducerBuilder {
default MllpEndpointBuilder basic() {
return (MllpEndpointBuilder) this;
}
/**
* Maximum buffer size used when receiving or sending data over the
* wire.
*
* The option is a: <code>int</code> type.
*
* Default: 1073741824
* Group: advanced
*
* @param maxBufferSize the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder maxBufferSize(int maxBufferSize) {
doSetProperty("maxBufferSize", maxBufferSize);
return this;
}
/**
* Maximum buffer size used when receiving or sending data over the
* wire.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 1073741824
* Group: advanced
*
* @param maxBufferSize the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder maxBufferSize(String maxBufferSize) {
doSetProperty("maxBufferSize", maxBufferSize);
return this;
}
/**
* Minimum buffer size used when receiving or sending data over the
* wire.
*
* The option is a: <code>int</code> type.
*
* Default: 2048
* Group: advanced
*
* @param minBufferSize the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder minBufferSize(int minBufferSize) {
doSetProperty("minBufferSize", minBufferSize);
return this;
}
/**
* Minimum buffer size used when receiving or sending data over the
* wire.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 2048
* Group: advanced
*
* @param minBufferSize the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder minBufferSize(String minBufferSize) {
doSetProperty("minBufferSize", minBufferSize);
return this;
}
/**
* The SO_TIMEOUT value (in milliseconds) used after the start of an
* MLLP frame has been received.
*
* The option is a: <code>int</code> type.
*
* Default: 5000
* Group: advanced
*
* @param readTimeout the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder readTimeout(int readTimeout) {
doSetProperty("readTimeout", readTimeout);
return this;
}
/**
* The SO_TIMEOUT value (in milliseconds) used after the start of an
* MLLP frame has been received.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 5000
* Group: advanced
*
* @param readTimeout the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder readTimeout(String readTimeout) {
doSetProperty("readTimeout", readTimeout);
return this;
}
/**
* Sets the SO_RCVBUF option to the specified value (in bytes).
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 8192
* Group: advanced
*
* @param receiveBufferSize the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder receiveBufferSize(Integer receiveBufferSize) {
doSetProperty("receiveBufferSize", receiveBufferSize);
return this;
}
/**
* Sets the SO_RCVBUF option to the specified value (in bytes).
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Default: 8192
* Group: advanced
*
* @param receiveBufferSize the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder receiveBufferSize(String receiveBufferSize) {
doSetProperty("receiveBufferSize", receiveBufferSize);
return this;
}
/**
* The SO_TIMEOUT value (in milliseconds) used when waiting for the
* start of an MLLP frame.
*
* The option is a: <code>int</code> type.
*
* Default: 15000
* Group: advanced
*
* @param receiveTimeout the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder receiveTimeout(int receiveTimeout) {
doSetProperty("receiveTimeout", receiveTimeout);
return this;
}
/**
* The SO_TIMEOUT value (in milliseconds) used when waiting for the
* start of an MLLP frame.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 15000
* Group: advanced
*
* @param receiveTimeout the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder receiveTimeout(String receiveTimeout) {
doSetProperty("receiveTimeout", receiveTimeout);
return this;
}
/**
* Sets the SO_SNDBUF option to the specified value (in bytes).
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 8192
* Group: advanced
*
* @param sendBufferSize the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder sendBufferSize(Integer sendBufferSize) {
doSetProperty("sendBufferSize", sendBufferSize);
return this;
}
/**
* Sets the SO_SNDBUF option to the specified value (in bytes).
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Default: 8192
* Group: advanced
*
* @param sendBufferSize the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder sendBufferSize(String sendBufferSize) {
doSetProperty("sendBufferSize", sendBufferSize);
return this;
}
/**
* The approximate idle time allowed before the Client TCP Connection
* will be reset. A null value or a value less than or equal to zero
* will disable the idle timeout.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: tcp
*
* @param idleTimeout the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder idleTimeout(Integer idleTimeout) {
doSetProperty("idleTimeout", idleTimeout);
return this;
}
/**
* The approximate idle time allowed before the Client TCP Connection
* will be reset. A null value or a value less than or equal to zero
* will disable the idle timeout.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: tcp
*
* @param idleTimeout the value to set
* @return the dsl builder
*/
default AdvancedMllpEndpointBuilder idleTimeout(String idleTimeout) {
doSetProperty("idleTimeout", idleTimeout);
return this;
}
}
public | AdvancedMllpEndpointBuilder |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/annotation/JakartaAnnotationsRuntimeHintsTests.java | {
"start": 1306,
"end": 2054
} | class ____ {
private final RuntimeHints hints = new RuntimeHints();
@BeforeEach
void setup() {
AotServices.factories().load(RuntimeHintsRegistrar.class)
.forEach(registrar -> registrar.registerHints(this.hints,
ClassUtils.getDefaultClassLoader()));
}
@Test
void jakartaInjectAnnotationHasHints() {
assertThat(RuntimeHintsPredicates.reflection().onType(Inject.class)).accepts(this.hints);
}
@Test
void jakartaProviderAnnotationHasHints() {
assertThat(RuntimeHintsPredicates.reflection().onType(Provider.class)).accepts(this.hints);
}
@Test
void jakartaQualifierAnnotationHasHints() {
assertThat(RuntimeHintsPredicates.reflection().onType(Qualifier.class)).accepts(this.hints);
}
}
| JakartaAnnotationsRuntimeHintsTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cascade/persist/CascadePersistJpaTest.java | {
"start": 1737,
"end": 1899
} | class ____ {
@Id @GeneratedValue
private Long id;
@Basic(optional = false)
String name = "child";
@ManyToOne(optional = false)
Parent parent;
}
}
| Child |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3089/Issue3089BuilderProvider.java | {
"start": 1279,
"end": 3855
} | class ____ be be declared with the following line
*
* <pre>
* public static Builder() extends ImmutableItem.Builder { }
* </pre>
*
* The Immutable instance should be created with the following line
*
* <pre>
* new Item.Builder().withId("123").build();
* </pre>
*
* @see org.mapstruct.ap.test.bugs._3089.domain.Item
*
* @param typeElement
* @return
*/
private BuilderInfo findBuilderInfoFromInnerBuilderClass(TypeElement typeElement) {
if (shouldIgnore( typeElement )) {
return null;
}
List<TypeElement> innerTypes = ElementFilter.typesIn( typeElement.getEnclosedElements() );
ExecutableElement defaultConstructor = innerTypes.stream()
.filter( this::isBuilderCandidate )
.map( this::getEmptyArgPublicConstructor )
.filter( Objects::nonNull )
.findAny()
.orElse( null );
if ( defaultConstructor != null ) {
return new BuilderInfo.Builder()
.builderCreationMethod( defaultConstructor )
.buildMethod( findBuildMethods( (TypeElement) defaultConstructor.getEnclosingElement(), typeElement ) )
.build();
}
return null;
}
private boolean isBuilderCandidate(TypeElement innerType ) {
TypeElement outerType = (TypeElement) innerType.getEnclosingElement();
String packageName = this.elementUtils.getPackageOf( outerType ).getQualifiedName().toString();
Name outerSimpleName = outerType.getSimpleName();
String builderClassName = packageName + ".Immutable" + outerSimpleName + ".Builder";
return innerType.getSimpleName().contentEquals( "Builder" )
&& getTypeElement( innerType.getSuperclass() ).getQualifiedName().contentEquals( builderClassName )
&& innerType.getModifiers().contains( Modifier.PUBLIC );
}
private ExecutableElement getEmptyArgPublicConstructor(TypeElement builderType) {
return ElementFilter.constructorsIn( builderType.getEnclosedElements() ).stream()
.filter( c -> c.getParameters().isEmpty() )
.filter( c -> c.getModifiers().contains( Modifier.PUBLIC ) )
.findAny()
.orElse( null );
}
}
| should |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/PreferredUuidJdbcTypeTest.java | {
"start": 3265,
"end": 3441
} | class ____ {
@Id
private Integer id;
private UUID uuid;
@JdbcTypeCode(SqlTypes.UUID)
private UUID uuidJdbcTypeCode;
public EntityWithUuid() {
}
}
}
| EntityWithUuid |
java | google__dagger | hilt-android/main/java/dagger/hilt/android/AndroidEntryPoint.java | {
"start": 2356,
"end": 2559
} | class ____ the generated Hilt class. When applying the Hilt Gradle Plugin this value
* is not necessary and will be inferred from the current superclass.
*/
Class<?> value() default Void.class;
}
| for |
java | apache__camel | tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/generics/ClassUtil.java | {
"start": 4714,
"end": 7140
} | class ____ for given type
*/
public static Class<?> getClazz(Type type) {
if (type instanceof ParameterizedType pt) {
return (Class<?>) pt.getRawType();
} else if (type instanceof Class) {
return (Class<?>) type;
} else if (type instanceof GenericArrayType arrayType) {
return Array.newInstance(getClazz(arrayType.getGenericComponentType()), 0).getClass();
} else if (type instanceof WildcardType wildcardType) {
Type[] bounds = wildcardType.getUpperBounds();
if (bounds.length > 1) {
throw new IllegalArgumentException(
"Illegal use of wild card type with more than one upper bound: " + wildcardType);
} else if (bounds.length == 0) {
return Object.class;
} else {
return getClass(bounds[0]);
}
} else if (type instanceof TypeVariable<?> typeVariable) {
if (typeVariable.getBounds().length > 1) {
throw new IllegalArgumentException("Illegal use of type variable with more than one bound: " + typeVariable);
} else {
Type[] bounds = typeVariable.getBounds();
if (bounds.length == 0) {
return Object.class;
} else {
return getClass(bounds[0]);
}
}
} else {
throw new IllegalArgumentException("Unsupported type " + type);
}
}
public static boolean isRawClassEquals(Type ipType, Type apiType) {
Class<?> ipClass = getRawPrimitiveType(ipType);
Class<?> apiClass = getRawPrimitiveType(apiType);
if (ipClass == null || apiClass == null) {
// we found some illegal generics
return false;
}
return ipClass.equals(apiClass);
}
private static Class<?> getRawPrimitiveType(Type type) {
if (type instanceof Class<?> clazz) {
if (clazz.isPrimitive()) {
return getPrimitiveWrapper(clazz);
}
return clazz;
}
if (type instanceof ParameterizedType) {
return getRawPrimitiveType(((ParameterizedType) type).getRawType());
}
return null;
}
/**
* @param fqAnnotationName a fully qualified runtime annotation name whose presence on the given | type |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAutoUpdateService.java | {
"start": 772,
"end": 934
} | class ____ implements ClusterStateListener {
private static final Logger logger = LogManager.getLogger(MlAutoUpdateService.class);
public | MlAutoUpdateService |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/memory/ByteArrayOutputStreamWithPos.java | {
"start": 1176,
"end": 3305
} | class ____ extends OutputStream {
protected byte[] buffer;
protected int count;
public ByteArrayOutputStreamWithPos() {
this(64);
}
public ByteArrayOutputStreamWithPos(int size) {
Preconditions.checkArgument(size >= 0);
buffer = new byte[size];
}
private void ensureCapacity(int requiredCapacity) {
if (requiredCapacity - buffer.length > 0) {
increaseCapacity(requiredCapacity);
}
}
private void increaseCapacity(int requiredCapacity) {
int oldCapacity = buffer.length;
int newCapacity = oldCapacity << 1;
if (newCapacity - requiredCapacity < 0) {
newCapacity = requiredCapacity;
}
if (newCapacity < 0) {
if (requiredCapacity < 0) {
throw new OutOfMemoryError();
}
newCapacity = Integer.MAX_VALUE;
}
buffer = Arrays.copyOf(buffer, newCapacity);
}
@Override
public void write(int b) {
ensureCapacity(count + 1);
buffer[count] = (byte) b;
++count;
}
@Override
public void write(byte[] b, int off, int len) {
if ((off < 0) || (len < 0) || (off > b.length) || ((off + len) - b.length > 0)) {
throw new IndexOutOfBoundsException();
}
ensureCapacity(count + len);
System.arraycopy(b, off, buffer, count, len);
count += len;
}
public void reset() {
count = 0;
}
public byte[] toByteArray() {
return Arrays.copyOf(buffer, count);
}
public int size() {
return count;
}
public String toString() {
return new String(buffer, 0, count, ConfigConstants.DEFAULT_CHARSET);
}
public int getPosition() {
return count;
}
public void setPosition(int position) {
Preconditions.checkArgument(position >= 0, "Position out of bounds.");
ensureCapacity(position + 1);
count = position;
}
@Override
public void close() {}
public byte[] getBuf() {
return buffer;
}
}
| ByteArrayOutputStreamWithPos |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/apidiff/ApiDiffCheckerTest.java | {
"start": 18135,
"end": 18458
} | class ____ {
@RequiresNewApiVersion Lib l;
}
""")
.doTest();
}
@Test
public void positiveAnnotatedMethod() {
compilationHelper
.addSourceLines(
"Lib.java",
"""
package my.lib;
import com.google.errorprone.bugpatterns.apidiff.ApiDiffCheckerTest.RequiresNewApiVersion;
public final | Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java | {
"start": 44528,
"end": 45447
} | class ____ parameter takes values from
*/
public static <T extends Enum<T>> Parameter<T> enumParam(
String name,
boolean updateable,
Function<FieldMapper, T> initializer,
T defaultValue,
Class<T> enumClass
) {
return enumParam(name, updateable, initializer, (Supplier<T>) () -> defaultValue, enumClass);
}
/**
* Defines a parameter that takes any of the values of an enumeration.
*
* @param name the parameter name
* @param updateable whether the parameter can be changed by a mapping update
* @param initializer a function that reads the parameter value from an existing mapper
* @param defaultValue a supplier for the default value, to be used if the parameter is undefined in a mapping
* @param enumClass the enumeration | the |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/exceptionmappers/AsyncExceptionMappingUtil.java | {
"start": 700,
"end": 2529
} | class ____ {
private static final Logger log = Logger.getLogger(AsyncExceptionMappingUtil.class);
private static final Response DEFAULT_RESPONSE = Response.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity("Internal Server Error").build();
public static final Response DEFAULT_UNAUTHORIZED_RESPONSE = Response.status(Response.Status.UNAUTHORIZED)
.entity("Not Authenticated").build();
private AsyncExceptionMappingUtil() {
}
public static void handleUniResponse(Uni<Response> asyncResponse, AsyncExceptionMapperContext context) {
context.suspend();
asyncResponse.subscribe().with(new Consumer<Response>() {
@Override
public void accept(Response response) {
if (response == null) {
log.debug("Response was null, returning default error response");
context.setResponse(DEFAULT_RESPONSE);
} else {
context.setResponse(response);
}
context.resume();
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
log.error("Unable to convert exception to Response", throwable);
context.setResponse(DEFAULT_RESPONSE);
context.resume();
}
});
}
public static void handleUniRestResponse(Uni<? extends RestResponse<?>> asyncResponse,
AsyncExceptionMapperContext context) {
handleUniResponse(asyncResponse.map(new Function<RestResponse<?>, Response>() {
@Override
public Response apply(RestResponse<?> t) {
return t != null ? t.toResponse() : null;
}
}), context);
}
}
| AsyncExceptionMappingUtil |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/DdlType.java | {
"start": 1807,
"end": 2012
} | enum ____ array types
*/
@Deprecated(since = "6.3")
String getRawTypeName();
/**
* Returns all type names without precision/length and scale parameters.
*
* @deprecated not appropriate for named | or |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/protocol/MaintenanceAwareComponent.java | {
"start": 425,
"end": 1395
} | interface ____ {
/**
* Called whenever a shard migration is initiated
*/
void onMigrateStarted(String shards);
/**
* Called whenever a shard migration is completed
*/
void onMigrateCompleted(String shards);
/**
* Called whenever a failover is initiated
*/
void onFailoverStarted(String shards);
/**
* Called whenever a failover is completed
*/
void onFailoverCompleted(String shards);
/**
* Called whenever a re-bind has been initiated by the remote server
* <p>
* A specific endpoint is going to move to another node within <time> seconds
* </p>
*
* @param endpoint address of the target endpoint
* @param time estimated time for the re-bind to complete
*/
void onRebindStarted(Duration time, SocketAddress endpoint);
/**
* Called whenever the re-bind has been completed
*/
void onRebindCompleted();
}
| MaintenanceAwareComponent |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/JoinOperationFactory.java | {
"start": 1859,
"end": 1943
} | class ____ creating a valid {@link JoinQueryOperation} operation. */
@Internal
final | for |
java | apache__flink | flink-connectors/flink-connector-datagen/src/main/java/org/apache/flink/connector/datagen/source/DataGeneratorSource.java | {
"start": 4398,
"end": 9168
} | class ____<OUT>
implements Source<OUT, NumberSequenceSplit, Collection<NumberSequenceSplit>>,
ResultTypeQueryable<OUT>,
OutputTypeConfigurable<OUT> {
private static final long serialVersionUID = 1L;
private final SourceReaderFactory<OUT, NumberSequenceSplit> sourceReaderFactory;
private TypeInformation<OUT> typeInfo;
private final NumberSequenceSource numberSource;
private final GeneratorFunction<Long, OUT> generatorFunction;
/**
* Instantiates a new {@code DataGeneratorSource}.
*
* @param generatorFunction The {@code GeneratorFunction} function.
* @param count The number of generated data points.
* @param typeInfo The type of the produced data points.
*/
public DataGeneratorSource(
GeneratorFunction<Long, OUT> generatorFunction,
long count,
TypeInformation<OUT> typeInfo) {
this(generatorFunction, count, RateLimiterStrategy.noOp(), typeInfo);
}
/**
* Instantiates a new {@code DataGeneratorSource}.
*
* @param generatorFunction The {@code GeneratorFunction} function.
* @param count The number of generated data points.
* @param rateLimiterStrategy The strategy for rate limiting.
* @param typeInfo The type of the produced data points.
*/
public DataGeneratorSource(
GeneratorFunction<Long, OUT> generatorFunction,
long count,
RateLimiterStrategy rateLimiterStrategy,
TypeInformation<OUT> typeInfo) {
this(
new GeneratorSourceReaderFactory<>(generatorFunction, rateLimiterStrategy),
generatorFunction,
count,
typeInfo);
ClosureCleaner.clean(
rateLimiterStrategy, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
}
DataGeneratorSource(
SourceReaderFactory<OUT, NumberSequenceSplit> sourceReaderFactory,
GeneratorFunction<Long, OUT> generatorFunction,
long count,
TypeInformation<OUT> typeInfo) {
this.sourceReaderFactory = checkNotNull(sourceReaderFactory);
this.generatorFunction = checkNotNull(generatorFunction);
this.typeInfo = checkNotNull(typeInfo);
long to = count > 0 ? count - 1 : 0; // a noop source (0 elements) is used in Table tests
this.numberSource = new NumberSequenceSource(0, to);
ClosureCleaner.clean(
generatorFunction, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
ClosureCleaner.clean(
sourceReaderFactory, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
}
@Override
@SuppressWarnings("unchecked")
public void setOutputType(TypeInformation<OUT> outTypeInfo, ExecutionConfig executionConfig) {
this.typeInfo = outTypeInfo;
if (generatorFunction instanceof OutputTypeConfigurable) {
((OutputTypeConfigurable<OUT>) generatorFunction)
.setOutputType(outTypeInfo, executionConfig);
}
}
@VisibleForTesting
public GeneratorFunction<Long, OUT> getGeneratorFunction() {
return generatorFunction;
}
// ------------------------------------------------------------------------
// source methods
// ------------------------------------------------------------------------
@Override
public TypeInformation<OUT> getProducedType() {
return typeInfo;
}
@Override
public Boundedness getBoundedness() {
return Boundedness.BOUNDED;
}
@Override
public SourceReader<OUT, NumberSequenceSplit> createReader(SourceReaderContext readerContext)
throws Exception {
return sourceReaderFactory.createReader(readerContext);
}
@Override
public SplitEnumerator<NumberSequenceSplit, Collection<NumberSequenceSplit>> restoreEnumerator(
SplitEnumeratorContext<NumberSequenceSplit> enumContext,
Collection<NumberSequenceSplit> checkpoint) {
return numberSource.restoreEnumerator(enumContext, checkpoint);
}
@Override
public SplitEnumerator<NumberSequenceSplit, Collection<NumberSequenceSplit>> createEnumerator(
final SplitEnumeratorContext<NumberSequenceSplit> enumContext) {
return numberSource.createEnumerator(enumContext);
}
@Override
public SimpleVersionedSerializer<NumberSequenceSplit> getSplitSerializer() {
return numberSource.getSplitSerializer();
}
@Override
public SimpleVersionedSerializer<Collection<NumberSequenceSplit>>
getEnumeratorCheckpointSerializer() {
return numberSource.getEnumeratorCheckpointSerializer();
}
}
| DataGeneratorSource |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/map/event/EntryUpdatedListener.java | {
"start": 744,
"end": 860
} | interface ____<K, V> extends MapEntryListener {
void onUpdated(EntryEvent<K, V> event);
}
| EntryUpdatedListener |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/LogManager.java | {
"start": 2211,
"end": 2293
} | class ____ {
/**
* Log4j's property to set to the fully qualified | LogManager |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/EmptyHttpHeaders.java | {
"start": 4588,
"end": 5146
} | class ____ {
/**
* The instance is instantiated here to break the cyclic static initialization between {@link EmptyHttpHeaders}
* and {@link HttpHeaders}. The issue is that if someone accesses {@link EmptyHttpHeaders#INSTANCE} before
* {@link HttpHeaders#EMPTY_HEADERS} then {@link HttpHeaders#EMPTY_HEADERS} will be {@code null}.
*/
@Deprecated
private static final EmptyHttpHeaders EMPTY_HEADERS = new EmptyHttpHeaders();
private InstanceInitializer() {
}
}
}
| InstanceInitializer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/CancelDelegationTokenRequestPBImpl.java | {
"start": 1530,
"end": 3804
} | class ____ extends
CancelDelegationTokenRequest {
CancelDelegationTokenRequestProto proto = CancelDelegationTokenRequestProto
.getDefaultInstance();
CancelDelegationTokenRequestProto.Builder builder = null;
boolean viaProto = false;
public CancelDelegationTokenRequestPBImpl() {
builder = CancelDelegationTokenRequestProto.newBuilder();
}
public CancelDelegationTokenRequestPBImpl(
CancelDelegationTokenRequestProto proto) {
this.proto = proto;
viaProto = true;
}
Token token;
@Override
public Token getDelegationToken() {
CancelDelegationTokenRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.token != null) {
return this.token;
}
this.token = convertFromProtoFormat(p.getToken());
return this.token;
}
@Override
public void setDelegationToken(Token token) {
maybeInitBuilder();
if (token == null)
builder.clearToken();
this.token = token;
}
public CancelDelegationTokenRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void mergeLocalToBuilder() {
if (token != null) {
builder.setToken(convertToProtoFormat(this.token));
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = CancelDelegationTokenRequestProto.newBuilder(proto);
}
viaProto = false;
}
private TokenPBImpl convertFromProtoFormat(TokenProto p) {
return new TokenPBImpl(p);
}
private TokenProto convertToProtoFormat(Token t) {
return ((TokenPBImpl) t).getProto();
}
}
| CancelDelegationTokenRequestPBImpl |
java | alibaba__nacos | ai/src/test/java/com/alibaba/nacos/ai/form/a2a/admin/AgentFormTest.java | {
"start": 884,
"end": 2907
} | class ____ {
@Test
void testValidateSuccess() throws NacosApiException {
AgentForm agentForm = new AgentForm();
agentForm.setAgentName("test-agent");
agentForm.validate();
// Should not throw exception
}
@Test
void testValidateWithEmptyNameShouldThrowException() {
AgentForm agentForm = new AgentForm();
assertThrows(NacosApiException.class, agentForm::validate);
}
@Test
void testValidateWithNullNameShouldThrowException() {
AgentForm agentForm = new AgentForm();
agentForm.setAgentName(null);
assertThrows(NacosApiException.class, agentForm::validate);
}
@Test
void testFillDefaultNamespaceId() {
AgentForm agentForm = new AgentForm();
agentForm.fillDefaultNamespaceId();
assertEquals("public", agentForm.getNamespaceId());
}
@Test
void testFillDefaultNamespaceIdWithExistingValue() {
AgentForm agentForm = new AgentForm();
agentForm.setNamespaceId("test-namespace");
agentForm.fillDefaultNamespaceId();
assertEquals("test-namespace", agentForm.getNamespaceId());
}
@Test
void testValidateShouldFillDefaultNamespaceId() throws NacosApiException {
AgentForm agentForm = new AgentForm();
agentForm.setAgentName("test-agent");
agentForm.validate();
assertEquals("public", agentForm.getNamespaceId());
}
@Test
void testGetterAndSetter() {
AgentForm agentForm = new AgentForm();
agentForm.setNamespaceId("test-namespace");
agentForm.setAgentName("test-agent");
agentForm.setVersion("1.0.0");
agentForm.setRegistrationType("URL");
assertEquals("test-namespace", agentForm.getNamespaceId());
assertEquals("test-agent", agentForm.getAgentName());
assertEquals("1.0.0", agentForm.getVersion());
assertEquals("URL", agentForm.getRegistrationType());
}
} | AgentFormTest |
java | apache__camel | components/camel-lra/src/test/java/org/apache/camel/service/lra/AbstractLRATestSupport.java | {
"start": 1865,
"end": 4134
} | class ____ extends CamelTestSupport {
@RegisterExtension
static MicroprofileLRAService service = MicroprofileLRAServiceFactory.createService();
private Integer serverPort;
private int activeLRAs;
@BeforeEach
public void getActiveLRAs() throws IOException, InterruptedException {
this.activeLRAs = getNumberOfActiveLRAs();
}
@AfterEach
public void checkActiveLRAs() throws IOException, InterruptedException {
await().atMost(2, SECONDS).until(() -> getNumberOfActiveLRAs(), equalTo(activeLRAs));
assertEquals(activeLRAs, getNumberOfActiveLRAs(), "Some LRA have been left pending");
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.addService(createLRASagaService());
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
restConfiguration()
.port(getServerPort());
}
});
return context;
}
protected LRASagaService createLRASagaService() {
LRASagaService sagaService = new LRASagaService();
sagaService.setCoordinatorUrl(getCoordinatorURL());
sagaService.setLocalParticipantUrl(
String.format("http://%s:%d", service.callbackHost(), getServerPort()));
return sagaService;
}
protected int getNumberOfActiveLRAs() throws IOException, InterruptedException {
HttpClient client = HttpClient.newHttpClient();
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(getCoordinatorURL() + "/lra-coordinator"))
.build();
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
ObjectMapper mapper = new ObjectMapper();
JsonNode lras = mapper.readTree(response.body());
return lras.size();
}
private String getCoordinatorURL() {
return service.getServiceAddress();
}
protected int getServerPort() {
if (serverPort == null) {
serverPort = AvailablePortFinder.getNextAvailable();
}
return serverPort;
}
}
| AbstractLRATestSupport |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oceanbase/OceanbaseHintTest_parallel.java | {
"start": 967,
"end": 2186
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select /*+ parallel(5) */ count(*) from t1;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList();
SQLStatement stmt = stmtList.get(0);
String result = SQLUtils.toMySqlString(stmt);
assertEquals("SELECT /*+ parallel(5) */ count(*)"
+ "\nFROM t1;", result);
print(stmtList);
assertEquals(1, stmtList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.containsTable("t1"));
assertTrue(visitor.containsColumn("t1", "*"));
}
}
| OceanbaseHintTest_parallel |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/clusterframework/types/ResourceProfileTest.java | {
"start": 1626,
"end": 24447
} | class ____ {
private static final MemorySize TOO_LARGE_MEMORY =
MAX_MEMORY_SIZE_TO_LOG.add(MemorySize.ofMebiBytes(10));
private static final String EXTERNAL_RESOURCE_NAME = "gpu";
@Test
void testAllFieldsNoLessThanProfile() {
final ResourceProfile rp1 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.build();
final ResourceProfile rp2 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(200)
.setTaskOffHeapMemoryMB(200)
.setManagedMemoryMB(200)
.build();
final ResourceProfile rp3 =
ResourceProfile.newBuilder()
.setCpuCores(2.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.build();
final ResourceProfile rp4 =
ResourceProfile.newBuilder()
.setCpuCores(2.0)
.setTaskHeapMemoryMB(200)
.setTaskOffHeapMemoryMB(200)
.setManagedMemoryMB(200)
.build();
assertThat(rp1.allFieldsNoLessThan(rp2)).isFalse();
assertThat(rp2.allFieldsNoLessThan(rp1)).isTrue();
assertThat(rp1.allFieldsNoLessThan(rp3)).isFalse();
assertThat(rp3.allFieldsNoLessThan(rp1)).isTrue();
assertThat(rp2.allFieldsNoLessThan(rp3)).isFalse();
assertThat(rp3.allFieldsNoLessThan(rp2)).isFalse();
assertThat(rp4.allFieldsNoLessThan(rp1)).isTrue();
assertThat(rp4.allFieldsNoLessThan(rp2)).isTrue();
assertThat(rp4.allFieldsNoLessThan(rp3)).isTrue();
assertThat(rp4.allFieldsNoLessThan(rp4)).isTrue();
final ResourceProfile rp5 =
ResourceProfile.newBuilder()
.setCpuCores(2.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(100)
.build();
assertThat(rp4.allFieldsNoLessThan(rp5)).isFalse();
ResourceSpec rs1 =
ResourceSpec.newBuilder(1.0, 100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2.2))
.build();
ResourceSpec rs2 =
ResourceSpec.newBuilder(1.0, 100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.1))
.build();
assertThat(rp1.allFieldsNoLessThan(ResourceProfile.fromResourceSpec(rs1))).isFalse();
assertThat(
ResourceProfile.fromResourceSpec(rs1)
.allFieldsNoLessThan(ResourceProfile.fromResourceSpec(rs2)))
.isTrue();
assertThat(
ResourceProfile.fromResourceSpec(rs2)
.allFieldsNoLessThan(ResourceProfile.fromResourceSpec(rs1)))
.isFalse();
}
@Test
void testUnknownNoLessThanUnknown() {
assertThat(ResourceProfile.UNKNOWN.allFieldsNoLessThan(ResourceProfile.UNKNOWN)).isTrue();
}
@Test
void testMatchRequirement() {
final ResourceProfile resource1 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.build();
final ResourceProfile resource2 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.0))
.build();
final ResourceProfile requirement1 = ResourceProfile.UNKNOWN;
final ResourceProfile requirement2 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.build();
final ResourceProfile requirement3 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.0))
.build();
assertThat(resource1.isMatching(requirement1)).isTrue();
assertThat(resource1.isMatching(requirement2)).isTrue();
assertThat(resource1.isMatching(requirement3)).isFalse();
assertThat(resource2.isMatching(requirement1)).isTrue();
assertThat(resource2.isMatching(requirement2)).isFalse();
assertThat(resource2.isMatching(requirement3)).isTrue();
}
@Test
void testEquals() {
ResourceSpec rs1 = ResourceSpec.newBuilder(1.0, 100).build();
ResourceSpec rs2 = ResourceSpec.newBuilder(1.0, 100).build();
assertThat(ResourceProfile.fromResourceSpec(rs2))
.isEqualTo(ResourceProfile.fromResourceSpec(rs1));
ResourceSpec rs3 =
ResourceSpec.newBuilder(1.0, 100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2.2))
.build();
ResourceSpec rs4 =
ResourceSpec.newBuilder(1.0, 100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.1))
.build();
assertThat(ResourceProfile.fromResourceSpec(rs4))
.isNotEqualTo(ResourceProfile.fromResourceSpec(rs3));
ResourceSpec rs5 =
ResourceSpec.newBuilder(1.0, 100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2.2))
.build();
MemorySize networkMemory = MemorySize.ofMebiBytes(100);
assertThat(ResourceProfile.fromResourceSpec(rs5, networkMemory))
.isEqualTo(ResourceProfile.fromResourceSpec(rs3, networkMemory));
final ResourceProfile rp1 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(100)
.build();
final ResourceProfile rp2 =
ResourceProfile.newBuilder()
.setCpuCores(1.1)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(100)
.build();
final ResourceProfile rp3 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(110)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(100)
.build();
final ResourceProfile rp4 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(110)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(100)
.build();
final ResourceProfile rp5 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(110)
.setNetworkMemoryMB(100)
.build();
final ResourceProfile rp6 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(110)
.setNetworkMemoryMB(100)
.build();
final ResourceProfile rp7 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(110)
.build();
final ResourceProfile rp8 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(100)
.build();
assertThat(rp2).isNotEqualTo(rp1);
assertThat(rp3).isNotEqualTo(rp1);
assertThat(rp4).isNotEqualTo(rp1);
assertThat(rp5).isNotEqualTo(rp1);
assertThat(rp6).isNotEqualTo(rp1);
assertThat(rp7).isNotEqualTo(rp1);
assertThat(rp8).isEqualTo(rp1);
}
@Test
void testGet() {
ResourceSpec rs =
ResourceSpec.newBuilder(1.0, 100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.6))
.build();
ResourceProfile rp = ResourceProfile.fromResourceSpec(rs, MemorySize.ofMebiBytes(50));
assertThat(rp.getCpuCores()).isEqualTo(new CPUResource(1.0));
assertThat(rp.getTotalMemory().getMebiBytes()).isEqualTo(150);
assertThat(rp.getOperatorsMemory().getMebiBytes()).isEqualTo(100);
assertThat(rp.getExtendedResources().get(EXTERNAL_RESOURCE_NAME))
.isEqualTo(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.6));
}
@Test
void testMerge() {
final ResourceProfile rp1 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(100)
.build();
final ResourceProfile rp2 =
ResourceProfile.newBuilder()
.setCpuCores(2.0)
.setTaskHeapMemoryMB(200)
.setTaskOffHeapMemoryMB(200)
.setManagedMemoryMB(200)
.setNetworkMemoryMB(200)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2.0))
.build();
final ResourceProfile rp1MergeRp1 =
ResourceProfile.newBuilder()
.setCpuCores(2.0)
.setTaskHeapMemoryMB(200)
.setTaskOffHeapMemoryMB(200)
.setManagedMemoryMB(200)
.setNetworkMemoryMB(200)
.build();
final ResourceProfile rp1MergeRp2 =
ResourceProfile.newBuilder()
.setCpuCores(3.0)
.setTaskHeapMemoryMB(300)
.setTaskOffHeapMemoryMB(300)
.setManagedMemoryMB(300)
.setNetworkMemoryMB(300)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2.0))
.build();
final ResourceProfile rp2MergeRp2 =
ResourceProfile.newBuilder()
.setCpuCores(4.0)
.setTaskHeapMemoryMB(400)
.setTaskOffHeapMemoryMB(400)
.setManagedMemoryMB(400)
.setNetworkMemoryMB(400)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 4.0))
.build();
assertThat(rp1.merge(rp1)).isEqualTo(rp1MergeRp1);
assertThat(rp1.merge(rp2)).isEqualTo(rp1MergeRp2);
assertThat(rp2.merge(rp1)).isEqualTo(rp1MergeRp2);
assertThat(rp2.merge(rp2)).isEqualTo(rp2MergeRp2);
assertThat(rp1.merge(ResourceProfile.UNKNOWN)).isEqualTo(ResourceProfile.UNKNOWN);
assertThat(ResourceProfile.UNKNOWN.merge(rp1)).isEqualTo(ResourceProfile.UNKNOWN);
assertThat(ResourceProfile.UNKNOWN.merge(ResourceProfile.UNKNOWN))
.isEqualTo(ResourceProfile.UNKNOWN);
assertThat(rp1.merge(ResourceProfile.ANY)).isEqualTo(ResourceProfile.ANY);
assertThat(ResourceProfile.ANY.merge(rp1)).isEqualTo(ResourceProfile.ANY);
assertThat(ResourceProfile.ANY.merge(ResourceProfile.ANY)).isEqualTo(ResourceProfile.ANY);
}
@Test
void testMergeWithOverflow() {
final CPUResource largeDouble = new CPUResource(Double.MAX_VALUE - 1.0);
final MemorySize largeMemory = MemorySize.MAX_VALUE.subtract(MemorySize.parse("100m"));
final ResourceProfile rp1 =
ResourceProfile.newBuilder()
.setCpuCores(3.0)
.setTaskHeapMemoryMB(300)
.setTaskOffHeapMemoryMB(300)
.setManagedMemoryMB(300)
.setNetworkMemoryMB(300)
.build();
final ResourceProfile rp2 =
ResourceProfile.newBuilder()
.setCpuCores(largeDouble)
.setTaskHeapMemory(largeMemory)
.setTaskOffHeapMemory(largeMemory)
.setManagedMemory(largeMemory)
.setNetworkMemory(largeMemory)
.build();
List<ArithmeticException> exceptions = new ArrayList<>();
try {
rp2.merge(rp2);
} catch (ArithmeticException e) {
exceptions.add(e);
}
try {
rp2.merge(rp1);
} catch (ArithmeticException e) {
exceptions.add(e);
}
try {
rp1.merge(rp2);
} catch (ArithmeticException e) {
exceptions.add(e);
}
assertThat(exceptions).hasSize(3);
}
@Test
void testSubtract() {
final ResourceProfile rp1 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setManagedMemoryMB(100)
.setNetworkMemoryMB(100)
.build();
final ResourceProfile rp2 =
ResourceProfile.newBuilder()
.setCpuCores(2.0)
.setTaskHeapMemoryMB(200)
.setTaskOffHeapMemoryMB(200)
.setManagedMemoryMB(200)
.setNetworkMemoryMB(200)
.build();
final ResourceProfile rp3 =
ResourceProfile.newBuilder()
.setCpuCores(3.0)
.setTaskHeapMemoryMB(300)
.setTaskOffHeapMemoryMB(300)
.setManagedMemoryMB(300)
.setNetworkMemoryMB(300)
.build();
assertThat(rp3.subtract(rp2)).isEqualTo(rp1);
assertThat(rp2.subtract(rp1)).isEqualTo(rp1);
assertThatExceptionOfType(IllegalArgumentException.class)
.as("The subtract should failed due to trying to subtract a larger resource")
.isThrownBy(() -> rp1.subtract(rp2));
assertThat(ResourceProfile.ANY.subtract(rp3)).isEqualTo(ResourceProfile.ANY);
assertThat(ResourceProfile.ANY.subtract(ResourceProfile.ANY))
.isEqualTo(ResourceProfile.ANY);
assertThat(rp3.subtract(ResourceProfile.ANY)).isEqualTo(ResourceProfile.ANY);
assertThat(ResourceProfile.UNKNOWN.subtract(rp3)).isEqualTo(ResourceProfile.UNKNOWN);
assertThat(rp3.subtract(ResourceProfile.UNKNOWN)).isEqualTo(ResourceProfile.UNKNOWN);
assertThat(ResourceProfile.UNKNOWN.subtract(ResourceProfile.UNKNOWN))
.isEqualTo(ResourceProfile.UNKNOWN);
}
@Test
void testSubtractWithInfValues() {
// Does not equals to ANY since it has extended resources.
final ResourceProfile rp1 =
ResourceProfile.newBuilder()
.setCpuCores(Double.MAX_VALUE)
.setTaskHeapMemoryMB(Integer.MAX_VALUE)
.setTaskOffHeapMemoryMB(Integer.MAX_VALUE)
.setManagedMemoryMB(Integer.MAX_VALUE)
.setNetworkMemoryMB(Integer.MAX_VALUE)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 4.0))
.build();
final ResourceProfile rp2 =
ResourceProfile.newBuilder()
.setCpuCores(2.0)
.setTaskHeapMemoryMB(200)
.setTaskOffHeapMemoryMB(200)
.setManagedMemoryMB(200)
.setNetworkMemoryMB(200)
.build();
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> rp2.subtract(rp1));
}
@Test
void testMultiply() {
final int by = 3;
final ResourceProfile rp1 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setNetworkMemoryMB(100)
.setManagedMemoryMB(100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.0))
.build();
ResourceProfile rp2 = rp1;
for (int i = 1; i < by; ++i) {
rp2 = rp2.merge(rp1);
}
assertThat(rp1.multiply(by)).isEqualTo(rp2);
}
@Test
void testMultiplyZero() {
final ResourceProfile rp1 =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setNetworkMemoryMB(100)
.setManagedMemoryMB(100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.0))
.build();
assertThat(rp1.multiply(0)).isEqualTo(ResourceProfile.ZERO);
}
@Test
public void testMultiplyNegative() {
final ResourceProfile rp =
ResourceProfile.newBuilder()
.setCpuCores(1.0)
.setTaskHeapMemoryMB(100)
.setTaskOffHeapMemoryMB(100)
.setNetworkMemoryMB(100)
.setManagedMemoryMB(100)
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.0))
.build();
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> rp.multiply(-2));
}
@Test
void testFromSpecWithSerializationCopy() throws Exception {
final ResourceSpec copiedSpec =
CommonTestUtils.createCopySerializable(ResourceSpec.UNKNOWN);
final ResourceProfile profile = ResourceProfile.fromResourceSpec(copiedSpec);
assertThat(profile).isEqualTo(ResourceProfile.fromResourceSpec(ResourceSpec.UNKNOWN));
}
@Test
void testSingletonPropertyOfUnknown() throws Exception {
final ResourceProfile copiedProfile =
CommonTestUtils.createCopySerializable(ResourceProfile.UNKNOWN);
assertThat(copiedProfile).isSameAs(ResourceProfile.UNKNOWN);
}
@Test
void testSingletonPropertyOfAny() throws Exception {
final ResourceProfile copiedProfile =
CommonTestUtils.createCopySerializable(ResourceProfile.ANY);
assertThat(copiedProfile).isSameAs(ResourceProfile.ANY);
}
@Test
void doesNotIncludeCPUAndMemoryInToStringIfTheyAreTooLarge() {
double tooLargeCpuCount = MAX_CPU_CORE_NUMBER_TO_LOG.doubleValue() + 1.0;
ResourceProfile resourceProfile = createResourceProfile(tooLargeCpuCount, TOO_LARGE_MEMORY);
assertThat(resourceProfile.toString())
.doesNotContain("cpuCores=")
.doesNotContain("taskHeapMemory=");
}
@Test
void includesCPUAndMemoryInToStringIfTheyAreBelowThreshold() {
ResourceProfile resourceProfile = createResourceProfile(1.0, MemorySize.ofMebiBytes(4));
assertThat(resourceProfile.toString()).contains("cpuCores=").contains("taskHeapMemory=");
}
@Test
void testZeroExtendedResourceFromConstructor() {
final ResourceProfile resourceProfile =
ResourceProfile.newBuilder()
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 0.0))
.build();
assertThat(resourceProfile.getExtendedResources()).isEmpty();
}
@Test
void testZeroExtendedResourceFromSubtract() {
final ResourceProfile resourceProfile =
ResourceProfile.newBuilder()
.setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.0))
.build();
assertThat(resourceProfile.subtract(resourceProfile).getExtendedResources()).isEmpty();
}
private static ResourceProfile createResourceProfile(double cpu, MemorySize taskHeapMemory) {
return ResourceProfile.newBuilder()
.setCpuCores(cpu)
.setTaskHeapMemory(taskHeapMemory)
.build();
}
}
| ResourceProfileTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java | {
"start": 1337,
"end": 1583
} | interface ____ to the user in order to get
* JobQueue related information from the {@link JobTracker}
*
* It provides the facility to list the JobQueues present and ability to view
* the list of jobs within a specific JobQueue
*
**/
| provided |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileExclusiveReadLockCopyTest.java | {
"start": 1282,
"end": 2660
} | class ____ extends ContextTestSupport {
public static final String FILE_QUERY = "?readLock=fileLock&initialDelay=0&delay=10";
private static final String DEST = FileExclusiveReadLockCopyTest.class.getSimpleName();
@Test
@DisabledOnOs(OS.WINDOWS)
public void testCopy() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
final Path path = testFile(DEST + File.separator + "hello.txt");
mock.expectedFileExists(path, "Hello World");
template.sendBodyAndHeader(fileUri(FILE_QUERY), "Hello World", Exchange.FILE_NAME, "hello.txt");
// The file may have been created, but not yet flushed.
Awaitility.await()
.atMost(10, TimeUnit.SECONDS).until(this::isFlushed);
mock.assertIsSatisfied();
}
private boolean isFlushed() {
final Path path = testFile(DEST + File.separator + "hello.txt");
return path.toFile().exists() && "Hello World".length() == path.toFile().length();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(fileUri(FILE_QUERY))
.to(fileUri(DEST))
.to("mock:result");
}
};
}
}
| FileExclusiveReadLockCopyTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/TestContextAnnotationUtilsTests.java | {
"start": 25816,
"end": 25873
} | class ____ {
}
@MetaConfig
static | MetaCycleAnnotatedClass |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug2.java | {
"start": 1304,
"end": 1493
} | class ____ {
private long id;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
}
}
| Article |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.