language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3679PluginExecIdInterpolationTest.java
|
{
"start": 1147,
"end": 1684
}
|
class ____ extends AbstractMavenIntegrationTestCase {
    /**
     * Integration test for MNG-3679 (plugin execution id interpolation).
     * Runs the "validate" phase on the bundled mng-3679 sample project and
     * asserts that the build completes without errors and that the plugin
     * execution produced its marker file.
     */
    @Test
    public void testitMNG3679() throws Exception {
        // Unpack the sample project shipped with the IT suite.
        File testDir = extractResources("/mng-3679");
        Verifier verifier = newVerifier(testDir.getAbsolutePath());
        // Disable autoclean but remove "target" explicitly, so the
        // file-presence check below cannot pass on stale output.
        verifier.setAutoclean(false);
        verifier.deleteDirectory("target");
        verifier.addCliArgument("validate");
        verifier.execute();
        verifier.verifyErrorFreeLog();
        // Presence of the marker file proves the interpolated execution ran.
        verifier.verifyFilePresent("target/check.txt");
    }
}
|
MavenITmng3679PluginExecIdInterpolationTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java
|
{
"start": 1218,
"end": 1941
}
|
class ____ extends AcknowledgedRequest<Request> {
private final boolean dryRun;
public Request(StreamInput in) throws IOException {
super(in);
this.dryRun = in.readBoolean();
}
public Request(boolean dryRun, TimeValue timeout) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout);
this.dryRun = dryRun;
}
public boolean isDryRun() {
return dryRun;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(dryRun);
}
@Override
public int hashCode() {
// the base
|
Request
|
java
|
apache__avro
|
lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java
|
{
"start": 890,
"end": 2907
}
|
class ____<T extends Comparable> {
    final Input file;
    final ColumnMetaData metaData;
    long start;
    long dataStart;
    BlockDescriptor[] blocks;
    long[] blockStarts; // absolute file position of each block, for random access
    long[] firstRows;   // first row number of each block, for binary searches
    T[] firstValues;    // first value of each block, for binary searches

    public ColumnDescriptor(Input file, ColumnMetaData metaData) {
        this.file = file;
        this.metaData = metaData;
    }

    /** Returns the index of the block that contains the given row. */
    public int findBlock(long row) {
        int idx = Arrays.binarySearch(firstRows, row);
        // On a miss, binarySearch returns (-(insertionPoint) - 1); the block
        // containing the row is the one just before the insertion point.
        return idx < 0 ? -idx - 2 : idx;
    }

    /** Returns the index of the last block whose first value is not greater than {@code value}. */
    public int findBlock(T value) {
        int idx = Arrays.binarySearch(firstValues, value);
        return idx < 0 ? -idx - 2 : idx;
    }

    /** Number of blocks in this column. */
    public int blockCount() {
        return blocks.length;
    }

    /** Returns the row number just past the end of the given block, or 0 for an invalid block. */
    public long lastRow(int block) {
        if (block < 0 || blocks.length == 0)
            return 0;
        return firstRows[block] + blocks[block].rowCount;
    }

    /** Lazily reads block descriptors and builds the per-block start/row indexes. */
    public void ensureBlocksRead() throws IOException {
        if (blocks != null)
            return; // already loaded

        // read block descriptors
        InputBuffer header = new InputBuffer(file, start);
        int count = header.readFixed32();
        BlockDescriptor[] descriptors = new BlockDescriptor[count];
        boolean indexed = metaData.hasIndexValues();
        if (indexed)
            firstValues = (T[]) new Comparable[count];
        for (int i = 0; i < count; i++) {
            descriptors[i] = BlockDescriptor.read(header);
            if (indexed)
                firstValues[i] = header.readValue(metaData.getType());
        }
        dataStart = header.tell();

        // derive blockStarts and firstRows from the descriptors
        Checksum checksum = Checksum.get(metaData);
        blockStarts = new long[count];
        firstRows = new long[count];
        long position = dataStart;
        long rowNumber = 0;
        for (int i = 0; i < count; i++) {
            blockStarts[i] = position;
            firstRows[i] = rowNumber;
            position += descriptors[i].compressedSize + checksum.size();
            rowNumber += descriptors[i].rowCount;
        }
        // assigned last, matching the original ordering: this field doubles
        // as the "already loaded" flag checked at the top of this method
        this.blocks = descriptors;
    }
}
|
ColumnDescriptor
|
java
|
apache__camel
|
catalog/camel-report-maven-plugin/src/main/java/org/apache/camel/maven/htmlxlsx/model/RouteTotalsStatistic.java
|
{
"start": 858,
"end": 2594
}
|
class ____ {
    // Running totals accumulated while processing route reports.
    private int totalEips;
    private int totalEipsTested;
    private int totalProcessingTime;
    // Percentage (0-100) of EIPs covered by tests; integer division, so it truncates.
    private int coverage;

    /** Adds to the EIP total and refreshes the coverage percentage. */
    public void incrementTotalEips(int totalEips) {
        this.totalEips += totalEips;
        calculateCoverage();
    }

    /** Adds to the tested-EIP total and refreshes the coverage percentage. */
    public void incrementTotalEipsTested(int totalEipsTested) {
        this.totalEipsTested += totalEipsTested;
        calculateCoverage();
    }

    /** Accumulates processing time; does not affect coverage. */
    public void incrementTotalProcessingTime(int processingTime) {
        totalProcessingTime += processingTime;
    }

    /** Recomputes coverage as an integer percentage; left untouched when there are no EIPs. */
    protected void calculateCoverage() {
        if (totalEips > 0) {
            coverage = (100 * totalEipsTested) / totalEips;
        }
    }

    public int getTotalEips() {
        return totalEips;
    }

    public void setTotalEips(int totalEips) {
        this.totalEips = totalEips;
    }

    public int getTotalEipsTested() {
        return totalEipsTested;
    }

    public void setTotalEipsTested(int totalEipsTested) {
        this.totalEipsTested = totalEipsTested;
    }

    public int getTotalProcessingTime() {
        return totalProcessingTime;
    }

    public void setTotalProcessingTime(int totalProcessingTime) {
        this.totalProcessingTime = totalProcessingTime;
    }

    public int getCoverage() {
        return coverage;
    }

    public void setCoverage(int coverage) {
        this.coverage = coverage;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("RouteTotalsStatistic{");
        text.append("totalEips=").append(totalEips);
        text.append(", totalEipsTested=").append(totalEipsTested);
        text.append(", totalProcessingTime=").append(totalProcessingTime);
        text.append(", coverage=").append(coverage);
        text.append('}');
        return text.toString();
    }
}
|
RouteTotalsStatistic
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/util/concurrent/TestPlatform.java
|
{
"start": 1478,
"end": 2761
}
|
class ____ {
    /** Asserts that a pseudo-timed {@code get} on a still-pending future times out. */
    static void verifyGetOnPendingFuture(Future<?> future) {
        checkNotNull(future);
        try {
            pseudoTimedGetUninterruptibly(future, 10, MILLISECONDS);
        } catch (TimeoutException expected) {
            return; // timing out is the required outcome for a pending future
        } catch (ExecutionException e) {
            throw new AssertionError(e);
        }
        fail(); // the call returned a value, so the future was not pending
    }

    /** Asserts that a timed {@code get} on a still-pending future times out immediately. */
    static void verifyTimedGetOnPendingFuture(Future<?> future) {
        try {
            getUninterruptibly(future, 0, SECONDS);
        } catch (TimeoutException expected) {
            return; // timing out is the required outcome for a pending future
        } catch (ExecutionException e) {
            throw new AssertionError(e);
        }
        fail(); // the call returned a value, so the future was not pending
    }

    /** Asserts the current thread's interrupt flag is clear (without clearing it). */
    static void verifyThreadWasNotInterrupted() {
        assertFalse(Thread.currentThread().isInterrupted());
    }

    /** Clears the current thread's interrupt flag. */
    static void clearInterrupt() {
        Thread.interrupted();
    }

    /**
     * Retrieves the result of a {@code Future} known to be done but uses the {@code get(long,
     * TimeUnit)} overload in order to test that method.
     */
    static <V extends @Nullable Object> V getDoneFromTimeoutOverload(Future<V> future)
            throws ExecutionException {
        checkState(future.isDone(), "Future was expected to be done: %s", future);
        try {
            return getUninterruptibly(future, 0, SECONDS);
        } catch (TimeoutException e) {
            // impossible for a done future; surface as a test failure
            throw new AssertionError(e);
        }
    }

    private TestPlatform() {}
}
|
TestPlatform
|
java
|
google__guice
|
extensions/assistedinject/test/com/google/inject/assistedinject/FactoryModuleBuilderTest.java
|
{
"start": 15201,
"end": 15371
}
|
class ____ implements Car {
    // Color chosen by the factory caller at creation time.
    private final Color color;

    /**
     * Created via assisted injection: {@code color} is supplied by the
     * factory caller ({@code @Assisted}) rather than by the injector.
     */
    @Inject
    public Beetle(@Assisted Color color) {
        this.color = color;
    }
}
public static
|
Beetle
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java
|
{
"start": 23804,
"end": 25964
}
|
class ____ implements Writeable, ToXContentObject {
    // Analyzer/tokenizer/filter name this token list belongs to.
    private final String name;
    // May be null when read from the wire (written as an optional array).
    private final AnalyzeToken[] tokens;
    static final String NAME = "name";

    /** Creates a named token list. */
    public AnalyzeTokenList(String name, AnalyzeToken[] tokens) {
        this.name = name;
        this.tokens = tokens;
    }

    /** Wire deserialization; mirrors {@link #writeTo}. */
    AnalyzeTokenList(StreamInput in) throws IOException {
        name = in.readString();
        tokens = in.readOptionalArray(AnalyzeToken::new, AnalyzeToken[]::new);
    }

    public String getName() {
        return name;
    }

    public AnalyzeToken[] getTokens() {
        return tokens;
    }

    /** Renders the name and token array fields without the surrounding object. */
    void toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException {
        builder.field(NAME, this.name);
        builder.startArray(Response.Fields.TOKENS);
        if (tokens != null) {
            for (int i = 0; i < tokens.length; i++) {
                tokens[i].toXContent(builder, params);
            }
        }
        builder.endArray();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        toXContentWithoutObject(builder, params);
        builder.endObject();
        return builder;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        out.writeOptionalArray(tokens);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        AnalyzeTokenList other = (AnalyzeTokenList) o;
        return Objects.equals(name, other.name) && Arrays.equals(tokens, other.tokens);
    }

    @Override
    public int hashCode() {
        // same value as Objects.hash(name) combined with the array hash
        return 31 * Objects.hash(name) + Arrays.hashCode(tokens);
    }
}
public static
|
AnalyzeTokenList
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ReturnValueIgnoredTest.java
|
{
"start": 13414,
"end": 14022
}
|
class ____ {
    // Error Prone test fixture: each "// BUG: Diagnostic contains:" marker
    // asserts that the named diagnostic is reported on the following line.
    // Do not insert lines between a marker and its statement.
    void optional() {
        // BUG: Diagnostic contains: ReturnValueIgnored
        Optional.empty();
        // BUG: Diagnostic contains: ReturnValueIgnored
        Optional.of(42);
        // BUG: Diagnostic contains: ReturnValueIgnored
        Optional.ofNullable(null);
    }
}
""")
.doTest();
}
@Test
public void optionalInstanceMethods() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.util.Optional;
|
Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/codec/vectors/diskbbq/ES920DiskBBQVectorsWriter.java
|
{
"start": 2160,
"end": 27537
}
|
class ____ extends IVFVectorsWriter {
private static final Logger logger = LogManager.getLogger(ES920DiskBBQVectorsWriter.class);
private final int vectorPerCluster;
private final int centroidsPerParentCluster;
/**
 * Creates a writer using the current on-disk format version
 * ({@code ES920DiskBBQVectorsFormat.VERSION_CURRENT}); delegates to the
 * package-private constructor that takes an explicit version.
 */
public ES920DiskBBQVectorsWriter(
    SegmentWriteState state,
    String rawVectorFormatName,
    boolean useDirectIOReads,
    FlatVectorsWriter rawVectorDelegate,
    int vectorPerCluster,
    int centroidsPerParentCluster
) throws IOException {
    this(
        state,
        rawVectorFormatName,
        useDirectIOReads,
        rawVectorDelegate,
        vectorPerCluster,
        centroidsPerParentCluster,
        ES920DiskBBQVectorsFormat.VERSION_CURRENT
    );
}
/**
 * Creates a writer targeting an explicit on-disk format version; the public
 * constructor delegates here with the current version.
 *
 * NOTE(review): {@code useDirectIOReads} is a boxed {@code Boolean} here but a
 * primitive {@code boolean} in the public constructor — confirm whether the
 * boxing (and a possible null unboxing NPE in the super call) is intentional.
 */
ES920DiskBBQVectorsWriter(
    SegmentWriteState state,
    String rawVectorFormatName,
    Boolean useDirectIOReads,
    FlatVectorsWriter rawVectorDelegate,
    int vectorPerCluster,
    int centroidsPerParentCluster,
    int writeVersion
) throws IOException {
    super(state, rawVectorFormatName, useDirectIOReads, rawVectorDelegate, writeVersion);
    this.vectorPerCluster = vectorPerCluster;
    this.centroidsPerParentCluster = centroidsPerParentCluster;
}
/**
 * Flush path: groups vector ordinals by centroid assignment and writes one
 * posting list per centroid to {@code postingsOutput}, quantizing the
 * heap-resident float vectors on the fly.
 *
 * Per-centroid layout written here (after a leading max-posting-list-size
 * VInt): raw centroid floats, centroid self dot product, doc count, a doc-id
 * block encoding byte, then vectors interleaved with delta-encoded doc ids in
 * bulks of {@code ES91OSQVectorsScorer.BULK_SIZE}.
 *
 * @param assignments          centroid ordinal per vector ordinal
 * @param overspillAssignments secondary (SOAR) assignment per vector; may be
 *                             shorter than {@code assignments} and may hold
 *                             {@code NO_SOAR_ASSIGNMENT}
 * @return per-centroid offsets and lengths, relative to {@code fileOffset}
 */
@Override
public CentroidOffsetAndLength buildAndWritePostingsLists(
    FieldInfo fieldInfo,
    CentroidSupplier centroidSupplier,
    FloatVectorValues floatVectorValues,
    IndexOutput postingsOutput,
    long fileOffset,
    int[] assignments,
    int[] overspillAssignments
) throws IOException {
    // First pass: size each centroid's posting list (overspill copies included).
    int[] centroidVectorCount = new int[centroidSupplier.size()];
    for (int i = 0; i < assignments.length; i++) {
        centroidVectorCount[assignments[i]]++;
        // if soar assignments are present, count them as well
        if (overspillAssignments.length > i && overspillAssignments[i] != NO_SOAR_ASSIGNMENT) {
            centroidVectorCount[overspillAssignments[i]]++;
        }
    }
    int maxPostingListSize = 0;
    int[][] assignmentsByCluster = new int[centroidSupplier.size()][];
    for (int c = 0; c < centroidSupplier.size(); c++) {
        int size = centroidVectorCount[c];
        maxPostingListSize = Math.max(maxPostingListSize, size);
        assignmentsByCluster[c] = new int[size];
    }
    // Second pass: bucket vector ordinals by cluster, reusing the count array
    // as per-cluster write cursors.
    Arrays.fill(centroidVectorCount, 0);
    for (int i = 0; i < assignments.length; i++) {
        int c = assignments[i];
        assignmentsByCluster[c][centroidVectorCount[c]++] = i;
        // if soar assignments are present, add them to the cluster as well
        if (overspillAssignments.length > i) {
            int s = overspillAssignments[i];
            if (s != NO_SOAR_ASSIGNMENT) {
                assignmentsByCluster[s][centroidVectorCount[s]++] = i;
            }
        }
    }
    // write the max posting list size
    postingsOutput.writeVInt(maxPostingListSize);
    // write the posting lists
    final PackedLongValues.Builder offsets = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
    final PackedLongValues.Builder lengths = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
    DiskBBQBulkWriter bulkWriter = DiskBBQBulkWriter.fromBitSize(1, ES91OSQVectorsScorer.BULK_SIZE, postingsOutput);
    OnHeapQuantizedVectors onHeapQuantizedVectors = new OnHeapQuantizedVectors(
        floatVectorValues,
        fieldInfo.getVectorDimension(),
        new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction())
    );
    final ByteBuffer buffer = ByteBuffer.allocate(fieldInfo.getVectorDimension() * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN);
    // Scratch arrays sized once for the largest cluster and reused per centroid.
    final int[] docIds = new int[maxPostingListSize];
    final int[] docDeltas = new int[maxPostingListSize];
    final int[] clusterOrds = new int[maxPostingListSize];
    DocIdsWriter idsWriter = new DocIdsWriter();
    for (int c = 0; c < centroidSupplier.size(); c++) {
        float[] centroid = centroidSupplier.centroid(c);
        int[] cluster = assignmentsByCluster[c];
        // Offsets are recorded relative to fileOffset, after float alignment.
        long offset = postingsOutput.alignFilePointer(Float.BYTES) - fileOffset;
        offsets.add(offset);
        buffer.asFloatBuffer().put(centroid);
        // write raw centroid for quantizing the query vectors
        postingsOutput.writeBytes(buffer.array(), buffer.array().length);
        // write centroid dot product for quantizing the query vectors
        postingsOutput.writeInt(Float.floatToIntBits(VectorUtil.dotProduct(centroid, centroid)));
        int size = cluster.length;
        // write docIds
        postingsOutput.writeVInt(size);
        for (int j = 0; j < size; j++) {
            docIds[j] = floatVectorValues.ordToDoc(cluster[j]);
            clusterOrds[j] = j;
        }
        // sort cluster.buffer by docIds values, this way cluster ordinals are sorted by docIds
        new IntSorter(clusterOrds, i -> docIds[i]).sort(0, size);
        // encode doc deltas
        for (int j = 0; j < size; j++) {
            docDeltas[j] = j == 0 ? docIds[clusterOrds[j]] : docIds[clusterOrds[j]] - docIds[clusterOrds[j - 1]];
        }
        onHeapQuantizedVectors.reset(centroid, size, ord -> cluster[clusterOrds[ord]]);
        byte encoding = idsWriter.calculateBlockEncoding(i -> docDeltas[i], size, ES91OSQVectorsScorer.BULK_SIZE);
        postingsOutput.writeByte(encoding);
        bulkWriter.writeVectors(onHeapQuantizedVectors, i -> {
            // for vector i we write `bulk` size docs or the remaining docs
            idsWriter.writeDocIds(d -> docDeltas[i + d], Math.min(ES91OSQVectorsScorer.BULK_SIZE, size - i), encoding, postingsOutput);
        });
        lengths.add(postingsOutput.getFilePointer() - fileOffset - offset);
    }
    if (logger.isDebugEnabled()) {
        printClusterQualityStatistics(assignmentsByCluster);
    }
    return new CentroidOffsetAndLength(offsets.build(), lengths.build());
}
/**
 * Merge path: writes the same per-centroid posting-list layout as the flush
 * path, but first quantizes every merged vector into a temporary file so
 * vectors can be re-read in cluster order without keeping the merged float
 * vectors in memory.
 *
 * Two entries are written to the temp file for every vector — the primary
 * quantization and either the overspill quantization or an all-zero
 * placeholder — presumably so entries can be addressed at a fixed stride;
 * TODO confirm against OffHeapQuantizedVectors.
 *
 * The temp file is deleted on the failure path of the writing block and
 * unconditionally in the final {@code finally}.
 *
 * @return per-centroid offsets and lengths, relative to {@code fileOffset}
 */
@Override
@SuppressForbidden(reason = "require usage of Lucene's IOUtils#deleteFilesIgnoringExceptions(...)")
public CentroidOffsetAndLength buildAndWritePostingsLists(
    FieldInfo fieldInfo,
    CentroidSupplier centroidSupplier,
    FloatVectorValues floatVectorValues,
    IndexOutput postingsOutput,
    long fileOffset,
    MergeState mergeState,
    int[] assignments,
    int[] overspillAssignments
) throws IOException {
    // first, quantize all the vectors into a temporary file
    String quantizedVectorsTempName = null;
    boolean success = false;
    try (
        IndexOutput quantizedVectorsTemp = mergeState.segmentInfo.dir.createTempOutput(
            mergeState.segmentInfo.name,
            "qvec_",
            IOContext.DEFAULT
        )
    ) {
        quantizedVectorsTempName = quantizedVectorsTemp.getName();
        OptimizedScalarQuantizer quantizer = new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
        int[] quantized = new int[fieldInfo.getVectorDimension()];
        byte[] binary = new byte[BQVectorUtils.discretize(fieldInfo.getVectorDimension(), 64) / 8];
        float[] scratch = new float[fieldInfo.getVectorDimension()];
        for (int i = 0; i < assignments.length; i++) {
            int c = assignments[i];
            float[] centroid = centroidSupplier.centroid(c);
            float[] vector = floatVectorValues.vectorValue(i);
            boolean overspill = overspillAssignments.length > i && overspillAssignments[i] != NO_SOAR_ASSIGNMENT;
            // Quantize against the primary centroid (1 bit per dimension).
            OptimizedScalarQuantizer.QuantizationResult result = quantizer.scalarQuantize(
                vector,
                scratch,
                quantized,
                (byte) 1,
                centroid
            );
            ESVectorUtil.packAsBinary(quantized, binary);
            writeQuantizedValue(quantizedVectorsTemp, binary, result);
            if (overspill) {
                int s = overspillAssignments[i];
                // write the overspill vector as well
                result = quantizer.scalarQuantize(vector, scratch, quantized, (byte) 1, centroidSupplier.centroid(s));
                ESVectorUtil.packAsBinary(quantized, binary);
                writeQuantizedValue(quantizedVectorsTemp, binary, result);
            } else {
                // write a zero vector for the overspill
                Arrays.fill(binary, (byte) 0);
                OptimizedScalarQuantizer.QuantizationResult zeroResult = new OptimizedScalarQuantizer.QuantizationResult(0f, 0f, 0f, 0);
                writeQuantizedValue(quantizedVectorsTemp, binary, zeroResult);
            }
        }
        success = true;
    } finally {
        // Clean up the temp file if quantization failed part-way through.
        if (success == false && quantizedVectorsTempName != null) {
            org.apache.lucene.util.IOUtils.deleteFilesIgnoringExceptions(mergeState.segmentInfo.dir, quantizedVectorsTempName);
        }
    }
    // Bucket vector ordinals by centroid, tracking which entries are overspill
    // copies (same two-pass counting scheme as the flush path).
    int[] centroidVectorCount = new int[centroidSupplier.size()];
    for (int i = 0; i < assignments.length; i++) {
        centroidVectorCount[assignments[i]]++;
        // if soar assignments are present, count them as well
        if (overspillAssignments.length > i && overspillAssignments[i] != NO_SOAR_ASSIGNMENT) {
            centroidVectorCount[overspillAssignments[i]]++;
        }
    }
    int maxPostingListSize = 0;
    int[][] assignmentsByCluster = new int[centroidSupplier.size()][];
    boolean[][] isOverspillByCluster = new boolean[centroidSupplier.size()][];
    for (int c = 0; c < centroidSupplier.size(); c++) {
        int size = centroidVectorCount[c];
        maxPostingListSize = Math.max(maxPostingListSize, size);
        assignmentsByCluster[c] = new int[size];
        isOverspillByCluster[c] = new boolean[size];
    }
    Arrays.fill(centroidVectorCount, 0);
    for (int i = 0; i < assignments.length; i++) {
        int c = assignments[i];
        assignmentsByCluster[c][centroidVectorCount[c]++] = i;
        // if soar assignments are present, add them to the cluster as well
        if (overspillAssignments.length > i) {
            int s = overspillAssignments[i];
            if (s != NO_SOAR_ASSIGNMENT) {
                assignmentsByCluster[s][centroidVectorCount[s]] = i;
                isOverspillByCluster[s][centroidVectorCount[s]++] = true;
            }
        }
    }
    // now we can read the quantized vectors from the temporary file
    try (IndexInput quantizedVectorsInput = mergeState.segmentInfo.dir.openInput(quantizedVectorsTempName, IOContext.DEFAULT)) {
        final PackedLongValues.Builder offsets = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
        final PackedLongValues.Builder lengths = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
        OffHeapQuantizedVectors offHeapQuantizedVectors = new OffHeapQuantizedVectors(
            quantizedVectorsInput,
            fieldInfo.getVectorDimension()
        );
        DiskBBQBulkWriter bulkWriter = DiskBBQBulkWriter.fromBitSize(1, ES91OSQVectorsScorer.BULK_SIZE, postingsOutput);
        final ByteBuffer buffer = ByteBuffer.allocate(fieldInfo.getVectorDimension() * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN);
        // write the max posting list size
        postingsOutput.writeVInt(maxPostingListSize);
        // write the posting lists
        final int[] docIds = new int[maxPostingListSize];
        final int[] docDeltas = new int[maxPostingListSize];
        final int[] clusterOrds = new int[maxPostingListSize];
        DocIdsWriter idsWriter = new DocIdsWriter();
        for (int c = 0; c < centroidSupplier.size(); c++) {
            float[] centroid = centroidSupplier.centroid(c);
            int[] cluster = assignmentsByCluster[c];
            boolean[] isOverspill = isOverspillByCluster[c];
            // Offsets are recorded relative to fileOffset, after float alignment.
            long offset = postingsOutput.alignFilePointer(Float.BYTES) - fileOffset;
            offsets.add(offset);
            // write raw centroid for quantizing the query vectors
            buffer.asFloatBuffer().put(centroid);
            postingsOutput.writeBytes(buffer.array(), buffer.array().length);
            // write centroid dot product for quantizing the query vectors
            postingsOutput.writeInt(Float.floatToIntBits(VectorUtil.dotProduct(centroid, centroid)));
            // write docIds
            int size = cluster.length;
            postingsOutput.writeVInt(size);
            for (int j = 0; j < size; j++) {
                docIds[j] = floatVectorValues.ordToDoc(cluster[j]);
                clusterOrds[j] = j;
            }
            // sort cluster.buffer by docIds values, this way cluster ordinals are sorted by docIds
            new IntSorter(clusterOrds, i -> docIds[i]).sort(0, size);
            // encode doc deltas
            for (int j = 0; j < size; j++) {
                docDeltas[j] = j == 0 ? docIds[clusterOrds[j]] : docIds[clusterOrds[j]] - docIds[clusterOrds[j - 1]];
            }
            byte encoding = idsWriter.calculateBlockEncoding(i -> docDeltas[i], size, ES91OSQVectorsScorer.BULK_SIZE);
            postingsOutput.writeByte(encoding);
            offHeapQuantizedVectors.reset(size, ord -> isOverspill[clusterOrds[ord]], ord -> cluster[clusterOrds[ord]]);
            // write vectors
            bulkWriter.writeVectors(offHeapQuantizedVectors, i -> {
                // for vector i we write `bulk` size docs or the remaining docs
                idsWriter.writeDocIds(
                    d -> docDeltas[d + i],
                    Math.min(ES91OSQVectorsScorer.BULK_SIZE, size - i),
                    encoding,
                    postingsOutput
                );
            });
            lengths.add(postingsOutput.getFilePointer() - fileOffset - offset);
        }
        if (logger.isDebugEnabled()) {
            printClusterQualityStatistics(assignmentsByCluster);
        }
        return new CentroidOffsetAndLength(offsets.build(), lengths.build());
    } finally {
        // The temp file is only needed within this method; always delete it.
        org.apache.lucene.util.IOUtils.deleteFilesIgnoringExceptions(mergeState.segmentInfo.dir, quantizedVectorsTempName);
    }
}
/**
 * Logs size statistics (count, min, max, mean, stddev, variance) over the
 * per-centroid posting lists, using Welford's online algorithm for the
 * running mean/variance. Debug-only; callers gate on {@code logger.isDebugEnabled()}.
 *
 * Fixes relative to the previous version:
 * - null clusters no longer inflate the Welford sample count (previously
 *   {@code count} was incremented before the null check, biasing the mean low
 *   and corrupting the variance)
 * - {@code max} is seeded with {@code -Float.MAX_VALUE} instead of
 *   {@code Float.MIN_VALUE} (the smallest POSITIVE float), so all-zero-length
 *   clusters report max = 0 rather than 1.4e-45
 * - the sample variance divides by the number of included samples minus one,
 *   guarded against a division by zero when fewer than two samples exist
 */
private static void printClusterQualityStatistics(int[][] clusters) {
    float min = Float.MAX_VALUE;
    float max = -Float.MAX_VALUE;
    float mean = 0;
    float m2 = 0;
    // iteratively compute the variance & mean (Welford)
    int count = 0; // number of non-null clusters included in the statistics
    for (int[] cluster : clusters) {
        if (cluster == null) {
            continue;
        }
        count += 1;
        float delta = cluster.length - mean;
        mean += delta / count;
        m2 += delta * (cluster.length - mean);
        min = Math.min(min, cluster.length);
        max = Math.max(max, cluster.length);
    }
    float variance = count > 1 ? m2 / (count - 1) : 0;
    logger.debug(
        "Centroid count: {} min: {} max: {} mean: {} stdDev: {} variance: {}",
        clusters.length,
        min,
        max,
        mean,
        Math.sqrt(variance),
        variance
    );
}
/**
 * Returns a centroid supplier that reads centroids off-heap from
 * {@code centroidsInput}; {@code globalCentroid} is not used by this
 * implementation.
 */
@Override
public CentroidSupplier createCentroidSupplier(
    IndexInput centroidsInput,
    int numCentroids,
    FieldInfo fieldInfo,
    float[] globalCentroid
) {
    return new OffHeapCentroidSupplier(centroidsInput, numCentroids, fieldInfo);
}
/**
 * Writes the centroid file, choosing between a two-level (parent/child) and
 * a flat layout: the parent layout is used only when the centroid count
 * exceeds {@code centroidsPerParentCluster^2}, i.e. when a second level can
 * actually reduce the number of centroids scanned per query.
 * {@code centroidAssignments} is currently unused here.
 */
@Override
public void writeCentroids(
    FieldInfo fieldInfo,
    CentroidSupplier centroidSupplier,
    int[] centroidAssignments,
    float[] globalCentroid,
    CentroidOffsetAndLength centroidOffsetAndLength,
    IndexOutput centroidOutput
) throws IOException {
    // TODO do we want to store these distances as well for future use?
    // TODO: sort centroids by global centroid (was doing so previously here)
    // TODO: sorting tanks recall possibly because centroids ordinals no longer are aligned
    if (centroidSupplier.size() > centroidsPerParentCluster * centroidsPerParentCluster) {
        writeCentroidsWithParents(fieldInfo, centroidSupplier, globalCentroid, centroidOffsetAndLength, centroidOutput);
    } else {
        writeCentroidsWithoutParents(fieldInfo, centroidSupplier, globalCentroid, centroidOffsetAndLength, centroidOutput);
    }
}
/** This format stores no extra per-field metadata beyond what the base class writes. */
@Override
public void doWriteMeta(IndexOutput ivfMeta, FieldInfo field, int numCentroids) {
    // Do Nothing Extra
}
/**
 * Writes the two-level centroid layout: a header with the parent-centroid
 * count and the largest child-group size, the quantized parent centroids,
 * per-parent (offset, size) ranges into the child list, the quantized child
 * centroids grouped by parent, and finally the posting-list offset/length
 * pair for each child centroid in group order.
 */
private void writeCentroidsWithParents(
    FieldInfo fieldInfo,
    CentroidSupplier centroidSupplier,
    float[] globalCentroid,
    CentroidOffsetAndLength centroidOffsetAndLength,
    IndexOutput centroidOutput
) throws IOException {
    // 7-bit quantization for centroids, in bulks sized for the int7 scorer.
    DiskBBQBulkWriter bulkWriter = DiskBBQBulkWriter.fromBitSize(7, ES92Int7VectorsScorer.BULK_SIZE, centroidOutput);
    final OptimizedScalarQuantizer osq = new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
    // Cluster the centroids themselves to obtain the parent level.
    final CentroidGroups centroidGroups = buildCentroidGroups(fieldInfo, centroidSupplier);
    centroidOutput.writeVInt(centroidGroups.centroids.length);
    centroidOutput.writeVInt(centroidGroups.maxVectorsPerCentroidLength);
    QuantizedCentroids parentQuantizeCentroid = new QuantizedCentroids(
        CentroidSupplier.fromArray(centroidGroups.centroids),
        fieldInfo.getVectorDimension(),
        osq,
        globalCentroid
    );
    bulkWriter.writeVectors(parentQuantizeCentroid, null);
    // Per-parent ranges: offset and length into the concatenated child list.
    int offset = 0;
    for (int i = 0; i < centroidGroups.centroids().length; i++) {
        centroidOutput.writeInt(offset);
        centroidOutput.writeInt(centroidGroups.vectors()[i].length);
        offset += centroidGroups.vectors()[i].length;
    }
    QuantizedCentroids childrenQuantizeCentroid = new QuantizedCentroids(
        centroidSupplier,
        fieldInfo.getVectorDimension(),
        osq,
        globalCentroid
    );
    // Children are written grouped by parent, remapped via each group's ordinals.
    for (int i = 0; i < centroidGroups.centroids().length; i++) {
        final int[] centroidAssignments = centroidGroups.vectors()[i];
        childrenQuantizeCentroid.reset(idx -> centroidAssignments[idx], centroidAssignments.length);
        bulkWriter.writeVectors(childrenQuantizeCentroid, null);
    }
    // write the centroid offsets at the end of the file
    for (int i = 0; i < centroidGroups.centroids().length; i++) {
        final int[] centroidAssignments = centroidGroups.vectors()[i];
        for (int assignment : centroidAssignments) {
            centroidOutput.writeLong(centroidOffsetAndLength.offsets().get(assignment));
            centroidOutput.writeLong(centroidOffsetAndLength.lengths().get(assignment));
        }
    }
}
/**
 * Writes the flat (single-level) centroid layout: a 0 VInt where the parent
 * layout writes the parent-centroid count, the quantized centroids, then the
 * posting-list offset/length pair per centroid.
 */
private void writeCentroidsWithoutParents(
    FieldInfo fieldInfo,
    CentroidSupplier centroidSupplier,
    float[] globalCentroid,
    CentroidOffsetAndLength centroidOffsetAndLength,
    IndexOutput centroidOutput
) throws IOException {
    // Zero parent centroids marks the single-level layout for the reader.
    centroidOutput.writeVInt(0);
    final DiskBBQBulkWriter writer = DiskBBQBulkWriter.fromBitSize(7, ES92Int7VectorsScorer.BULK_SIZE, centroidOutput);
    final OptimizedScalarQuantizer quantizer = new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
    final QuantizedCentroids quantizedCentroids = new QuantizedCentroids(
        centroidSupplier,
        fieldInfo.getVectorDimension(),
        quantizer,
        globalCentroid
    );
    writer.writeVectors(quantizedCentroids, null);
    // Trailer: one posting-list (offset, length) pair per centroid.
    final int centroidCount = centroidSupplier.size();
    for (int c = 0; c < centroidCount; c++) {
        centroidOutput.writeLong(centroidOffsetAndLength.offsets().get(c));
        centroidOutput.writeLong(centroidOffsetAndLength.lengths().get(c));
    }
}
private record CentroidGroups(float[][] centroids, int[][] vectors, int maxVectorsPerCentroidLength) {}
/**
 * Clusters the child centroids with {@link HierarchicalKMeans} (SOAR
 * disabled) to produce the parent level, then groups child-centroid
 * ordinals under their assigned parent.
 */
private CentroidGroups buildCentroidGroups(FieldInfo fieldInfo, CentroidSupplier centroidSupplier) throws IOException {
    // Adapt the centroid supplier to the FloatVectorValues interface the
    // clustering code expects; IOExceptions are tunneled as unchecked.
    final FloatVectorValues floatVectorValues = FloatVectorValues.fromFloats(new AbstractList<>() {
        @Override
        public float[] get(int index) {
            try {
                return centroidSupplier.centroid(index);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
        @Override
        public int size() {
            return centroidSupplier.size();
        }
    }, fieldInfo.getVectorDimension());
    // we use the HierarchicalKMeans to partition the space of all vectors across merging segments
    // these are small numbers so we run it with all the centroids.
    final KMeansResult kMeansResult = new HierarchicalKMeans(
        fieldInfo.getVectorDimension(),
        HierarchicalKMeans.MAX_ITERATIONS_DEFAULT,
        HierarchicalKMeans.SAMPLES_PER_CLUSTER_DEFAULT,
        HierarchicalKMeans.MAXK,
        -1 // disable SOAR assignments
    ).cluster(floatVectorValues, centroidsPerParentCluster);
    // Two-pass grouping: count members per parent, then bucket ordinals,
    // reusing the count array as per-parent write cursors.
    final int[] centroidVectorCount = new int[kMeansResult.centroids().length];
    for (int i = 0; i < kMeansResult.assignments().length; i++) {
        centroidVectorCount[kMeansResult.assignments()[i]]++;
    }
    final int[][] vectorsPerCentroid = new int[kMeansResult.centroids().length][];
    int maxVectorsPerCentroidLength = 0;
    for (int i = 0; i < kMeansResult.centroids().length; i++) {
        vectorsPerCentroid[i] = new int[centroidVectorCount[i]];
        maxVectorsPerCentroidLength = Math.max(maxVectorsPerCentroidLength, centroidVectorCount[i]);
    }
    Arrays.fill(centroidVectorCount, 0);
    for (int i = 0; i < kMeansResult.assignments().length; i++) {
        final int c = kMeansResult.assignments()[i];
        vectorsPerCentroid[c][centroidVectorCount[c]++] = i;
    }
    return new CentroidGroups(kMeansResult.centroids(), vectorsPerCentroid, maxVectorsPerCentroidLength);
}
/** Merge-time variant: the {@code mergeState} is unused; delegates to the flush-time overload. */
@Override
public CentroidAssignments calculateCentroids(FieldInfo fieldInfo, FloatVectorValues floatVectorValues, MergeState mergeState)
    throws IOException {
    return calculateCentroids(fieldInfo, floatVectorValues);
}
/**
 * Calculate the centroids for the given field.
 * We use the {@link HierarchicalKMeans} algorithm to partition the space of all vectors across merging segments,
 * targeting roughly {@code vectorPerCluster} vectors per centroid.
 *
 * @param fieldInfo merging field info
 * @param floatVectorValues the float vector values to merge
 * @return the vector assignments, soar assignments, and if asked the centroids themselves that were computed
 * @throws IOException if an I/O error occurs
 */
@Override
public CentroidAssignments calculateCentroids(FieldInfo fieldInfo, FloatVectorValues floatVectorValues) throws IOException {
    // TODO: consider hinting / bootstrapping hierarchical kmeans with the prior segments centroids
    // TODO: for flush we are doing this over the vectors and here centroids which seems duplicative
    // preliminary tests suggest recall is good using only centroids but need to do further evaluation
    KMeansResult kMeansResult = new HierarchicalKMeans(floatVectorValues.dimension()).cluster(floatVectorValues, vectorPerCluster);
    float[][] centroids = kMeansResult.centroids();
    if (logger.isDebugEnabled()) {
        logger.debug("final centroid count: {}", centroids.length);
    }
    int[] assignments = kMeansResult.assignments();
    int[] soarAssignments = kMeansResult.soarAssignments();
    return new CentroidAssignments(fieldInfo.getVectorDimension(), centroids, assignments, soarAssignments);
}
/**
 * Writes one quantized vector entry: the packed binary vector, the OSQ
 * correction terms (lower interval, upper interval, additional correction)
 * as raw float bits, and the quantized component sum as an unsigned short.
 */
static void writeQuantizedValue(IndexOutput indexOutput, byte[] binaryValue, OptimizedScalarQuantizer.QuantizationResult corrections)
    throws IOException {
    indexOutput.writeBytes(binaryValue, binaryValue.length);
    indexOutput.writeInt(Float.floatToIntBits(corrections.lowerInterval()));
    indexOutput.writeInt(Float.floatToIntBits(corrections.upperInterval()));
    indexOutput.writeInt(Float.floatToIntBits(corrections.additionalCorrection()));
    // the sum must fit in 16 bits since it is stored as an unsigned short
    assert corrections.quantizedComponentSum() >= 0 && corrections.quantizedComponentSum() <= 0xffff;
    indexOutput.writeShort((short) corrections.quantizedComponentSum());
}
static
|
ES920DiskBBQVectorsWriter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java
|
{
"start": 1471,
"end": 4678
}
|
class ____ extends AbstractArrayState implements GroupingAggregatorState {
    // NOTE: this file lives under generated-src; it is produced from a template,
    // so template-level edits should be preferred over hand edits.
    // Value every new group slot starts from.
    private final double init;
    // One slot per group id; grown on demand and backed by BigArrays.
    private DoubleArray values;

    DoubleArrayState(BigArrays bigArrays, double init) {
        super(bigArrays);
        this.values = bigArrays.newDoubleArray(1, false);
        this.values.set(0, init);
        this.init = init;
    }

    // Returns the value for a group known to be in range.
    double get(int groupId) {
        return values.get(groupId);
    }

    // Returns the value for the group, or the init value if the group was never set.
    double getOrDefault(int groupId) {
        return groupId < values.size() ? values.get(groupId) : init;
    }

    // Stores a value for the group, growing the backing array and marking the
    // group as seen when group-id tracking is enabled.
    void set(int groupId, double value) {
        ensureCapacity(groupId);
        values.set(groupId, value);
        trackGroupId(groupId);
    }

    // Builds a block of the selected groups' values. Without group tracking
    // every group is assumed present (a vector is built); with tracking,
    // groups that never received a value become nulls.
    Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) {
        if (false == trackingGroupIds()) {
            try (var builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(selected.getPositionCount())) {
                for (int i = 0; i < selected.getPositionCount(); i++) {
                    builder.appendDouble(i, values.get(selected.getInt(i)));
                }
                return builder.build().asBlock();
            }
        }
        try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount())) {
            for (int i = 0; i < selected.getPositionCount(); i++) {
                int group = selected.getInt(i);
                if (hasValue(group)) {
                    builder.appendDouble(values.get(group));
                } else {
                    builder.appendNull();
                }
            }
            return builder.build();
        }
    }

    // Grows the backing array to cover groupId, filling new slots with init.
    private void ensureCapacity(int groupId) {
        if (groupId >= values.size()) {
            long prevSize = values.size();
            values = bigArrays.grow(values, groupId + 1);
            values.fill(prevSize, values.size(), init);
        }
    }

    /** Extracts an intermediate view of the contents of this state. */
    @Override
    public void toIntermediate(
        Block[] blocks,
        int offset,
        IntVector selected,
        org.elasticsearch.compute.operator.DriverContext driverContext
    ) {
        // Emits two blocks: the values and a has-value mask for each selected group.
        assert blocks.length >= offset + 2;
        try (
            var valuesBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount());
            var hasValueBuilder = driverContext.blockFactory().newBooleanVectorFixedBuilder(selected.getPositionCount())
        ) {
            for (int i = 0; i < selected.getPositionCount(); i++) {
                int group = selected.getInt(i);
                if (group < values.size()) {
                    valuesBuilder.appendDouble(values.get(group));
                } else {
                    valuesBuilder.appendDouble(0); // TODO can we just use null?
                }
                hasValueBuilder.appendBoolean(i, hasValue(group));
            }
            blocks[offset + 0] = valuesBuilder.build();
            blocks[offset + 1] = hasValueBuilder.build().asBlock();
        }
    }

    @Override
    public void close() {
        // Release the off-heap array before the superclass state.
        Releasables.close(values, super::close);
    }
}
|
DoubleArrayState
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/config/plugins/util/PluginManagerTest.java
|
{
"start": 10111,
"end": 10913
}
|
class ____ extends ClassLoader {
private final URL descriptorUrl;
private FilteringClassLoader(final ClassLoader parent, final URL descriptorUrl) {
super(parent);
this.descriptorUrl = descriptorUrl;
}
@Override
public URL getResource(final String name) {
return PluginProcessor.PLUGIN_CACHE_FILE.equals(name) ? descriptorUrl : super.getResource(name);
}
@Override
public Enumeration<URL> getResources(final String name) throws IOException {
return PluginProcessor.PLUGIN_CACHE_FILE.equals(name)
? descriptorUrl != null ? enumeration(singletonList(descriptorUrl)) : emptyEnumeration()
: super.getResources(name);
}
}
}
|
FilteringClassLoader
|
java
|
apache__maven
|
compat/maven-embedder/src/main/java/org/apache/maven/cli/props/MavenProperties.java
|
{
"start": 1748,
"end": 27315
}
|
class ____ extends AbstractMap<String, String> {
/** Constant for the supported comment characters.*/
private static final String COMMENT_CHARS = "#!";
/** The list of possible key/value separators */
private static final char[] SEPARATORS = new char[] {'=', ':'};
/** The white space characters used as key/value separators. */
private static final char[] WHITE_SPACE = new char[] {' ', '\t', '\f'};
/**
* Unless standard java props, use UTF-8
*/
static final String DEFAULT_ENCODING = StandardCharsets.UTF_8.name();
/** Constant for the platform specific line separator.*/
private static final String LINE_SEPARATOR = System.lineSeparator();
/** Constant for the radix of hex numbers.*/
private static final int HEX_RADIX = 16;
/** Constant for the length of a unicode literal.*/
private static final int UNICODE_LEN = 4;
private final Map<String, String> storage = new LinkedHashMap<>();
private final Map<String, Layout> layout = new LinkedHashMap<>();
private List<String> header;
private List<String> footer;
private Path location;
private UnaryOperator<String> callback;
boolean substitute = true;
boolean typed;
public MavenProperties() {}
public MavenProperties(Path location) throws IOException {
this(location, null);
}
public MavenProperties(Path location, UnaryOperator<String> callback) throws IOException {
this.location = location;
this.callback = callback;
if (Files.exists(location)) {
load(location);
}
}
public MavenProperties(boolean substitute) {
this.substitute = substitute;
}
public MavenProperties(Path location, boolean substitute) {
this.location = location;
this.substitute = substitute;
}
public void load(Path location) throws IOException {
try (InputStream is = Files.newInputStream(location)) {
load(is);
}
}
public void load(URL location) throws IOException {
try (InputStream is = location.openStream()) {
load(is);
}
}
public void load(InputStream is) throws IOException {
load(new InputStreamReader(is, DEFAULT_ENCODING));
}
public void load(Reader reader) throws IOException {
loadLayout(reader, false);
}
public void save() throws IOException {
save(this.location);
}
public void save(Path location) throws IOException {
try (OutputStream os = Files.newOutputStream(location)) {
save(os);
}
}
public void save(OutputStream os) throws IOException {
save(new OutputStreamWriter(os, DEFAULT_ENCODING));
}
public void save(Writer writer) throws IOException {
saveLayout(writer, typed);
}
/**
* Store a properties into a output stream, preserving comments, special character, etc.
* This method is mainly to be compatible with the java.util.Properties class.
*
* @param os an output stream.
* @param comment this parameter is ignored as this Properties
* @throws IOException If storing fails
*/
public void store(OutputStream os, String comment) throws IOException {
this.save(os);
}
/**
* Searches for the property with the specified key in this property list.
*
* @param key the property key.
* @return the value in this property list with the specified key value.
*/
public String getProperty(String key) {
return this.get(key);
}
/**
* Searches for the property with the specified key in this property list. If the key is not found in this property
* list, the default property list, and its defaults, recursively, are then checked. The method returns the default
* value argument if the property is not found.
*
* @param key the property key.
* @param defaultValue a default value.
* @return The property value of the default value
*/
public String getProperty(String key, String defaultValue) {
if (this.get(key) != null) {
return this.get(key);
}
return defaultValue;
}
@Override
public Set<Entry<String, String>> entrySet() {
return new AbstractSet<>() {
@Override
public Iterator<Entry<String, String>> iterator() {
return new Iterator<>() {
final Iterator<Entry<String, String>> keyIterator =
storage.entrySet().iterator();
@Override
public boolean hasNext() {
return keyIterator.hasNext();
}
@Override
public Entry<String, String> next() {
final Entry<String, String> entry = keyIterator.next();
return new Entry<String, String>() {
@Override
public String getKey() {
return entry.getKey();
}
@Override
public String getValue() {
return entry.getValue();
}
@Override
public String setValue(String value) {
String old = entry.setValue(value);
if (old == null || !old.equals(value)) {
Layout l = layout.get(entry.getKey());
if (l != null) {
l.clearValue();
}
}
return old;
}
};
}
@Override
public void remove() {
keyIterator.remove();
}
};
}
@Override
public int size() {
return storage.size();
}
};
}
/**
* Returns an enumeration of all the keys in this property list, including distinct keys in the default property
* list if a key of the same name has not already been found from the main properties list.
*
* @return an enumeration of all the keys in this property list, including the keys in the default property list.
*/
public Enumeration<?> propertyNames() {
return Collections.enumeration(storage.keySet());
}
/**
* Calls the map method put. Provided for parallelism with the getProperty method.
* Enforces use of strings for property keys and values. The value returned is the result of the map call to put.
*
* @param key the key to be placed into this property list.
* @param value the value corresponding to the key.
* @return the previous value of the specified key in this property list, or null if it did not have one.
*/
public Object setProperty(String key, String value) {
return this.put(key, value);
}
@Override
public String put(String key, String value) {
String old = storage.put(key, value);
if (old == null || !old.equals(value)) {
Layout l = layout.get(key);
if (l != null) {
l.clearValue();
}
}
return old;
}
void putAllSubstituted(Map<? extends String, ? extends String> m) {
storage.putAll(m);
}
public String put(String key, List<String> commentLines, List<String> valueLines) {
commentLines = new ArrayList<>(commentLines);
valueLines = new ArrayList<>(valueLines);
String escapedKey = escapeKey(key);
StringBuilder sb = new StringBuilder();
// int lastLine = valueLines.size() - 1;
if (valueLines.isEmpty()) {
valueLines.add(escapedKey + "=");
sb.append(escapedKey).append("=");
} else {
String val0 = valueLines.get(0);
String rv0 = typed ? val0 : escapeJava(val0);
if (!val0.trim().startsWith(escapedKey)) {
valueLines.set(0, escapedKey + " = " + rv0 /*+ (0 < lastLine? "\\": "")*/);
sb.append(escapedKey).append(" = ").append(rv0);
} else {
valueLines.set(0, rv0 /*+ (0 < lastLine? "\\": "")*/);
sb.append(rv0);
}
}
for (int i = 1; i < valueLines.size(); i++) {
String val = valueLines.get(i);
valueLines.set(i, typed ? val : escapeJava(val) /*+ (i < lastLine? "\\": "")*/);
while (!val.isEmpty() && Character.isWhitespace(val.charAt(0))) {
val = val.substring(1);
}
sb.append(val);
}
String[] property = PropertiesReader.parseProperty(sb.toString());
this.layout.put(key, new Layout(commentLines, valueLines));
return storage.put(key, property[1]);
}
public String put(String key, List<String> commentLines, String value) {
commentLines = new ArrayList<>(commentLines);
this.layout.put(key, new Layout(commentLines, null));
return storage.put(key, value);
}
public String put(String key, String comment, String value) {
return put(key, Collections.singletonList(comment), value);
}
public boolean update(Map<String, String> props) {
MavenProperties properties;
if (props instanceof MavenProperties mavenProperties) {
properties = mavenProperties;
} else {
properties = new MavenProperties();
properties.putAll(props);
}
return update(properties);
}
public boolean update(MavenProperties properties) {
boolean modified = false;
// Remove "removed" properties from the cfg file
for (String key : new ArrayList<String>(this.keySet())) {
if (!properties.containsKey(key)) {
this.remove(key);
modified = true;
}
}
// Update existing keys
for (String key : properties.keySet()) {
String v = this.get(key);
List<String> comments = properties.getComments(key);
List<String> value = properties.getRaw(key);
if (v == null) {
this.put(key, comments, value);
modified = true;
} else if (!v.equals(properties.get(key))) {
if (comments.isEmpty()) {
comments = this.getComments(key);
}
this.put(key, comments, value);
modified = true;
}
}
return modified;
}
public List<String> getRaw(String key) {
if (layout.containsKey(key)) {
if (layout.get(key).getValueLines() != null) {
return new ArrayList<String>(layout.get(key).getValueLines());
}
}
List<String> result = new ArrayList<String>();
if (storage.containsKey(key)) {
result.add(storage.get(key));
}
return result;
}
public List<String> getComments(String key) {
if (layout.containsKey(key)) {
if (layout.get(key).getCommentLines() != null) {
return new ArrayList<String>(layout.get(key).getCommentLines());
}
}
return new ArrayList<String>();
}
@Override
public String remove(Object key) {
Layout l = layout.get(key);
if (l != null) {
l.clearValue();
}
return storage.remove(key);
}
@Override
public void clear() {
for (Layout l : layout.values()) {
l.clearValue();
}
storage.clear();
}
/**
* Return the comment header.
*
* @return the comment header
*/
public List<String> getHeader() {
return header;
}
/**
* Set the comment header.
*
* @param header the header to use
*/
public void setHeader(List<String> header) {
this.header = header;
}
/**
* Return the comment footer.
*
* @return the comment footer
*/
public List<String> getFooter() {
return footer;
}
/**
* Set the comment footer.
*
* @param footer the footer to use
*/
public void setFooter(List<String> footer) {
this.footer = footer;
}
/**
* Reads a properties file and stores its internal structure. The found
* properties will be added to the associated configuration object.
*
* @param in the reader to the properties file
* @throws IOException if an error occurs
*/
protected void loadLayout(Reader in, boolean maybeTyped) throws IOException {
PropertiesReader reader = new PropertiesReader(in, maybeTyped);
boolean hasProperty = false;
while (reader.nextProperty()) {
hasProperty = true;
storage.put(reader.getPropertyName(), reader.getPropertyValue());
int idx = checkHeaderComment(reader.getCommentLines());
layout.put(
reader.getPropertyName(),
new Layout(
idx < reader.getCommentLines().size()
? new ArrayList<>(reader.getCommentLines()
.subList(
idx,
reader.getCommentLines().size()))
: null,
new ArrayList<>(reader.getValueLines())));
}
typed = maybeTyped && reader.typed != null && reader.typed;
if (!typed) {
for (Entry<String, String> e : storage.entrySet()) {
e.setValue(unescapeJava(e.getValue()));
}
}
if (hasProperty) {
footer = new ArrayList<>(reader.getCommentLines());
} else {
header = new ArrayList<>(reader.getCommentLines());
}
if (substitute) {
substitute();
}
}
public void substitute() {
substitute(callback);
}
public void substitute(UnaryOperator<String> callback) {
new DefaultInterpolator().interpolate(storage, callback);
}
/**
* Writes the properties file to the given writer, preserving as much of its
* structure as possible.
*
* @param out the writer
* @throws IOException if an error occurs
*/
protected void saveLayout(Writer out, boolean typed) throws IOException {
PropertiesWriter writer = new PropertiesWriter(out, typed);
if (header != null) {
for (String s : header) {
writer.writeln(s);
}
}
for (String key : storage.keySet()) {
Layout l = layout.get(key);
if (l != null && l.getCommentLines() != null) {
for (String s : l.getCommentLines()) {
writer.writeln(s);
}
}
if (l != null && l.getValueLines() != null) {
for (int i = 0; i < l.getValueLines().size(); i++) {
String s = l.getValueLines().get(i);
if (i < l.getValueLines().size() - 1) {
writer.writeln(s + "\\");
} else {
writer.writeln(s);
}
}
} else {
writer.writeProperty(key, storage.get(key));
}
}
if (footer != null) {
for (String s : footer) {
writer.writeln(s);
}
}
writer.flush();
}
/**
* Checks if parts of the passed in comment can be used as header comment.
* This method checks whether a header comment can be defined (i.e. whether
* this is the first comment in the loaded file). If this is the case, it is
* searched for the lates blank line. This line will mark the end of the
* header comment. The return value is the index of the first line in the
* passed in list, which does not belong to the header comment.
*
* @param commentLines the comment lines
* @return the index of the next line after the header comment
*/
private int checkHeaderComment(List<String> commentLines) {
if (getHeader() == null && layout.isEmpty()) {
// This is the first comment. Search for blank lines.
int index = commentLines.size() - 1;
while (index >= 0 && !commentLines.get(index).isEmpty()) {
index--;
}
setHeader(new ArrayList<String>(commentLines.subList(0, index + 1)));
return index + 1;
} else {
return 0;
}
}
/**
* Tests whether a line is a comment, i.e. whether it starts with a comment
* character.
*
* @param line the line
* @return a flag if this is a comment line
*/
static boolean isCommentLine(String line) {
String s = line.trim();
// blank lines are also treated as comment lines
return s.isEmpty() || COMMENT_CHARS.indexOf(s.charAt(0)) >= 0;
}
/**
* <p>Unescapes any Java literals found in the <code>String</code> to a
* <code>Writer</code>.</p> This is a slightly modified version of the
* StringEscapeUtils.unescapeJava() function in commons-lang that doesn't
* drop escaped separators (i.e '\,').
*
* @param str the <code>String</code> to unescape, may be null
* @return the processed string
* @throws IllegalArgumentException if the Writer is <code>null</code>
*/
protected static String unescapeJava(String str) {
if (str == null) {
return null;
}
int sz = str.length();
StringBuilder out = new StringBuilder(sz);
StringBuilder unicode = new StringBuilder(UNICODE_LEN);
boolean hadSlash = false;
boolean inUnicode = false;
for (int i = 0; i < sz; i++) {
char ch = str.charAt(i);
if (inUnicode) {
// if in unicode, then we're reading unicode
// values in somehow
unicode.append(ch);
if (unicode.length() == UNICODE_LEN) {
// unicode now contains the four hex digits
// which represents our unicode character
try {
int value = Integer.parseInt(unicode.toString(), HEX_RADIX);
out.append((char) value);
unicode.setLength(0);
inUnicode = false;
hadSlash = false;
} catch (NumberFormatException nfe) {
throw new IllegalArgumentException("Unable to parse unicode value: " + unicode, nfe);
}
}
continue;
}
if (hadSlash) {
// handle an escaped value
hadSlash = false;
switch (ch) {
case '\\':
out.append('\\');
break;
case '\'':
out.append('\'');
break;
case '\"':
out.append('"');
break;
case 'r':
out.append('\r');
break;
case 'f':
out.append('\f');
break;
case 't':
out.append('\t');
break;
case 'n':
out.append('\n');
break;
case 'b':
out.append('\b');
break;
case 'u':
// uh-oh, we're in unicode country....
inUnicode = true;
break;
default:
out.append(ch);
break;
}
continue;
} else if (ch == '\\') {
hadSlash = true;
continue;
}
out.append(ch);
}
if (hadSlash) {
// then we're in the weird case of a \ at the end of the
// string, let's output it anyway.
out.append('\\');
}
return out.toString();
}
/**
* <p>Escapes the characters in a <code>String</code> using Java String rules.</p>
*
* <p>Deals correctly with quotes and control-chars (tab, backslash, cr, ff, etc.) </p>
*
* <p>So a tab becomes the characters <code>'\\'</code> and
* <code>'t'</code>.</p>
*
* <p>The only difference between Java strings and JavaScript strings
* is that in JavaScript, a single quote must be escaped.</p>
*
* <p>Example:</p>
* <pre>
* input string: He didn't say, "Stop!"
* output string: He didn't say, \"Stop!\"
* </pre>
*
*
* @param str String to escape values in, may be null
* @return String with escaped values, <code>null</code> if null string input
*/
@SuppressWarnings("checkstyle:MagicNumber")
protected static String escapeJava(String str) {
if (str == null) {
return null;
}
int sz = str.length();
StringBuilder out = new StringBuilder(sz * 2);
for (int i = 0; i < sz; i++) {
char ch = str.charAt(i);
// handle unicode
if (ch > 0xfff) {
out.append("\\u").append(hex(ch));
} else if (ch > 0xff) {
out.append("\\u0").append(hex(ch));
} else if (ch > 0x7f) {
out.append("\\u00").append(hex(ch));
} else if (ch < 32) {
switch (ch) {
case '\b':
out.append('\\');
out.append('b');
break;
case '\n':
out.append('\\');
out.append('n');
break;
case '\t':
out.append('\\');
out.append('t');
break;
case '\f':
out.append('\\');
out.append('f');
break;
case '\r':
out.append('\\');
out.append('r');
break;
default:
if (ch > 0xf) {
out.append("\\u00").append(hex(ch));
} else {
out.append("\\u000").append(hex(ch));
}
break;
}
} else {
switch (ch) {
case '"':
out.append('\\');
out.append('"');
break;
case '\\':
out.append('\\');
out.append('\\');
break;
default:
out.append(ch);
break;
}
}
}
return out.toString();
}
/**
* <p>Returns an upper case hexadecimal <code>String</code> for the given
* character.</p>
*
* @param ch The character to convert.
* @return An upper case hexadecimal <code>String</code>
*/
protected static String hex(char ch) {
return Integer.toHexString(ch).toUpperCase(Locale.ENGLISH);
}
/**
* <p>Checks if the value is in the given array.</p>
*
* <p>The method returns <code>false</code> if a <code>null</code> array is passed in.</p>
*
* @param array the array to search through
* @param valueToFind the value to find
* @return <code>true</code> if the array contains the object
*/
public static boolean contains(char[] array, char valueToFind) {
if (array == null) {
return false;
}
for (char c : array) {
if (valueToFind == c) {
return true;
}
}
return false;
}
/**
* Escape the separators in the key.
*
* @param key the key
* @return the escaped key
*/
private static String escapeKey(String key) {
StringBuilder newkey = new StringBuilder();
for (int i = 0; i < key.length(); i++) {
char c = key.charAt(i);
if (contains(SEPARATORS, c) || contains(WHITE_SPACE, c)) {
// escape the separator
newkey.append('\\');
newkey.append(c);
} else {
newkey.append(c);
}
}
return newkey.toString();
}
/**
* This
|
MavenProperties
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/extension/TemplateInvocationValidationException.java
|
{
"start": 895,
"end": 1112
}
|
class ____ extends JUnitException {
@Serial
private static final long serialVersionUID = 1L;
public TemplateInvocationValidationException(String message) {
super(message);
}
}
|
TemplateInvocationValidationException
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4129PluginExecutionInheritanceTest.java
|
{
"start": 1183,
"end": 2523
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that plugin executions defined in the parent with inherited=false are not executed in child modules.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4129");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.deleteDirectory("child-1/target");
verifier.deleteDirectory("child-2/target");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
List<String> executions = verifier.loadLines("target/executions.txt");
Collections.sort(executions);
assertEquals(Arrays.asList(new String[] {"inherited-execution", "non-inherited-execution"}), executions);
List<String> executions1 = verifier.loadLines("child-1/target/executions.txt");
assertEquals(Collections.singletonList("inherited-execution"), executions1);
List<String> executions2 = verifier.loadLines("child-2/target/executions.txt");
assertEquals(Collections.singletonList("inherited-execution"), executions2);
}
}
|
MavenITmng4129PluginExecutionInheritanceTest
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/AtomicDataType.java
|
{
"start": 2100,
"end": 2350
}
|
class ____ not be null."));
}
@Override
public List<DataType> getChildren() {
return Collections.emptyList();
}
@Override
public <R> R accept(DataTypeVisitor<R> visitor) {
return visitor.visit(this);
}
}
|
must
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/concurrent/package-info.java
|
{
"start": 9818,
"end": 11194
}
|
class ____ extends BackgroundInitializer<String> {
* // The URL to be loaded.
* private final URL url;
*
* public URLLoader(URL u) {
* url = u;
* }
*
* protected String initialize() throws ConcurrentException {
* try {
* InputStream in = url.openStream();
* // read content into string
* ...
* return content;
* } catch (IOException ioex) {
* throw new ConcurrentException(ioex);
* }
* }
* }
* }
* </pre>
*
* <p>
* An application creates an instance of {@code URLLoader} and starts it. Then it can do other things. When it needs the
* content of the URL it calls the initializer's {@code get()} method:
* </p>
*
* <pre>
* <code>
* URL url = new URL("http://www.application-home-page.com/");
* URLLoader loader = new URLLoader(url);
* loader.start(); // this starts the background initialization
*
* // do other stuff
* ...
* // now obtain the content of the URL
* String content;
* try {
* content = loader.get(); // this may block
* } catch (ConcurrentException cex) {
* content = "Error when loading URL " + url;
* }
* // display content
* </code>
* </pre>
*
* <p>
* Related to {@link org.apache.commons.lang3.concurrent.BackgroundInitializer} is the
* {@link org.apache.commons.lang3.concurrent.MultiBackgroundInitializer} class. As the name implies, this
|
URLLoader
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/CompressibleFSDataOutputStream.java
|
{
"start": 1091,
"end": 2356
}
|
class ____ extends FSDataOutputStream {
private final FSDataOutputStream delegate;
private final OutputStream compressingDelegate;
public CompressibleFSDataOutputStream(
FSDataOutputStream delegate, StreamCompressionDecorator compressionDecorator)
throws IOException {
this.delegate = delegate;
this.compressingDelegate = compressionDecorator.decorateWithCompression(delegate);
}
@Override
public long getPos() throws IOException {
// Underlying compression involves buffering, so the only way to report correct position is
// to flush the underlying stream. This lowers the effectivity of compression, but there is
// no other way, since the position is often used as a split point.
flush();
return delegate.getPos();
}
@Override
public void write(int b) throws IOException {
compressingDelegate.write(b);
}
@Override
public void flush() throws IOException {
compressingDelegate.flush();
}
@Override
public void sync() throws IOException {
delegate.sync();
}
@Override
public void close() throws IOException {
compressingDelegate.close();
}
}
|
CompressibleFSDataOutputStream
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/entitygraph/LoadGraphMergeTest.java
|
{
"start": 4262,
"end": 4709
}
|
class ____ {
@Id
private Long id;
private String name;
@OneToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
private GrandChild grandChild;
public Child() {
}
public Child(Long id, GrandChild grandChild) {
this.id = id;
this.grandChild = grandChild;
}
public Long getId() {
return id;
}
public GrandChild getGrandChild() {
return grandChild;
}
}
@Entity(name = "GrandChild")
public static
|
Child
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/util/ServiceLoaderUtil.java
|
{
"start": 3107,
"end": 3613
}
|
class ____ be null
final Class<?> callerClass = StackLocatorUtil.getCallerClass(2);
final Stream<S> allServices = OsgiServiceLocator.isAvailable() && callerClass != null
? Stream.concat(services, OsgiServiceLocator.loadServices(serviceType, callerClass, logger))
: services;
return allServices
// only the first occurrence of a class
.filter(service -> classes.add(service.getClass()));
}
private static final
|
may
|
java
|
netty__netty
|
example/src/main/java/io/netty/example/ocsp/OcspServerExample.java
|
{
"start": 2278,
"end": 7964
}
|
class ____ {
public static void main(String[] args) throws Exception {
// We assume there's a private key.
PrivateKey privateKey = null;
// Step 1: Load the certificate chain for netty.io. We'll need the certificate
// and the issuer's certificate and we don't need any of the intermediate certs.
// The array is assumed to be a certain order to keep things simple.
X509Certificate[] keyCertChain = parseCertificates(OcspServerExample.class, "netty_io_chain.pem");
X509Certificate certificate = keyCertChain[0];
X509Certificate issuer = keyCertChain[keyCertChain.length - 1];
// Step 2: We need the URL of the CA's OCSP responder server. It's somewhere encoded
// into the certificate! Notice that it's an HTTP URL.
URI uri = OcspUtils.ocspUri(certificate);
System.out.println("OCSP Responder URI: " + uri);
if (uri == null) {
throw new IllegalStateException("The CA/certificate doesn't have an OCSP responder");
}
// Step 3: Construct the OCSP request
OCSPReq request = new OcspRequestBuilder()
.certificate(certificate)
.issuer(issuer)
.build();
// Step 4: Do the request to the CA's OCSP responder
OCSPResp response = OcspUtils.request(uri, request, 5L, TimeUnit.SECONDS);
if (response.getStatus() != OCSPResponseStatus.SUCCESSFUL) {
throw new IllegalStateException("response-status=" + response.getStatus());
}
// Step 5: Is my certificate any good or has the CA revoked it?
BasicOCSPResp basicResponse = (BasicOCSPResp) response.getResponseObject();
SingleResp first = basicResponse.getResponses()[0];
CertificateStatus status = first.getCertStatus();
System.out.println("Status: " + (status == CertificateStatus.GOOD ? "Good" : status));
System.out.println("This Update: " + first.getThisUpdate());
System.out.println("Next Update: " + first.getNextUpdate());
if (status != null) {
throw new IllegalStateException("certificate-status=" + status);
}
BigInteger certSerial = certificate.getSerialNumber();
BigInteger ocspSerial = first.getCertID().getSerialNumber();
if (!certSerial.equals(ocspSerial)) {
throw new IllegalStateException("Bad Serials=" + certSerial + " vs. " + ocspSerial);
}
// Step 6: Cache the OCSP response and use it as long as it's not
// expired. The exact semantics are beyond the scope of this example.
if (!OpenSsl.isAvailable()) {
throw new IllegalStateException("OpenSSL is not available!");
}
if (!OpenSsl.isOcspSupported()) {
throw new IllegalStateException("OCSP is not supported!");
}
if (privateKey == null) {
throw new IllegalStateException("Because we don't have a PrivateKey we can't continue past this point.");
}
ReferenceCountedOpenSslContext context
= (ReferenceCountedOpenSslContext) SslContextBuilder.forServer(privateKey, keyCertChain)
.sslProvider(SslProvider.OPENSSL)
.enableOcsp(true)
.build();
try {
ServerBootstrap bootstrap = new ServerBootstrap()
.childHandler(newServerHandler(context, response));
// so on and so forth...
} finally {
context.release();
}
}
private static ChannelInitializer<Channel> newServerHandler(final ReferenceCountedOpenSslContext context,
final OCSPResp response) {
return new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) throws Exception {
SslHandler sslHandler = context.newHandler(ch.alloc());
if (response != null) {
ReferenceCountedOpenSslEngine engine
= (ReferenceCountedOpenSslEngine) sslHandler.engine();
engine.setOcspResponse(response.getEncoded());
}
ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast(sslHandler);
// so on and so forth...
}
};
}
private static X509Certificate[] parseCertificates(Class<?> clazz, String name) throws Exception {
try (InputStream in = clazz.getResourceAsStream(name)) {
if (in == null) {
throw new FileNotFoundException("clazz=" + clazz + ", name=" + name);
}
try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, CharsetUtil.US_ASCII))) {
return parseCertificates(reader);
}
}
}
private static X509Certificate[] parseCertificates(Reader reader) throws Exception {
JcaX509CertificateConverter converter = new JcaX509CertificateConverter()
.setProvider(new BouncyCastleProvider());
List<X509Certificate> dst = new ArrayList<X509Certificate>();
PEMParser parser = new PEMParser(reader);
try {
X509CertificateHolder holder = null;
while ((holder = (X509CertificateHolder) parser.readObject()) != null) {
X509Certificate certificate = converter.getCertificate(holder);
if (certificate == null) {
continue;
}
dst.add(certificate);
}
} finally {
parser.close();
}
return dst.toArray(EmptyArrays.EMPTY_X509_CERTIFICATES);
}
}
|
OcspServerExample
|
java
|
apache__camel
|
components/camel-ai/camel-langchain4j-tools/src/test/java/org/apache/camel/component/langchain4j/tools/ToolsHelper.java
|
{
"start": 1082,
"end": 1943
}
|
class ____ {
private ToolsHelper() {
}
public static ChatModel createModel(OllamaService service) {
return OpenAiChatModel.builder()
.apiKey(service.apiKey())
.modelName(service.modelName())
.baseUrl(service.baseUrlV1())
.temperature(0.0)
.timeout(ofSeconds(60))
.logRequests(true)
.logResponses(true)
.build();
}
public static ChatModel createModel(String baseUrl) {
return OpenAiChatModel.builder()
.apiKey("NOT_NEEDED")
.modelName("MOCK")
.baseUrl(baseUrl)
.temperature(0.0)
.timeout(ofSeconds(60))
.logRequests(true)
.logResponses(true)
.build();
}
}
|
ToolsHelper
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/metrics/Sensor.java
|
{
"start": 2765,
"end": 15278
}
|
enum ____ {
INFO(0, "INFO"), DEBUG(1, "DEBUG"), TRACE(2, "TRACE");
private static final RecordingLevel[] ID_TO_TYPE;
private static final int MIN_RECORDING_LEVEL_KEY = 0;
public static final int MAX_RECORDING_LEVEL_KEY;
static {
int maxRL = -1;
for (RecordingLevel level : RecordingLevel.values()) {
maxRL = Math.max(maxRL, level.id);
}
RecordingLevel[] idToName = new RecordingLevel[maxRL + 1];
for (RecordingLevel level : RecordingLevel.values()) {
idToName[level.id] = level;
}
ID_TO_TYPE = idToName;
MAX_RECORDING_LEVEL_KEY = maxRL;
}
/** an english description of the api--this is for debugging and can change */
public final String name;
/** the permanent and immutable id of an API--this can't change ever */
public final short id;
RecordingLevel(int id, String name) {
this.id = (short) id;
this.name = name;
}
public static RecordingLevel forId(int id) {
if (id < MIN_RECORDING_LEVEL_KEY || id > MAX_RECORDING_LEVEL_KEY)
throw new IllegalArgumentException(String.format("Unexpected RecordLevel id `%d`, it should be between `%d` " +
"and `%d` (inclusive)", id, MIN_RECORDING_LEVEL_KEY, MAX_RECORDING_LEVEL_KEY));
return ID_TO_TYPE[id];
}
/** Case insensitive lookup by protocol name */
public static RecordingLevel forName(String name) {
return RecordingLevel.valueOf(name.toUpperCase(Locale.ROOT));
}
public boolean shouldRecord(final int configId) {
if (configId == INFO.id) {
return this.id == INFO.id;
} else if (configId == DEBUG.id) {
return this.id == INFO.id || this.id == DEBUG.id;
} else if (configId == TRACE.id) {
return true;
} else {
throw new IllegalStateException("Did not recognize recording level " + configId);
}
}
}
// Verbosity threshold for this sensor; values are recorded only when the
// registry's configured record level permits it (see shouldRecord()).
private final RecordingLevel recordingLevel;

/**
 * Creates a sensor (package-private: sensors are created through the metrics registry).
 *
 * @param registry owning registry, used to register the metrics this sensor maintains
 * @param name unique sensor name (non-null)
 * @param parents sensors that also receive every value recorded here; null means none
 * @param config default config used when a stat is added without an explicit override
 * @param time time source for record timestamps and expiration checks
 * @param inactiveSensorExpirationTimeSeconds idle period after which the sensor is eligible for expiry
 * @param recordingLevel verbosity level of this sensor
 */
Sensor(Metrics registry, String name, Sensor[] parents, MetricConfig config, Time time,
       long inactiveSensorExpirationTimeSeconds, RecordingLevel recordingLevel) {
    super();
    this.registry = registry;
    this.name = Objects.requireNonNull(name);
    this.parents = parents == null ? new Sensor[0] : parents;
    this.metrics = new LinkedHashMap<>();
    this.stats = new ArrayList<>();
    this.config = config;
    this.time = time;
    this.inactiveSensorExpirationTimeMs = TimeUnit.MILLISECONDS.convert(inactiveSensorExpirationTimeSeconds, TimeUnit.SECONDS);
    // Start the expiration clock at creation time.
    this.lastRecordTime = time.milliseconds();
    this.recordingLevel = recordingLevel;
    this.metricLock = new Object();
    // Fail fast if the parent chain contains a cycle.
    checkForest(new HashSet<>());
}
/* Validate that this sensor doesn't end up referencing itself */
private void checkForest(Set<Sensor> sensors) {
    // The set accumulates every sensor seen walking up the parent chain;
    // failing to add means this sensor was already visited, i.e. a cycle.
    if (!sensors.add(this))
        throw new IllegalArgumentException("Circular dependency in sensors: " + name() + " is its own parent.");
    for (Sensor parent : parents)
        parent.checkForest(sensors);
}
/**
 * The name this sensor is registered with. This name will be unique among all registered sensors.
 *
 * @return the unique sensor name
 */
public String name() {
    return this.name;
}
// Package-private accessor exposing the parent sensors as an unmodifiable list.
List<Sensor> parents() {
    return unmodifiableList(asList(parents));
}
/**
 * @return true if the sensor's record level indicates that the metric will be recorded, false otherwise
 */
public boolean shouldRecord() {
    // Compare this sensor's own level against the level configured on the registry.
    return this.recordingLevel.shouldRecord(config.recordLevel().id);
}
/**
 * Record an occurrence, this is just short-hand for {@link #record(double) record(1.0)}
 */
public void record() {
    if (!shouldRecord())
        return;
    recordInternal(1.0d, time.milliseconds(), true);
}
/**
 * Record a value with this sensor
 * @param value The value to record
 * @throws QuotaViolationException if recording this value moves a metric beyond its configured maximum or minimum
 *         bound
 */
public void record(double value) {
    if (!shouldRecord())
        return;
    recordInternal(value, time.milliseconds(), true);
}
/**
 * Record a value at a known time. This method is slightly faster than {@link #record(double)} since it will reuse
 * the time stamp.
 * @param value The value we are recording
 * @param timeMs The current POSIX time in milliseconds
 * @throws QuotaViolationException if recording this value moves a metric beyond its configured maximum or minimum
 *         bound
 */
public void record(double value, long timeMs) {
    if (!shouldRecord())
        return;
    recordInternal(value, timeMs, true);
}
/**
 * Record a value at a known time. This method is slightly faster than {@link #record(double)} since it will reuse
 * the time stamp.
 * @param value The value we are recording
 * @param timeMs The current POSIX time in milliseconds
 * @param checkQuotas Indicate if quota must be enforced or not
 * @throws QuotaViolationException if recording this value moves a metric beyond its configured maximum or minimum
 *         bound
 */
public void record(double value, long timeMs, boolean checkQuotas) {
    if (!shouldRecord())
        return;
    recordInternal(value, timeMs, checkQuotas);
}
// Applies the value to every registered stat, optionally enforces quotas,
// then forwards the value to all parent sensors.
private void recordInternal(double value, long timeMs, boolean checkQuotas) {
    this.lastRecordTime = timeMs;
    synchronized (this) {
        // metricLock() guards stat updates and metric reads; see its javadoc for the lock order.
        synchronized (metricLock()) {
            // increment all the stats
            for (StatAndConfig statAndConfig : this.stats) {
                statAndConfig.stat.record(statAndConfig.config(), value, timeMs);
            }
        }
        // Quotas are checked after recording, so a violation reflects the just-recorded value.
        if (checkQuotas)
            checkQuotas(timeMs);
    }
    // Note: parents are recorded outside this sensor's synchronized block.
    for (Sensor parent : parents)
        parent.record(value, timeMs, checkQuotas);
}
/**
 * Check if we have violated our quota for any metric that has a configured quota
 *
 * @throws QuotaViolationException if any metric's quota is violated
 */
public void checkQuotas() {
    checkQuotas(time.milliseconds());
}
/**
 * Checks every metric of this sensor that has a configured quota against its current value
 * at the given time, throwing on the first violation found.
 *
 * @param timeMs the POSIX time in milliseconds at which metric values are measured
 * @throws QuotaViolationException on the first metric whose quota is violated
 */
public void checkQuotas(long timeMs) {
    for (KafkaMetric metric : this.metrics.values()) {
        MetricConfig config = metric.config();
        if (config == null)
            continue;
        Quota quota = config.quota();
        if (quota == null)
            continue;
        double value = metric.measurableValue(timeMs);
        // A TokenBucket signals violation by going negative; all other measurables
        // are checked directly against the quota bound.
        boolean violated = metric.measurable() instanceof TokenBucket
                ? value < 0
                : !quota.acceptable(value);
        if (violated)
            throw new QuotaViolationException(metric, value, quota.bound());
    }
}
/**
 * Register a compound statistic with this sensor with no config override
 * @param stat The stat to register
 * @return true if stat is added to sensor, false if sensor is expired
 */
public boolean add(CompoundStat stat) {
    // Delegates with a null config, meaning "use this sensor's default config".
    return add(stat, null);
}
/**
 * Register a compound statistic with this sensor which yields multiple measurable quantities (like a histogram)
 * @param stat The stat to register
 * @param config The configuration for this stat. If null then the stat will use the default configuration for this
 *        sensor.
 * @return true if stat is added to sensor, false if sensor is expired
 */
public synchronized boolean add(CompoundStat stat, MetricConfig config) {
    // Expired sensors refuse new stats; callers must re-create the sensor.
    if (hasExpired())
        return false;
    final MetricConfig statConfig = config == null ? this.config : config;
    stats.add(new StatAndConfig(Objects.requireNonNull(stat), () -> statConfig));
    Object lock = metricLock();
    for (NamedMeasurable m : stat.stats()) {
        final KafkaMetric metric = new KafkaMetric(lock, m.name(), m.stat(), statConfig, time);
        // Names this sensor already tracks are skipped silently; a clash with a metric
        // registered elsewhere in the registry is an error.
        if (!metrics.containsKey(metric.metricName())) {
            KafkaMetric existingMetric = registry.registerMetric(metric);
            if (existingMetric != null) {
                throw new IllegalArgumentException("A metric named '" + metric.metricName() + "' already exists, can't register another one.");
            }
            metrics.put(metric.metricName(), metric);
        }
    }
    return true;
}
/**
 * Register a metric with this sensor
 * @param metricName The name of the metric
 * @param stat The statistic to keep
 * @return true if metric is added to sensor, false if sensor is expired
 */
public boolean add(MetricName metricName, MeasurableStat stat) {
    // Delegates with a null config, meaning "use this sensor's default config".
    return add(metricName, stat, null);
}
/**
 * Register a metric with this sensor
 *
 * @param metricName The name of the metric
 * @param stat The statistic to keep
 * @param config A special configuration for this metric. If null use the sensor default configuration.
 * @return true if metric is added to sensor, false if sensor is expired
 */
public synchronized boolean add(final MetricName metricName, final MeasurableStat stat, final MetricConfig config) {
    if (hasExpired()) {
        // Expired sensors refuse new metrics.
        return false;
    } else if (metrics.containsKey(metricName)) {
        // Idempotent: re-adding a name this sensor already tracks is a no-op success.
        return true;
    } else {
        final MetricConfig statConfig = config == null ? this.config : config;
        final KafkaMetric metric = new KafkaMetric(
            metricLock(),
            Objects.requireNonNull(metricName),
            Objects.requireNonNull(stat),
            statConfig,
            time
        );
        // A clash with a metric registered elsewhere in the registry is an error.
        KafkaMetric existingMetric = registry.registerMetric(metric);
        if (existingMetric != null) {
            throw new IllegalArgumentException("A metric named '" + metricName + "' already exists, can't register another one.");
        }
        metrics.put(metric.metricName(), metric);
        stats.add(new StatAndConfig(Objects.requireNonNull(stat), metric::config));
        return true;
    }
}
/**
 * Return if metrics were registered with this sensor.
 *
 * @return true if metrics were registered, false otherwise
 */
public synchronized boolean hasMetrics() {
    return !metrics.isEmpty();
}
/**
 * Return true if the Sensor is eligible for removal due to inactivity.
 * false otherwise
 */
public boolean hasExpired() {
    // Compares elapsed time since the last record against the configured idle limit.
    return (time.milliseconds() - this.lastRecordTime) > this.inactiveSensorExpirationTimeMs;
}
// Package-private snapshot of this sensor's metrics (immutable copy).
synchronized List<KafkaMetric> metrics() {
    return List.copyOf(this.metrics.values());
}
/**
 * Returns the shared lock object guarding stat updates and metric value reads.
 * <p>
 * KafkaMetrics of sensors which use SampledStat should be synchronized on the same lock
 * for sensor record and metric value read to allow concurrent reads and updates. For simplicity,
 * all sensors are synchronized on this object.
 * <p>
 * Sensor object is not used as a lock for reading metric value since metrics reporter is
 * invoked while holding Sensor and Metrics locks to report addition and removal of metrics
 * and synchronized reporters may deadlock if Sensor lock is used for reading metrics values.
 * Note that Sensor object itself is used as a lock to protect the access to stats and metrics
 * while recording metric values, adding and deleting sensors.
 * </p><p>
 * Locking order (assume all MetricsReporter methods may be synchronized):
 * <ul>
 *   <li>Sensor#add: Sensor -> Metrics -> MetricsReporter</li>
 *   <li>Metrics#removeSensor: Sensor -> Metrics -> MetricsReporter</li>
 *   <li>KafkaMetric#metricValue: MetricsReporter -> Sensor#metricLock</li>
 *   <li>Sensor#record: Sensor -> Sensor#metricLock</li>
 * </ul>
 * </p>
 */
private Object metricLock() {
    return metricLock;
}
}
|
RecordingLevel
|
java
|
spring-projects__spring-security
|
rsocket/src/main/java/org/springframework/security/rsocket/api/PayloadExchangeType.java
|
{
"start": 766,
"end": 2182
}
|
/**
 * The type of an RSocket payload exchange. Each constant records whether it
 * represents a request (the frame that initiates an interaction).
 */
enum PayloadExchangeType {

    /**
     * The <a href="https://rsocket.io/docs/Protocol#setup-frame-0x01">Setup</a>. Can be
     * used to determine if a Payload is part of the connection
     */
    SETUP(false),

    /**
     * A <a href="https://rsocket.io/docs/Protocol#frame-fnf">Fire and Forget</a>
     * exchange.
     */
    FIRE_AND_FORGET(true),

    /**
     * A <a href="https://rsocket.io/docs/Protocol#frame-request-response">Request
     * Response</a> exchange.
     */
    REQUEST_RESPONSE(true),

    /**
     * A <a href="https://rsocket.io/docs/Protocol#request-stream-frame">Request
     * Stream</a> exchange. This is only represents the request portion. The
     * {@link #PAYLOAD} type represents the data that submitted.
     */
    REQUEST_STREAM(true),

    /**
     * A <a href="https://rsocket.io/docs/Protocol#request-channel-frame">Request
     * Channel</a> exchange.
     */
    REQUEST_CHANNEL(true),

    /**
     * A <a href="https://rsocket.io/docs/Protocol#payload-frame">Payload</a> exchange.
     */
    PAYLOAD(false),

    /**
     * A <a href="https://rsocket.io/docs/Protocol#frame-metadata-push">Metadata Push</a>
     * exchange.
     */
    METADATA_PUSH(true);

    // Whether this exchange type is the initiating (request) frame of an interaction.
    private final boolean isRequest;

    PayloadExchangeType(boolean isRequest) {
        this.isRequest = isRequest;
    }

    /**
     * Determines if this exchange is a type of request (i.e. the initial frame).
     * @return true if it is a request, else false
     */
    public boolean isRequest() {
        return this.isRequest;
    }
}
|
PayloadExchangeType
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jaxb/deployment/src/test/java/io/quarkus/resteasy/reactive/jaxb/deployment/test/MultipartTest.java
|
{
"start": 899,
"end": 4363
}
|
class ____ {
// Format templates used to match individual multipart section headers.
private static final String EXPECTED_CONTENT_DISPOSITION_PART = "Content-Disposition: form-data; name=\"%s\"";
private static final String EXPECTED_CONTENT_TYPE_PART = "Content-Type: %s";
// Expected values produced by MultipartOutputResource.
private static final String EXPECTED_RESPONSE_NAME = "a name";
private static final String EXPECTED_RESPONSE_PERSON_NAME = "Michal";
private static final int EXPECTED_RESPONSE_PERSON_AGE = 23;
private static final String EXPECTED_RESPONSE_PERSON = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>"
        + "<person>"
        + "<age>" + EXPECTED_RESPONSE_PERSON_AGE + "</age>"
        + "<name>" + EXPECTED_RESPONSE_PERSON_NAME + "</name>"
        + "</person>";
// XML payload posted as a multipart section in the input tests.
private static final String SCHOOL = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>"
        + "<school>"
        + "<name>Divino Pastor</name>"
        + "</school>";
// Static fixture uploaded in testInputFile.
private final File HTML_FILE = new File("./src/test/resources/test.html");

@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
        .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
                .addClasses(MultipartOutputResource.class, MultipartOutputResponse.class, Person.class));
/** Verifies the multipart response contains the expected plain-text and XML sections. */
@Test
public void testOutput() {
    String body = RestAssured.get("/multipart/output")
            .then()
            .statusCode(200)
            .contentType(ContentType.MULTIPART)
            .extract().asString();
    assertContains(body, "name", MediaType.TEXT_PLAIN, EXPECTED_RESPONSE_NAME);
    assertContains(body, "person", MediaType.TEXT_XML, EXPECTED_RESPONSE_PERSON);
}
/** Posts a plain-text part and an XML part; the endpoint echoes both values joined by '-'. */
@Test
public void testInput() {
    String result = RestAssured.given()
            .multiPart("name", "John")
            .multiPart("school", SCHOOL, MediaType.APPLICATION_XML)
            .post("/multipart/input")
            .then()
            .statusCode(200)
            .extract().asString();
    assertThat(result).isEqualTo("John-Divino Pastor");
}
/** Same as {@link #testInput()} but exercises the parameter-based endpoint variant. */
@Test
public void testInputParam() {
    String result = RestAssured.given()
            .multiPart("name", "John")
            .multiPart("school", SCHOOL, MediaType.APPLICATION_XML)
            .post("/multipart/param/input")
            .then()
            .statusCode(200)
            .extract().asString();
    assertThat(result).isEqualTo("John-Divino Pastor");
}
/** Uploads an HTML file and expects the endpoint to reply with its size in bytes. */
@Test
public void testInputFile() throws IOException {
    String response = RestAssured
            .given()
            .multiPart("file", HTML_FILE, "text/html")
            .post("/multipart/input/file")
            .then()
            .statusCode(200)
            .extract().asString();
    // Files.size queries the byte count directly instead of reading the whole
    // file into memory just to take the array length.
    assertThat(response).isEqualTo(String.valueOf(Files.size(HTML_FILE.toPath())));
}
// Asserts that at least one multipart section (sections are separated by the
// "--" boundary prefix) carries the given part name, content type and value.
private void assertContains(String response, String name, String contentType, Object value) {
    String[] lines = response.split("--");
    assertThat(lines).anyMatch(line -> line.contains(String.format(EXPECTED_CONTENT_DISPOSITION_PART, name))
            && line.contains(String.format(EXPECTED_CONTENT_TYPE_PART, contentType))
            && line.contains(value.toString()));
}
@Path("/multipart")
private static
|
MultipartTest
|
java
|
apache__flink
|
flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionCapacitySchedulerITCase.java
|
{
"start": 4113,
"end": 26245
}
|
class ____ extends YarnTestBase {
private static final Logger LOG =
        LoggerFactory.getLogger(YARNSessionCapacitySchedulerITCase.class);

// Fixed application id for cases where a real YARN application id is not needed.
private static final ApplicationId TEST_YARN_APPLICATION_ID =
        ApplicationId.newInstance(System.currentTimeMillis(), 42);

/** RestClient to query Flink cluster. */
private static RestClient restClient;

/**
 * ExecutorService for {@link RestClient}.
 *
 * @see #restClient
 */
private static ExecutorService restClientExecutor;

/** Toggles checking for prohibited strings in logs after the test has run. */
private boolean checkForProhibitedLogContents = true;

// Captures INFO-level output of the CLI frontend for assertions.
@RegisterExtension
private final LoggerAuditingExtension cliLoggerAuditingExtension =
        new LoggerAuditingExtension(CliFrontend.class, Level.INFO);

// Captures WARN-level output of the cluster descriptor for assertions.
@RegisterExtension
private final LoggerAuditingExtension yarLoggerAuditingExtension =
        new LoggerAuditingExtension(YarnClusterDescriptor.class, Level.WARN);
/**
 * Configures a capacity scheduler with two queues, {@code default} (40%) and
 * {@code qa-team} (60%), starts the YARN mini-cluster, and creates the REST client
 * used to query the Flink cluster.
 */
@BeforeAll
static void setup() throws Exception {
    YARN_CONFIGURATION.setClass(
            YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, ResourceScheduler.class);
    YARN_CONFIGURATION.set("yarn.scheduler.capacity.root.queues", "default,qa-team");
    YARN_CONFIGURATION.setInt("yarn.scheduler.capacity.root.default.capacity", 40);
    YARN_CONFIGURATION.setInt("yarn.scheduler.capacity.root.qa-team.capacity", 60);
    YARN_CONFIGURATION.set(
            YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-capacityscheduler");
    startYARNWithConfig(YARN_CONFIGURATION);
    restClientExecutor = Executors.newSingleThreadExecutor();
    restClient = new RestClient(new Configuration(), restClientExecutor);
}
/**
 * Tears down the YARN cluster first, then — even if that fails — shuts down the
 * REST client and its executor in the finally block.
 */
@AfterAll
static void teardown() throws Exception {
    try {
        YarnTestBase.teardown();
    } finally {
        if (restClient != null) {
            restClient.shutdown(Duration.ofSeconds(5));
        }
        if (restClientExecutor != null) {
            restClientExecutor.shutdownNow();
        }
    }
}
/**
 * Tests that a session cluster, that uses the resources from the <i>qa-team</i> queue, can be
 * started from the command line.
 */
@Test
void testStartYarnSessionClusterInQaTeamQueue() throws Exception {
    runTest(
            () ->
                    // Expect the session to come up (web interface line logged) with exit code 0.
                    runWithArgs(
                            new String[] {
                                "-j",
                                flinkUberjar.getAbsolutePath(),
                                "-t",
                                flinkLibFolder.getAbsolutePath(),
                                "-jm",
                                "768m",
                                "-tm",
                                "1024m",
                                "-qu",
                                "qa-team"
                            },
                            "JobManager Web Interface:",
                            null,
                            RunTypes.YARN_SESSION,
                            0));
}
/**
 * Starts a session cluster on YARN, and submits a streaming job.
 *
 * <p>Tests
 *
 * <ul>
 *   <li>if a custom YARN application name can be set from the command line,
 *   <li>if the number of TaskManager slots can be set from the command line,
 *   <li>if dynamic properties from the command line are set,
 *   <li>if the vcores are set correctly (FLINK-2213),
 *   <li>if jobmanager hostname/port are shown in web interface (FLINK-1902)
 * </ul>
 *
 * <p><b>Hint: </b> If you think it is a good idea to add more assertions to this test, think
 * again!
 */
@Test
void
        testVCoresAreSetCorrectlyAndJobManagerHostnameAreShownInWebInterfaceAndDynamicPropertiesAndYarnApplicationNameAndTaskManagerSlots()
                throws Exception {
    runTest(
            () -> {
                // This test intentionally produces log lines that would otherwise
                // trip the prohibited-strings check.
                checkForProhibitedLogContents = false;
                final Runner yarnSessionClusterRunner =
                        startWithArgs(
                                new String[] {
                                    "-j",
                                    flinkUberjar.getAbsolutePath(),
                                    "-t",
                                    flinkLibFolder.getAbsolutePath(),
                                    "-jm",
                                    "768m",
                                    "-tm",
                                    "1024m",
                                    "-s",
                                    "3", // set the slots 3 to check if the vCores are set
                                    // properly!
                                    "-nm",
                                    "customName",
                                    "-Dfancy-configuration-value=veryFancy",
                                    "-D" + YarnConfigOptions.VCORES.key() + "=2"
                                },
                                "JobManager Web Interface:",
                                RunTypes.YARN_SESSION);
                try {
                    // The JobManager address is scraped from the CLI output.
                    final String logs = outContent.toString();
                    final HostAndPort hostAndPort = parseJobManagerHostname(logs);
                    final String host = hostAndPort.getHost();
                    final int port = hostAndPort.getPort();
                    LOG.info("Extracted hostname:port: {}:{}", host, port);
                    submitJob("WindowJoin.jar");
                    //
                    // Assert that custom YARN application name "customName" is set
                    //
                    final ApplicationReport applicationReport = getOnlyApplicationReport();
                    assertThat(applicationReport.getName()).isEqualTo("customName");
                    //
                    // Assert the number of TaskManager slots are set
                    //
                    waitForTaskManagerRegistration(host, port);
                    assertNumberOfSlotsPerTask(host, port, 3);
                    final Map<String, String> flinkConfig = getFlinkConfig(host, port);
                    //
                    // Assert dynamic properties
                    //
                    assertThat(flinkConfig)
                            .containsEntry("fancy-configuration-value", "veryFancy")
                            //
                            // FLINK-2213: assert that vcores are set
                            //
                            .containsEntry(YarnConfigOptions.VCORES.key(), "2")
                            //
                            // FLINK-1902: check if jobmanager hostname is shown in web
                            // interface
                            //
                            .containsEntry(JobManagerOptions.ADDRESS.key(), host);
                } finally {
                    // Always stop the session cluster, even if an assertion failed.
                    yarnSessionClusterRunner.sendStop();
                    yarnSessionClusterRunner.join();
                }
            });
}
/**
 * Extracts the JobManager host and port from the "JobManager Web Interface:" line of the
 * given log output. The loop retains the last match, so the most recent line wins.
 *
 * @throws IllegalStateException if no such line is present
 */
private static HostAndPort parseJobManagerHostname(final String logs) {
    final Pattern pattern =
            Pattern.compile("JobManager Web Interface: http://([a-zA-Z0-9.-]+):([0-9]+)");
    final Matcher matcher = pattern.matcher(logs);
    String hostname = null;
    String port = null;
    while (matcher.find()) {
        hostname = matcher.group(1).toLowerCase();
        port = matcher.group(2);
    }
    checkState(hostname != null, "hostname not found in log");
    checkState(port != null, "port not found in log");
    return HostAndPort.fromParts(hostname, Integer.parseInt(port));
}
// Submits the given example jar in detached mode via the CLI frontend and waits
// until the submission is acknowledged in the output.
private void submitJob(final String jobFileName) throws IOException, InterruptedException {
    Runner jobRunner =
            startWithArgs(
                    new String[] {
                        "run", "--detached", getTestJarPath(jobFileName).getAbsolutePath()
                    },
                    "Job has been submitted with JobID",
                    RunTypes.CLI_FRONTEND);
    jobRunner.join();
}
// Blocks until at least one TaskManager has registered with the cluster at host:port.
private static void waitForTaskManagerRegistration(final String host, final int port)
        throws Exception {
    CommonTestUtils.waitUntilCondition(() -> getNumberOfTaskManagers(host, port) > 0);
}
// Waits until the per-TaskManager slot count matches the expectation; on timeout,
// fails with a message that includes the last observed value.
private static void assertNumberOfSlotsPerTask(
        final String host, final int port, final int slotsNumber) throws Exception {
    try {
        CommonTestUtils.waitUntilCondition(
                () -> getNumberOfSlotsPerTaskManager(host, port) == slotsNumber);
    } catch (final TimeoutException e) {
        final int currentNumberOfSlots = getNumberOfSlotsPerTaskManager(host, port);
        fail(
                String.format(
                        "Expected slots per TM to be %d, was: %d",
                        slotsNumber, currentNumberOfSlots));
    }
}
// Queries the cluster overview REST endpoint for the number of connected TaskManagers.
private static int getNumberOfTaskManagers(final String host, final int port) throws Exception {
    final ClusterOverviewWithVersion clusterOverviewWithVersion =
            restClient
                    .sendRequest(host, port, ClusterOverviewHeaders.getInstance())
                    .get(30_000, TimeUnit.MILLISECONDS);
    return clusterOverviewWithVersion.getNumTaskManagersConnected();
}
/**
 * Queries the TaskManagers REST endpoint and returns the slot count of the first
 * reported TaskManager, or 0 if none is reported yet.
 */
private static int getNumberOfSlotsPerTaskManager(final String host, final int port)
        throws Exception {
    final TaskManagersInfo taskManagersInfo =
            restClient.sendRequest(host, port, TaskManagersHeaders.getInstance()).get();
    return taskManagersInfo.getTaskManagerInfos().stream()
            .findFirst()
            .map(TaskManagerInfo::getNumberSlots)
            .orElse(0);
}
// Fetches the cluster configuration via REST and flattens it into a key/value map.
private static Map<String, String> getFlinkConfig(final String host, final int port)
        throws Exception {
    final ConfigurationInfo configurationInfoEntries =
            restClient
                    .sendRequest(host, port, ClusterConfigurationInfoHeaders.getInstance())
                    .get();
    return configurationInfoEntries.stream()
            .collect(
                    Collectors.toMap(
                            ConfigurationInfoEntry::getKey, ConfigurationInfoEntry::getValue));
}
/**
 * Test deployment to non-existing queue & ensure that the system logs a WARN message for the
 * user. (Users had unexpected behavior of Flink on YARN because they mistyped the target queue.
 * With an error message, we can help users identifying the issue)
 */
@Test
void testNonexistingQueueWARNmessage() throws Exception {
    runTest(
            () -> {
                LOG.info("Starting testNonexistingQueueWARNmessage()");
                // Deployment must fail (exit code 1) with the unknown-queue message
                // somewhere in the exception chain.
                assertThatThrownBy(
                                () ->
                                        runWithArgs(
                                                new String[] {
                                                    "-j",
                                                    flinkUberjar.getAbsolutePath(),
                                                    "-t",
                                                    flinkLibFolder.getAbsolutePath(),
                                                    "-jm",
                                                    "768m",
                                                    "-tm",
                                                    "1024m",
                                                    "-qu",
                                                    "doesntExist"
                                                },
                                                "to unknown queue: doesntExist",
                                                null,
                                                RunTypes.YARN_SESSION,
                                                1))
                        .isInstanceOf(Exception.class)
                        .satisfies(anyCauseMatches("to unknown queue: doesntExist"));
                // Additionally the cluster descriptor must have warned about the queue.
                assertThat(yarLoggerAuditingExtension.getMessages())
                        .anySatisfy(
                                s ->
                                        assertThat(s)
                                                .contains(
                                                        "The specified queue 'doesntExist' does not exist. Available queues"));
                LOG.info("Finished testNonexistingQueueWARNmessage()");
            });
}
// Runs the given job jar as a detached per-job YARN cluster: submits it, waits for
// the application to finish, validates the output/log files and application tags,
// and cleans up the yarn-properties file and the YARN client in all cases.
private void testDetachedPerJobYarnClusterInternal(File tempDir, String job) throws Exception {
    YarnClient yc = YarnClient.createYarnClient();
    yc.init(YARN_CONFIGURATION);
    yc.start();
    // get temporary file for reading input data for wordcount example
    File tmpInFile = tempDir.toPath().resolve(UUID.randomUUID().toString()).toFile();
    tmpInFile.createNewFile();
    try {
        FileUtils.writeStringToFile(tmpInFile, WordCountData.TEXT, Charset.defaultCharset());
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    Runner runner =
            startWithArgs(
                    new String[] {
                        "run",
                        "-m",
                        "yarn-cluster",
                        "-yj",
                        flinkUberjar.getAbsolutePath(),
                        "-yt",
                        flinkLibFolder.getAbsolutePath(),
                        "-yjm",
                        "768m",
                        "-yD",
                        YarnConfigOptions.APPLICATION_TAGS.key() + "=test-tag",
                        "-ytm",
                        "1024m",
                        "-ys",
                        "2", // test requesting slots from YARN.
                        "-p",
                        "2",
                        "--detached",
                        job,
                        "--input",
                        tmpInFile.getAbsoluteFile().toString(),
                        "--output",
                        tempDir.getAbsoluteFile().toString()
                    },
                    "Job has been submitted with JobID",
                    RunTypes.CLI_FRONTEND);
    // it should usually be 2, but on slow machines, the number varies
    assertThat(getRunningContainers()).isLessThanOrEqualTo(2);
    // give the runner some time to detach
    for (int attempt = 0; runner.isAlive() && attempt < 5; attempt++) {
        try {
            Thread.sleep(500);
        } catch (InterruptedException ignored) {
        }
    }
    assertThat(runner.isAlive()).isFalse();
    LOG.info("CLI Frontend has returned, so the job is running");
    // find out the application id and wait until it has finished.
    try {
        List<ApplicationReport> apps =
                getApplicationReportWithRetryOnNPE(
                        yc, EnumSet.of(YarnApplicationState.RUNNING));
        ApplicationId tmpAppId;
        if (apps.size() == 1) {
            // Better method to find the right appId. But sometimes the app is shutting down
            // very fast
            // Only one running
            tmpAppId = apps.get(0).getApplicationId();
            LOG.info("waiting for the job with appId {} to finish", tmpAppId);
            // wait until the app has finished
            while (getApplicationReportWithRetryOnNPE(
                                    yc, EnumSet.of(YarnApplicationState.RUNNING))
                            .size()
                    > 0) {
                sleep(500);
            }
        } else {
            // get appId by finding the latest finished appid
            apps = getApplicationReportWithRetryOnNPE(yc);
            Collections.sort(
                    apps,
                    (o1, o2) -> o1.getApplicationId().compareTo(o2.getApplicationId()) * -1);
            tmpAppId = apps.get(0).getApplicationId();
            LOG.info(
                    "Selected {} as the last appId from {}",
                    tmpAppId,
                    Arrays.toString(apps.toArray()));
        }
        final ApplicationId id = tmpAppId;
        // now it has finished.
        // check the output files.
        File[] listOfOutputFiles = tempDir.listFiles();
        assertThat(listOfOutputFiles).isNotNull();
        LOG.info("The job has finished. TaskManager output files found in {}", tempDir);
        // read all output files in output folder to one output string
        StringBuilder content = new StringBuilder();
        for (File f : listOfOutputFiles) {
            if (f.isFile()) {
                content.append(FileUtils.readFileToString(f, Charset.defaultCharset()))
                        .append("\n");
            }
        }
        // check if the heap size for the TaskManager was set correctly
        File jobmanagerLog =
                TestUtils.findFile(
                        "..",
                        (dir, name) ->
                                name.contains("jobmanager.log")
                                        && dir.getAbsolutePath().contains(id.toString()));
        assertThat(jobmanagerLog).isNotNull();
        content =
                new StringBuilder(
                        FileUtils.readFileToString(jobmanagerLog, Charset.defaultCharset()));
        // The log must show that both TaskManagers were started on their first attempt.
        assertThat(content.toString())
                .contains("Starting TaskManagers")
                .contains(" (2/2) (attempt #0) with attempt id ");
        // make sure the detached app is really finished.
        LOG.info("Checking again that app has finished");
        ApplicationReport rep;
        do {
            sleep(500);
            rep = yc.getApplicationReport(id);
            LOG.info("Got report {}", rep);
        } while (rep.getYarnApplicationState() == YarnApplicationState.RUNNING);
        verifyApplicationTags(rep);
    } finally {
        // cleanup the yarn-properties file
        String confDirPath = System.getenv("FLINK_CONF_DIR");
        File configDirectory = new File(confDirPath);
        LOG.info(
                "testDetachedPerJobYarnClusterInternal: Using configuration directory "
                        + configDirectory.getAbsolutePath());
        // load the configuration
        LOG.info("testDetachedPerJobYarnClusterInternal: Trying to load configuration file");
        Configuration configuration =
                GlobalConfiguration.loadConfiguration(configDirectory.getAbsolutePath());
        try {
            File yarnPropertiesFile =
                    FlinkYarnSessionCli.getYarnPropertiesLocation(
                            configuration.getValue(YarnConfigOptions.PROPERTIES_FILE_LOCATION));
            if (yarnPropertiesFile.exists()) {
                LOG.info(
                        "testDetachedPerJobYarnClusterInternal: Cleaning up temporary Yarn address reference: {}",
                        yarnPropertiesFile.getAbsolutePath());
                yarnPropertiesFile.delete();
            }
        } catch (Exception e) {
            LOG.warn(
                    "testDetachedPerJobYarnClusterInternal: Exception while deleting the JobManager address file",
                    e);
        }
        try {
            LOG.info("testDetachedPerJobYarnClusterInternal: Closing the yarn client");
            yc.stop();
        } catch (Exception e) {
            LOG.warn(
                    "testDetachedPerJobYarnClusterInternal: Exception while close the yarn client",
                    e);
        }
    }
}
/**
 * Ensures that the YARN application tags were set properly.
 *
 * <p>Since YARN application tags were only added in Hadoop 2.4, but Flink still supports Hadoop
 * 2.3, reflection is required to invoke the methods. If the method does not exist, this test
 * passes.
 */
private void verifyApplicationTags(final ApplicationReport report)
        throws InvocationTargetException, IllegalAccessException {
    final Method applicationTagsMethod;
    Class<ApplicationReport> clazz = ApplicationReport.class;
    try {
        // this method is only supported by Hadoop 2.4.0 onwards
        applicationTagsMethod = clazz.getMethod("getApplicationTags");
    } catch (NoSuchMethodException e) {
        // only verify the tags if the method exists
        return;
    }
    @SuppressWarnings("unchecked")
    Set<String> applicationTags = (Set<String>) applicationTagsMethod.invoke(report);
    // The tag was passed on submission via yarn.tags (see the "-yD" argument above).
    assertThat(applicationTags).containsOnly("test-tag");
}
// Post-test sweep of log files for prohibited strings; individual tests may opt
// out by clearing the checkForProhibitedLogContents flag.
@AfterEach
void checkForProhibitedLogContents() {
    if (checkForProhibitedLogContents) {
        ensureNoProhibitedStringInLogFiles(PROHIBITED_STRINGS, WHITELISTED_STRINGS);
    }
}
}
|
YARNSessionCapacitySchedulerITCase
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/LogAndSkipOnInvalidTimestamp.java
|
{
"start": 2343,
"end": 3413
}
|
/**
 * Timestamp extractor that logs a warning for records with an invalid (negative)
 * timestamp and returns the timestamp unchanged, causing the record to be skipped.
 */
class LogAndSkipOnInvalidTimestamp extends ExtractRecordMetadataTimestamp {

    private static final Logger log = LoggerFactory.getLogger(LogAndSkipOnInvalidTimestamp.class);

    /**
     * Writes a log WARN message when the extracted timestamp is invalid (negative) but returns the invalid timestamp as-is,
     * which ultimately causes the record to be skipped and not to be processed.
     *
     * @param record a data record
     * @param recordTimestamp the timestamp extractor from the record
     * @param partitionTime the highest extracted valid timestamp of the current record's partition (could be -1 if unknown)
     * @return the originally extracted timestamp of the record
     */
    @Override
    public long onInvalidTimestamp(final ConsumerRecord<Object, Object> record,
                                   final long recordTimestamp,
                                   final long partitionTime) {
        log.warn("Input record {} will be dropped because it has an invalid (negative) timestamp.", record);
        return recordTimestamp;
    }
}
|
LogAndSkipOnInvalidTimestamp
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/internal/runners/util/FailureDetector.java
|
{
"start": 356,
"end": 642
}
|
/**
 * JUnit run listener that remembers whether any test in the run reported a failure.
 */
class ____ extends RunListener {

    // Set once any test reports a failure; never reset.
    private boolean failed;

    @Override
    public void testFailure(Failure failure) throws Exception {
        super.testFailure(failure);
        this.failed = true;
    }

    /** @return true when no failure has been observed so far */
    public boolean isSuccessful() {
        return !this.failed;
    }
}
|
FailureDetector
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/RedisDataSource.java
|
{
"start": 27085,
"end": 42559
}
|
class ____ the values included in the message exchanged on the streams
* @param <K> the type of the redis key
* @param <F> the type of the fields (map's keys)
* @param <V> the type of the value
* @return the object to execute commands manipulating streams.
*/
<K, F, V> StreamCommands<K, F, V> stream(TypeReference<K> redisKeyType, TypeReference<F> fieldType,
TypeReference<V> valueType);
/**
* Gets the object to execute commands manipulating streams, using a string key, and string fields.
*
* @param <V> the type of the value
* @return the object to execute commands manipulating streams.
*/
default <V> StreamCommands<String, String, V> stream(Class<V> typeOfValue) {
return stream(String.class, String.class, typeOfValue);
}
/**
* Gets the object to execute commands manipulating streams, using a string key, and string fields.
*
* @param <V> the type of the value
* @return the object to execute commands manipulating streams.
*/
default <V> StreamCommands<String, String, V> stream(TypeReference<V> typeOfValue) {
return stream(STRING_TYPE_REFERENCE, STRING_TYPE_REFERENCE, typeOfValue);
}
/**
* Gets the object to manipulate JSON values.
* This group requires the <a href="https://redis.io/docs/stack/json/">RedisJSON module</a>.
*
* @return the object to manipulate JSON values.
*/
default JsonCommands<String> json() {
return json(String.class);
}
/**
* Gets the object to manipulate JSON values.
* This group requires the <a href="https://redis.io/docs/stack/json/">RedisJSON module</a>.
*
* @param <K> the type of keys
* @return the object to manipulate JSON values.
*/
<K> JsonCommands<K> json(Class<K> redisKeyType);
/**
* Gets the object to manipulate JSON values.
* This group requires the <a href="https://redis.io/docs/stack/json/">RedisJSON module</a>.
*
* @param <K> the type of keys
* @return the object to manipulate JSON values.
*/
<K> JsonCommands<K> json(TypeReference<K> redisKeyType);
/**
* Gets the object to manipulate Bloom filters.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a>.
*
* @param <V> the type of the values added into the Bloom filter
* @return the object to manipulate bloom filters.
*/
default <V> BloomCommands<String, V> bloom(Class<V> valueType) {
return bloom(String.class, valueType);
}
/**
* Gets the object to manipulate Bloom filters.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a>.
*
* @param <V> the type of the values added into the Bloom filter
* @return the object to manipulate bloom filters.
*/
default <V> BloomCommands<String, V> bloom(TypeReference<V> valueType) {
return bloom(STRING_TYPE_REFERENCE, valueType);
}
/**
* Gets the object to manipulate Bloom filters.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a>.
*
* @param <K> the type of keys
* @param <V> the type of the values added into the Bloom filter
* @return the object to manipulate bloom filters.
*/
<K, V> BloomCommands<K, V> bloom(Class<K> redisKeyType, Class<V> valueType);
/**
* Gets the object to manipulate Bloom filters.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a>.
*
* @param <K> the type of keys
* @param <V> the type of the values added into the Bloom filter
* @return the object to manipulate bloom filters.
*/
<K, V> BloomCommands<K, V> bloom(TypeReference<K> redisKeyType, TypeReference<V> valueType);
/**
* Gets the object to manipulate Cuckoo filters.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the Cuckoo
* filter support).
*
* @param <V> the type of the values added into the Cuckoo filter
* @return the object to manipulate Cuckoo filters.
*/
default <V> CuckooCommands<String, V> cuckoo(Class<V> valueType) {
return cuckoo(String.class, valueType);
}
/**
* Gets the object to manipulate Cuckoo filters.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the Cuckoo
* filter support).
*
* @param <V> the type of the values added into the Cuckoo filter
* @return the object to manipulate Cuckoo filters.
*/
default <V> CuckooCommands<String, V> cuckoo(TypeReference<V> valueType) {
return cuckoo(STRING_TYPE_REFERENCE, valueType);
}
/**
* Gets the object to manipulate Cuckoo filters.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the Cuckoo
* filter support).
*
* @param <K> the type of keys
* @param <V> the type of the values added into the Cuckoo filter
* @return the object to manipulate Cuckoo filters.
*/
<K, V> CuckooCommands<K, V> cuckoo(Class<K> redisKeyType, Class<V> valueType);
/**
* Gets the object to manipulate Cuckoo filters.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the Cuckoo
* filter support).
*
* @param <K> the type of keys
* @param <V> the type of the values added into the Cuckoo filter
* @return the object to manipulate Cuckoo filters.
*/
<K, V> CuckooCommands<K, V> cuckoo(TypeReference<K> redisKeyType, TypeReference<V> valueType);
/**
* Gets the object to manipulate Count-Min sketches.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the count-min
* sketches support).
*
* @param <V> the type of the values added into the count-min sketches
* @return the object to manipulate count-min sketches.
*/
default <V> CountMinCommands<String, V> countmin(Class<V> valueType) {
return countmin(String.class, valueType);
}
/**
* Gets the object to manipulate Count-Min sketches.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the count-min
* sketches support).
*
* @param <V> the type of the values added into the count-min sketches
* @return the object to manipulate count-min sketches.
*/
default <V> CountMinCommands<String, V> countmin(TypeReference<V> valueType) {
return countmin(STRING_TYPE_REFERENCE, valueType);
}
/**
* Gets the object to manipulate Count-Min sketches.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the count-min
* sketches support).
*
* @param <K> the type of keys
* @param <V> the type of the values added into the count-min sketches
* @return the object to manipulate count-min sketches.
*/
<K, V> CountMinCommands<K, V> countmin(Class<K> redisKeyType, Class<V> valueType);
/**
* Gets the object to manipulate Count-Min sketches.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the count-min
* sketches support).
*
* @param <K> the type of keys
* @param <V> the type of the values added into the count-min sketches
* @return the object to manipulate count-min sketches.
*/
<K, V> CountMinCommands<K, V> countmin(TypeReference<K> redisKeyType, TypeReference<V> valueType);
/**
* Gets the object to manipulate Top-K list.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the top-k
* list support).
*
* @param <V> the type of the values added into the top-k lists
* @return the object to manipulate top-k lists.
*/
default <V> TopKCommands<String, V> topk(Class<V> valueType) {
return topk(String.class, valueType);
}
/**
* Gets the object to manipulate Top-K list.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the top-k
* list support).
*
* @param <V> the type of the values added into the top-k lists
* @return the object to manipulate top-k lists.
*/
default <V> TopKCommands<String, V> topk(TypeReference<V> valueType) {
return topk(STRING_TYPE_REFERENCE, valueType);
}
/**
* Gets the object to manipulate Top-K list.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the top-k
* list support).
*
* @param <K> the type of keys
* @param <V> the type of the values added into the top-k lists
* @return the object to manipulate top-k lists.
*/
<K, V> TopKCommands<K, V> topk(Class<K> redisKeyType, Class<V> valueType);
/**
* Gets the object to manipulate Top-K list.
* This group requires the <a href="https://redis.io/docs/stack/bloom/">RedisBloom module</a> (including the top-k
* list support).
*
* @param <K> the type of keys
* @param <V> the type of the values added into the top-k lists
* @return the object to manipulate top-k lists.
*/
<K, V> TopKCommands<K, V> topk(TypeReference<K> redisKeyType, TypeReference<V> valueType);
/**
* Gets the object to manipulate graphs.
* This group requires the <a href="https://redis.io/docs/stack/graph/">RedisGraph module</a>.
*
* @return the object to manipulate graphs.
*/
@Experimental("The Redis graph support is experimental, in addition, the graph module EOL")
default GraphCommands<String> graph() {
return graph(String.class);
}
/**
* Gets the object to manipulate graphs.
* This group requires the <a href="https://redis.io/docs/stack/graph/">RedisGraph module</a>.
*
* @param <K> the type of keys
* @return the object to manipulate graphs lists.
*/
@Experimental("The Redis graph support is experimental, in addition, the graph module EOL")
<K> GraphCommands<K> graph(Class<K> redisKeyType);
/**
* Gets the object to emit commands from the {@code search} group.
* This group requires the <a href="https://redis.io/docs/stack/search/">RedisSearch module</a>.
*
* @param <K> the type of keys
* @return the object to search documents
* @deprecated Use the variant without parameter, as the index name must be a string
*/
@Experimental("The Redis search support is experimental")
@Deprecated
<K> SearchCommands<K> search(Class<K> redisKeyType);
/**
* Gets the object to emit commands from the {@code search} group.
* This group requires the <a href="https://redis.io/docs/stack/search/">RedisSearch module</a>.
*
* @return the object to search documents
*/
@Experimental("The Redis Search support is experimental")
default SearchCommands<String> search() {
return search(String.class);
}
/**
* Gets the object to emit commands from the {@code auto-suggest} group.
* This group requires the <a href="https://redis.io/docs/stack/search/">RedisSearch module</a>.
*
* @param <K> the type of keys
* @return the object to get suggestions
*/
@Experimental("The Redis auto-suggest support is experimental")
<K> AutoSuggestCommands<K> autosuggest(Class<K> redisKeyType);
/**
* Gets the object to emit commands from the {@code auto-suggest} group.
* This group requires the <a href="https://redis.io/docs/stack/search/">RedisSearch module</a>.
*
* @param <K> the type of keys
* @return the object to get suggestions
*/
@Experimental("The Redis auto-suggest support is experimental")
<K> AutoSuggestCommands<K> autosuggest(TypeReference<K> redisKeyType);
/**
* Gets the object to emit commands from the {@code auto-suggest} group.
* This group requires the <a href="https://redis.io/docs/stack/search/">RedisSearch module</a>.
*
* @return the object to get suggestions
*/
@Experimental("The Redis auto-suggest support is experimental")
default AutoSuggestCommands<String> autosuggest() {
return autosuggest(String.class);
}
/**
* Gets the object to emit commands from the {@code time series} group.
* This group requires the <a href="https://redis.io/docs/stack/timeseries/">Redis Time Series module</a>.
*
* @param <K> the type of keys
* @return the object to manipulate time series
*/
@Experimental("The Redis time series support is experimental")
<K> TimeSeriesCommands<K> timeseries(Class<K> redisKeyType);
/**
* Gets the object to emit commands from the {@code time series} group.
* This group requires the <a href="https://redis.io/docs/stack/timeseries/">Redis Time Series module</a>.
*
* @param <K> the type of keys
* @return the object to manipulate time series
*/
@Experimental("The Redis time series support is experimental")
<K> TimeSeriesCommands<K> timeseries(TypeReference<K> redisKeyType);
/**
* Gets the object to emit commands from the {@code time series} group.
* This group requires the <a href="https://redis.io/docs/stack/timeseries/">Redis Time Series module</a>.
*
* @return the object to manipulate time series
*/
@Experimental("The Redis time series support is experimental")
default TimeSeriesCommands<String> timeseries() {
return timeseries(String.class);
}
/**
* Gets the objects to publish and receive messages.
*
* @param messageType the type of message
* @param <V> the type of message
* @return the object to publish and subscribe to Redis channels
*/
<V> PubSubCommands<V> pubsub(Class<V> messageType);
/**
* Gets the objects to publish and receive messages.
*
* @param messageType the type of message
* @param <V> the type of message
* @return the object to publish and subscribe to Redis channels
*/
<V> PubSubCommands<V> pubsub(TypeReference<V> messageType);
/**
* Executes a command.
* This method is used to execute commands not offered by the API.
*
* @param command the command name
* @param args the parameters, encoded as String.
* @return the response
*/
Response execute(String command, String... args);
/**
* Executes a command.
* This method is used to execute commands not offered by the API.
*
* @param command the command
* @param args the parameters, encoded as String.
* @return the response
*/
Response execute(Command command, String... args);
/**
* Executes a command.
* This method is used to execute commands not offered by the API.
*
* @param command the command
* @param args the parameters, encoded as String.
* @return the response
*/
Response execute(io.vertx.redis.client.Command command, String... args);
/**
* @return the reactive data source.
*/
ReactiveRedisDataSource getReactive();
}
|
of
|
java
|
grpc__grpc-java
|
rls/src/test/java/io/grpc/rls/RlsLoadBalancerTest.java
|
{
"start": 38060,
"end": 38332
}
|
class ____ implements Throttler {
private boolean nextResult = false;
@Override
public boolean shouldThrottle() {
return nextResult;
}
@Override
public void registerBackendResponse(boolean throttled) {
// no-op
}
}
}
|
FakeThrottler
|
java
|
spring-projects__spring-security
|
webauthn/src/main/java/org/springframework/security/web/webauthn/registration/PublicKeyCredentialCreationOptionsFilter.java
|
{
"start": 2729,
"end": 6418
}
|
class ____ extends OncePerRequestFilter {
private PublicKeyCredentialCreationOptionsRepository repository = new HttpSessionPublicKeyCredentialCreationOptionsRepository();
private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder
.getContextHolderStrategy();
private RequestMatcher matcher = PathPatternRequestMatcher.withDefaults()
.matcher(HttpMethod.POST, "/webauthn/register/options");
private AuthorizationManager<HttpServletRequest> authorization = AuthenticatedAuthorizationManager.authenticated();
private final WebAuthnRelyingPartyOperations rpOperations;
private HttpMessageConverter<Object> converter = new JacksonJsonHttpMessageConverter(
JsonMapper.builder().addModule(new WebauthnJacksonModule()).build());
/**
* Creates a new instance.
* @param rpOperations the {@link WebAuthnRelyingPartyOperations} to use. Cannot be
* null.
*/
public PublicKeyCredentialCreationOptionsFilter(WebAuthnRelyingPartyOperations rpOperations) {
Assert.notNull(rpOperations, "rpOperations cannot be null");
this.rpOperations = rpOperations;
}
/**
* Sets the {@link RequestMatcher} used to trigger this filter.
* <p>
* By default, the {@link RequestMatcher} is {@code POST /webauthn/register/options}.
* @param requestMatcher the {@link RequestMatcher} to use
* @since 6.5
*/
public void setRequestMatcher(RequestMatcher requestMatcher) {
Assert.notNull(requestMatcher, "requestMatcher cannot be null");
this.matcher = requestMatcher;
}
@Override
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws ServletException, IOException {
if (!this.matcher.matches(request)) {
filterChain.doFilter(request, response);
return;
}
Supplier<SecurityContext> context = this.securityContextHolderStrategy.getDeferredContext();
Supplier<Authentication> authentication = () -> context.get().getAuthentication();
AuthorizationResult result = this.authorization.authorize(authentication, request);
if (result == null || !result.isGranted()) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
return;
}
PublicKeyCredentialCreationOptions options = this.rpOperations.createPublicKeyCredentialCreationOptions(
new ImmutablePublicKeyCredentialCreationOptionsRequest(authentication.get()));
this.repository.save(request, response, options);
response.setStatus(HttpServletResponse.SC_OK);
response.setHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE);
this.converter.write(options, MediaType.APPLICATION_JSON, new ServletServerHttpResponse(response));
}
/**
* Sets the {@link PublicKeyCredentialCreationOptionsRepository} to use. The default
* is {@link HttpSessionPublicKeyCredentialCreationOptionsRepository}.
* @param creationOptionsRepository the
* {@link PublicKeyCredentialCreationOptionsRepository} to use. Cannot be null.
*/
public void setCreationOptionsRepository(PublicKeyCredentialCreationOptionsRepository creationOptionsRepository) {
Assert.notNull(creationOptionsRepository, "creationOptionsRepository cannot be null");
this.repository = creationOptionsRepository;
}
/**
* Set the {@link HttpMessageConverter} to read the
* {@link WebAuthnRegistrationFilter.WebAuthnRegistrationRequest} and write the
* response. The default is {@link JacksonJsonHttpMessageConverter}.
* @param converter the {@link HttpMessageConverter} to use. Cannot be null.
*/
public void setConverter(HttpMessageConverter<Object> converter) {
Assert.notNull(converter, "converter cannot be null");
this.converter = converter;
}
}
|
PublicKeyCredentialCreationOptionsFilter
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/StringFieldTest_special.java
|
{
"start": 1148,
"end": 1200
}
|
class ____ {
public String name;
}
}
|
Model
|
java
|
quarkusio__quarkus
|
integration-tests/grpc-interceptors/src/main/java/io/quarkus/grpc/examples/interceptors/HelloExceptionHandlerProvider.java
|
{
"start": 333,
"end": 1111
}
|
class ____ implements ExceptionHandlerProvider {
public static boolean invoked;
@Override
public <ReqT, RespT> ExceptionHandler<ReqT, RespT> createHandler(ServerCall.Listener<ReqT> listener,
ServerCall<ReqT, RespT> serverCall, Metadata metadata) {
return new HelloExceptionHandler<>(listener, serverCall, metadata);
}
@Override
public Throwable transform(Throwable t) {
invoked = true;
if (t instanceof HelloException) {
HelloException he = (HelloException) t;
return new StatusRuntimeException(Status.ABORTED.withDescription(he.getName()));
} else {
return ExceptionHandlerProvider.toStatusException(t, true);
}
}
private static
|
HelloExceptionHandlerProvider
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/jdk/MapDeserializationTest.java
|
{
"start": 841,
"end": 900
}
|
enum ____ {
KEY1, KEY2, WHATEVER;
}
static
|
Key
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
|
{
"start": 2749,
"end": 6614
}
|
class ____
extends Reducer<LongWritable,Text,LongWritable,Text> {
public void reduce(LongWritable key, Iterable<Text> values,
Context context) throws IOException, InterruptedException {
String id = context.getTaskAttemptID().toString();
// Reducer 0 does not output anything
if (!id.endsWith("0_0")) {
for (Text val: values) {
context.write(key, val);
}
}
}
}
private static void runTestLazyOutput(Configuration conf, Path output,
int numReducers, boolean createLazily)
throws Exception {
Job job = Job.getInstance(conf, "Test-Lazy-Output");
FileInputFormat.setInputPaths(job, INPUTPATH);
FileOutputFormat.setOutputPath(job, output);
job.setJarByClass(TestMapReduceLazyOutput.class);
job.setInputFormatClass(TextInputFormat.class);
job.setOutputKeyClass(LongWritable.class);
job.setOutputValueClass(Text.class);
job.setNumReduceTasks(numReducers);
job.setMapperClass(TestMapper.class);
job.setReducerClass(TestReducer.class);
if (createLazily) {
LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
} else {
job.setOutputFormatClass(TextOutputFormat.class);
}
assertTrue(job.waitForCompletion(true));
}
public void createInput(FileSystem fs, int numMappers) throws Exception {
for (int i =0; i < numMappers; i++) {
OutputStream os = fs.create(new Path(INPUTPATH,
"text" + i + ".txt"));
Writer wr = new OutputStreamWriter(os);
for(String inp : INPUTLIST) {
wr.write(inp+"\n");
}
wr.close();
}
}
@Test
public void testLazyOutput() throws Exception {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
FileSystem fileSys = null;
try {
Configuration conf = new Configuration();
// Start the mini-MR and mini-DFS clusters
dfs = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_HADOOP_WORKERS)
.build();
fileSys = dfs.getFileSystem();
mr = new MiniMRCluster(NUM_HADOOP_WORKERS,
fileSys.getUri().toString(), 1);
int numReducers = 2;
int numMappers = NUM_HADOOP_WORKERS * NUM_MAPS_PER_NODE;
createInput(fileSys, numMappers);
Path output1 = new Path("/testlazy/output1");
// Test 1.
runTestLazyOutput(mr.createJobConf(), output1,
numReducers, true);
Path[] fileList =
FileUtil.stat2Paths(fileSys.listStatus(output1,
new Utils.OutputFileUtils.OutputFilesFilter()));
for(int i=0; i < fileList.length; ++i) {
System.out.println("Test1 File list[" + i + "]" + ": "+ fileList[i]);
}
assertTrue(fileList.length == (numReducers - 1));
// Test 2. 0 Reducers, maps directly write to the output files
Path output2 = new Path("/testlazy/output2");
runTestLazyOutput(mr.createJobConf(), output2, 0, true);
fileList =
FileUtil.stat2Paths(fileSys.listStatus(output2,
new Utils.OutputFileUtils.OutputFilesFilter()));
for(int i=0; i < fileList.length; ++i) {
System.out.println("Test2 File list[" + i + "]" + ": "+ fileList[i]);
}
assertTrue(fileList.length == numMappers - 1);
// Test 3. 0 Reducers, but flag is turned off
Path output3 = new Path("/testlazy/output3");
runTestLazyOutput(mr.createJobConf(), output3, 0, false);
fileList =
FileUtil.stat2Paths(fileSys.listStatus(output3,
new Utils.OutputFileUtils.OutputFilesFilter()));
for(int i=0; i < fileList.length; ++i) {
System.out.println("Test3 File list[" + i + "]" + ": "+ fileList[i]);
}
assertTrue(fileList.length == numMappers);
} finally {
if (dfs != null) { dfs.shutdown(); }
if (mr != null) { mr.shutdown();
}
}
}
}
|
TestReducer
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/update/MySqlUpdateTest_0.java
|
{
"start": 1020,
"end": 2188
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "UPDATE `z_code` SET `key`='INTRANT_NOTALLOWED_CATEGORY_C'";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
// print(statementList);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statemen.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("z_code")));
assertTrue(visitor.getColumns().contains(new Column("z_code", "key")));
}
}
|
MySqlUpdateTest_0
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TestInstancePreConstructCallbackTests.java
|
{
"start": 11540,
"end": 12055
}
|
class ____ extends CallSequenceRecordingTestCase {
PreConstructWithClassLifecycle() {
record("constructor");
}
@BeforeEach
void beforeEach() {
record("beforeEach");
}
@Test
void test1() {
callSequence.add("test1");
}
@Test
void test2() {
callSequence.add("test2");
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@ExtendWith(InstancePreConstructCallbackRecordingFoo.class)
@ExtendWith(InstancePreConstructCallbackRecordingLegacy.class)
static
|
PreConstructWithClassLifecycle
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/matchers/CustomMatcherDoesYieldCCETest.java
|
{
"start": 524,
"end": 1315
}
|
class ____ extends TestBase {
@Mock private IMethods mock;
@Test
public void shouldNotThrowCCE() {
mock.simpleMethod(new Object());
try {
// calling overloaded method so that matcher will be called with
// different type
verify(mock).simpleMethod(argThat(isStringWithTextFoo()));
fail();
} catch (ArgumentsAreDifferent e) {
}
}
private ArgumentMatcher<String> isStringWithTextFoo() {
return new ArgumentMatcher<String>() {
public boolean matches(String argument) {
// casting that should not be thrown:
String str = (String) argument;
return str.equals("foo");
}
};
}
}
|
CustomMatcherDoesYieldCCETest
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java
|
{
"start": 33613,
"end": 33742
}
|
interface ____<T extends BaseEntity> extends BaseInterface<T> {
@Override
<S extends T> S test(S T);
}
public
|
EntityInterface
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeMetrics.java
|
{
"start": 2159,
"end": 2217
}
|
class ____ DataNodeVolumeMetrics.
*/
@Timeout(300)
public
|
for
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-reactive-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/reactive/rest/data/panache/deployment/security/entity/CollectionsResource.java
|
{
"start": 670,
"end": 1574
}
|
interface ____ extends PanacheEntityResource<Collection, String> {
@PermissionsAllowed("find-by-name-1")
@PermissionsAllowed("find-by-name-2")
@GET
@Path("/name/{name}")
default Uni<Collection> findByName(@PathParam("name") String name) {
return Collection.find("name = :name", Collections.singletonMap("name", name)).singleResult();
}
@RolesAllowed("admin")
@POST
@Path("/name/{name}")
default Uni<Collection> addByName(@PathParam("name") String name) {
Collection collection = new Collection();
collection.id = name;
collection.name = name;
return Collection.persist(collection).onItem().transform(res -> collection);
}
@PermissionsAllowed("get-1")
@PermissionsAllowed("get-2")
Uni<Collection> get(String id);
@PermissionsAllowed("add")
Uni<Collection> add(Collection entity);
}
|
CollectionsResource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/json/PostgreSQLJsonRemoveFunction.java
|
{
"start": 678,
"end": 2681
}
|
class ____ extends AbstractJsonRemoveFunction {
public PostgreSQLJsonRemoveFunction(TypeConfiguration typeConfiguration) {
super( typeConfiguration );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> arguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> translator) {
final Expression json = (Expression) arguments.get( 0 );
final Expression jsonPath = (Expression) arguments.get( 1 );
final boolean needsCast = !isJsonType( json ) && AbstractSqlAstTranslator.isParameter( json );
if ( needsCast ) {
sqlAppender.appendSql( "cast(" );
}
json.accept( translator );
if ( needsCast ) {
sqlAppender.appendSql( " as jsonb)" );
}
sqlAppender.appendSql( "#-" );
List<JsonPathHelper.JsonPathElement> jsonPathElements =
JsonPathHelper.parseJsonPathElements( translator.getLiteralValue( jsonPath ) );
sqlAppender.appendSql( "array" );
char separator = '[';
for ( JsonPathHelper.JsonPathElement pathElement : jsonPathElements ) {
sqlAppender.appendSql( separator );
if ( pathElement instanceof JsonPathHelper.JsonAttribute attribute ) {
sqlAppender.appendSingleQuoteEscapedString( attribute.attribute() );
}
else if ( pathElement instanceof JsonPathHelper.JsonParameterIndexAccess parameterIndexAccess ) {
final String parameterName = parameterIndexAccess.parameterName();
throw new QueryException( "JSON path [" + jsonPath + "] uses parameter [" + parameterName + "] that is not passed" );
}
else {
sqlAppender.appendSql( '\'' );
sqlAppender.appendSql( ( (JsonPathHelper.JsonIndexAccess) pathElement ).index() );
sqlAppender.appendSql( '\'' );
}
separator = ',';
}
sqlAppender.appendSql( "]::text[]" );
}
private boolean isJsonType(Expression expression) {
final JdbcMappingContainer expressionType = expression.getExpressionType();
return expressionType != null && expressionType.getSingleJdbcMapping().getJdbcType().isJson();
}
}
|
PostgreSQLJsonRemoveFunction
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/HasPartitionKey.java
|
{
"start": 2459,
"end": 2682
}
|
interface ____ to ensure that all its records have the same value for the
* partition keys. Note that the value is after partition transform has been applied, if there
* is any.
*/
InternalRow partitionKey();
}
|
needs
|
java
|
quarkusio__quarkus
|
integration-tests/test-extension/extension/deployment/src/test/java/io/quarkus/commandmode/launch/MultipleInstanceMainInSuperClassCommandModeTestCase.java
|
{
"start": 277,
"end": 972
}
|
class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar
.addClasses(HelloWorldSuperSuper.class, HelloWorldSuper.class, HelloWorldMain.class))
.setApplicationName("run-exit")
.setApplicationVersion("0.1-SNAPSHOT")
.setExpectExit(true)
.setRun(true);
@Test
public void testRun() {
Assertions.assertThat(config.getStartupConsoleOutput()).contains("Hi World");
Assertions.assertThat(config.getExitCode()).isEqualTo(0);
}
@QuarkusMain
public static
|
MultipleInstanceMainInSuperClassCommandModeTestCase
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/MultisetAssert_containsAtLeast_Test.java
|
{
"start": 1105,
"end": 3115
}
|
class ____ {
@Test
public void should_fail_if_actual_is_null() {
// GIVEN
Multiset<String> actual = null;
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).containsAtLeast(1, "test"));
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(actualIsNull());
}
@Test
public void should_fail_if_expected_is_negative() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).containsAtLeast(-1, "test"));
// THEN
assertThat(throwable).isInstanceOf(IllegalArgumentException.class)
.hasMessage("The minimum count should not be negative.");
}
@Test
public void should_fail_if_actual_contains_value_fewer_times_than_expected() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
actual.add("test", 2);
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).containsAtLeast(3, "test"));
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(format("%nExpecting:%n" +
" [\"test\", \"test\"]%n" +
"to contain:%n" +
" \"test\"%n" +
"at least 3 times but was found 2 times."));
}
@Test
public void should_pass_if_actual_contains_value_number_of_times_expected() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
actual.add("test", 2);
// THEN
assertThat(actual).containsAtLeast(2, "test");
}
@Test
public void should_pass_if_actual_contains_value_more_times_than_expected() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
actual.add("test", 2);
// THEN
assertThat(actual).containsAtLeast(1, "test");
}
}
|
MultisetAssert_containsAtLeast_Test
|
java
|
apache__flink
|
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTableModify.java
|
{
"start": 1813,
"end": 2327
}
|
class ____ extends SqlAlterTableSchema {
public SqlAlterTableModify(
SqlParserPos pos,
SqlIdentifier tableName,
SqlNodeList modifiedColumns,
List<SqlTableConstraint> constraints,
@Nullable SqlWatermark watermark,
boolean ifTableExists) {
super(pos, tableName, modifiedColumns, constraints, watermark, ifTableExists);
}
@Override
protected String getAlterOperation() {
return "MODIFY";
}
}
|
SqlAlterTableModify
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/SplitterStreamingStopOnExceptionErrorHandlingTest.java
|
{
"start": 1206,
"end": 3198
}
|
class ____ extends ContextTestSupport {
@Test
public void testSplitterStreamingNoError() throws Exception {
getMockEndpoint("mock:a").expectedBodiesReceived("A", "B", "C", "D", "E");
getMockEndpoint("mock:b").expectedBodiesReceived("A", "B", "C", "D", "E");
getMockEndpoint("mock:result").expectedBodiesReceived("A,B,C,D,E");
template.sendBody("direct:start", "A,B,C,D,E");
assertMockEndpointsSatisfied();
}
@Test
public void testSplitterStreamingWithError() throws Exception {
getMockEndpoint("mock:a").expectedBodiesReceived("A", "B", "Kaboom");
getMockEndpoint("mock:b").expectedBodiesReceived("A", "B");
getMockEndpoint("mock:result").expectedMessageCount(0);
try {
template.sendBody("direct:start", "A,B,Kaboom,D,E");
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(IllegalArgumentException.class, e.getCause().getCause());
assertEquals("Cannot do this", e.getCause().getCause().getMessage());
}
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").split(body().tokenize(",")).streaming().stopOnException().to("mock:a")
.process(new Processor() {
public void process(Exchange exchange) {
String body = exchange.getIn().getBody(String.class);
if ("Kaboom".equals(body)) {
throw new IllegalArgumentException("Cannot do this");
}
}
}).to("mock:b").end().to("mock:result");
}
};
}
}
|
SplitterStreamingStopOnExceptionErrorHandlingTest
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/test/java/org/springframework/security/saml2/provider/service/authentication/TestSaml2Authentications.java
|
{
"start": 902,
"end": 1329
}
|
class ____ {
private TestSaml2Authentications() {
}
public static Saml2Authentication authentication() {
DefaultSaml2AuthenticatedPrincipal principal = new DefaultSaml2AuthenticatedPrincipal("user",
Collections.emptyMap());
principal.setRelyingPartyRegistrationId("simplesamlphp");
return new Saml2Authentication(principal, "response", AuthorityUtils.createAuthorityList("ROLE_USER"));
}
}
|
TestSaml2Authentications
|
java
|
elastic__elasticsearch
|
modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java
|
{
"start": 1808,
"end": 11125
}
|
class ____ extends ESTestCase {
private int slices;
private BulkByScrollTask task;
@Before
public void createTask() {
slices = between(2, 50);
task = new BulkByScrollTask(1, "test_type", "test_action", "test", TaskId.EMPTY_TASK_ID, Collections.emptyMap());
task.setWorkerCount(slices);
}
/**
* Test rethrottling.
* @param runningSlices the number of slices still running
* @param simulator simulate a response from the sub-request to rethrottle the child requests
* @param verifier verify the resulting response
*/
private void rethrottleTestCase(
int runningSlices,
Consumer<ActionListener<ListTasksResponse>> simulator,
Consumer<ActionListener<TaskInfo>> verifier
) {
Client client = mock(Client.class);
String localNodeId = randomAlphaOfLength(5);
float newRequestsPerSecond = randomValueOtherThanMany(f -> f <= 0, () -> randomFloat());
@SuppressWarnings("unchecked")
ActionListener<TaskInfo> listener = mock(ActionListener.class);
when(listener.delegateFailureAndWrap(any())).thenCallRealMethod();
TransportRethrottleAction.rethrottle(logger, localNodeId, client, task, newRequestsPerSecond, listener);
// Capture the sub request and the listener so we can verify they are sane
ArgumentCaptor<RethrottleRequest> subRequest = ArgumentCaptor.forClass(RethrottleRequest.class);
@SuppressWarnings({ "unchecked", "rawtypes" }) // Magical generics incantation.....
ArgumentCaptor<ActionListener<ListTasksResponse>> subListener = ArgumentCaptor.forClass((Class) ActionListener.class);
if (runningSlices > 0) {
verify(client).execute(eq(ReindexPlugin.RETHROTTLE_ACTION), subRequest.capture(), subListener.capture());
assertEquals(new TaskId(localNodeId, task.getId()), subRequest.getValue().getTargetParentTaskId());
assertEquals(newRequestsPerSecond / runningSlices, subRequest.getValue().getRequestsPerSecond(), 0.00001f);
simulator.accept(subListener.getValue());
}
verifier.accept(listener);
}
private Consumer<ActionListener<TaskInfo>> expectSuccessfulRethrottleWithStatuses(
List<BulkByScrollTask.StatusOrException> sliceStatuses
) {
return listener -> {
TaskInfo taskInfo = captureResponse(TaskInfo.class, listener);
assertEquals(sliceStatuses, ((BulkByScrollTask.Status) taskInfo.status()).getSliceStatuses());
};
}
public void testRethrottleSuccessfulResponse() {
List<TaskInfo> tasks = new ArrayList<>();
List<BulkByScrollTask.StatusOrException> sliceStatuses = new ArrayList<>(slices);
for (int i = 0; i < slices; i++) {
BulkByScrollTask.Status status = believeableInProgressStatus(i);
tasks.add(
new TaskInfo(
new TaskId("test", 123),
"test",
"test",
"test",
"test",
status,
0,
0,
true,
false,
new TaskId("test", task.getId()),
Collections.emptyMap()
)
);
sliceStatuses.add(new BulkByScrollTask.StatusOrException(status));
}
rethrottleTestCase(
slices,
listener -> listener.onResponse(new ListTasksResponse(tasks, emptyList(), emptyList())),
expectSuccessfulRethrottleWithStatuses(sliceStatuses)
);
}
public void testRethrottleWithSomeSucceeded() {
int succeeded = between(1, slices - 1);
List<BulkByScrollTask.StatusOrException> sliceStatuses = new ArrayList<>(slices);
for (int i = 0; i < succeeded; i++) {
BulkByScrollTask.Status status = believeableCompletedStatus(i);
task.getLeaderState()
.onSliceResponse(neverCalled(), i, new BulkByScrollResponse(timeValueMillis(10), status, emptyList(), emptyList(), false));
sliceStatuses.add(new BulkByScrollTask.StatusOrException(status));
}
List<TaskInfo> tasks = new ArrayList<>();
for (int i = succeeded; i < slices; i++) {
BulkByScrollTask.Status status = believeableInProgressStatus(i);
tasks.add(
new TaskInfo(
new TaskId("test", 123),
"test",
"test",
"test",
"test",
status,
0,
0,
true,
false,
new TaskId("test", task.getId()),
Collections.emptyMap()
)
);
sliceStatuses.add(new BulkByScrollTask.StatusOrException(status));
}
rethrottleTestCase(
slices - succeeded,
listener -> listener.onResponse(new ListTasksResponse(tasks, emptyList(), emptyList())),
expectSuccessfulRethrottleWithStatuses(sliceStatuses)
);
}
public void testRethrottleWithAllSucceeded() {
List<BulkByScrollTask.StatusOrException> sliceStatuses = new ArrayList<>(slices);
for (int i = 0; i < slices; i++) {
@SuppressWarnings("unchecked")
ActionListener<BulkByScrollResponse> listener = i < slices - 1 ? neverCalled() : mock(ActionListener.class);
BulkByScrollTask.Status status = believeableCompletedStatus(i);
task.getLeaderState()
.onSliceResponse(listener, i, new BulkByScrollResponse(timeValueMillis(10), status, emptyList(), emptyList(), false));
if (i == slices - 1) {
// The whole thing succeeded so we should have got the success
captureResponse(BulkByScrollResponse.class, listener).getStatus();
}
sliceStatuses.add(new BulkByScrollTask.StatusOrException(status));
}
rethrottleTestCase(
0,
listener -> { /* There are no async tasks to simulate because the listener is called for us. */},
expectSuccessfulRethrottleWithStatuses(sliceStatuses)
);
}
private Consumer<ActionListener<TaskInfo>> expectException(Matcher<Exception> exceptionMatcher) {
return listener -> {
ArgumentCaptor<Exception> failure = ArgumentCaptor.forClass(Exception.class);
verify(listener).onFailure(failure.capture());
assertThat(failure.getValue(), exceptionMatcher);
};
}
public void testRethrottleCatastrophicFailures() {
Exception e = new Exception();
rethrottleTestCase(slices, listener -> listener.onFailure(e), expectException(theInstance(e)));
}
public void testRethrottleTaskOperationFailure() {
Exception e = new Exception();
TaskOperationFailure failure = new TaskOperationFailure("test", 123, e);
rethrottleTestCase(
slices,
listener -> listener.onResponse(new ListTasksResponse(emptyList(), singletonList(failure), emptyList())),
expectException(hasToString(containsString("Rethrottle of [test:123] failed")))
);
}
public void testRethrottleNodeFailure() {
FailedNodeException e = new FailedNodeException("test", "test", new Exception());
rethrottleTestCase(
slices,
listener -> listener.onResponse(new ListTasksResponse(emptyList(), emptyList(), singletonList(e))),
expectException(theInstance(e))
);
}
private BulkByScrollTask.Status believeableInProgressStatus(Integer sliceId) {
return new BulkByScrollTask.Status(sliceId, 10, 0, 0, 0, 0, 0, 0, 0, 0, timeValueMillis(0), 0, null, timeValueMillis(0));
}
private BulkByScrollTask.Status believeableCompletedStatus(Integer sliceId) {
return new BulkByScrollTask.Status(sliceId, 10, 10, 0, 0, 0, 0, 0, 0, 0, timeValueMillis(0), 0, null, timeValueMillis(0));
}
private <T> ActionListener<T> neverCalled() {
return new ActionListener<T>() {
@Override
public void onResponse(T response) {
throw new RuntimeException("Expected no interactions but got [" + response + "]");
}
@Override
public void onFailure(Exception e) {
throw new RuntimeException("Expected no interations but was received a failure", e);
}
};
}
private <T> T captureResponse(Class<T> responseClass, ActionListener<T> listener) {
ArgumentCaptor<Exception> failure = ArgumentCaptor.forClass(Exception.class);
// Rethrow any failures just so we get a nice exception if there were any. We don't expect any though.
verify(listener, atMost(1)).onFailure(failure.capture());
if (false == failure.getAllValues().isEmpty()) {
throw new AssertionError(failure.getValue());
}
ArgumentCaptor<T> response = ArgumentCaptor.forClass(responseClass);
verify(listener).onResponse(response.capture());
return response.getValue();
}
}
|
TransportRethrottleActionTests
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/util/NotificationLiteTest.java
|
{
"start": 915,
"end": 1575
}
|
class ____ extends RxJavaTest {
@Test
public void acceptFullObserver() {
TestObserverEx<Integer> to = new TestObserverEx<>();
Disposable d = Disposable.empty();
assertFalse(NotificationLite.acceptFull(NotificationLite.disposable(d), to));
to.assertSubscribed();
to.dispose();
assertTrue(d.isDisposed());
}
@Test
public void errorNotificationCompare() {
TestException ex = new TestException();
Object n1 = NotificationLite.error(ex);
assertEquals(ex.hashCode(), n1.hashCode());
assertNotEquals(n1, NotificationLite.complete());
}
}
|
NotificationLiteTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
|
{
"start": 12871,
"end": 12968
}
|
class ____ extends CompositeService implements
ContainerManager {
private
|
ContainerManagerImpl
|
java
|
junit-team__junit5
|
junit-platform-commons/src/main/java/org/junit/platform/commons/util/ClassLoaderUtils.java
|
{
"start": 1243,
"end": 1981
}
|
class ____; never {@code null}
* @since 1.10
*/
public static ClassLoader getClassLoader(Class<?> clazz) {
Preconditions.notNull(clazz, "Class must not be null");
ClassLoader classLoader = clazz.getClassLoader();
return (classLoader != null) ? classLoader : getDefaultClassLoader();
}
public static ClassLoader getDefaultClassLoader() {
try {
ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
if (contextClassLoader != null) {
return contextClassLoader;
}
}
catch (Throwable t) {
UnrecoverableExceptions.rethrowIfUnrecoverable(t);
/* otherwise ignore */
}
return ClassLoader.getSystemClassLoader();
}
/**
* Get the location from which the supplied object's
|
loader
|
java
|
redisson__redisson
|
redisson-spring-data/redisson-spring-data-31/src/test/java/org/redisson/RedissonRuntimeEnvironment.java
|
{
"start": 117,
"end": 672
}
|
class ____ {
public static final boolean isTravis = "true".equalsIgnoreCase(System.getProperty("travisEnv"));
public static final String redisBinaryPath = System.getProperty("redisBinary", "C:\\redis\\redis-server2.cmd");
public static final String tempDir = System.getProperty("java.io.tmpdir");
public static final String OS;
public static final boolean isWindows;
static {
OS = System.getProperty("os.name", "generic");
isWindows = OS.toLowerCase(Locale.ENGLISH).contains("win");
}
}
|
RedissonRuntimeEnvironment
|
java
|
FasterXML__jackson-core
|
src/test/java/tools/jackson/core/unittest/type/TypeReferenceTest.java
|
{
"start": 464,
"end": 3391
}
|
class ____ extends ResolvedType {
private final boolean _refType;
public BogusResolvedType(boolean isRefType) {
_refType = isRefType;
}
@Override
public Class<?> getRawClass() {
return null;
}
@Override
public boolean hasRawClass(Class<?> clz) {
return false;
}
@Override
public boolean isAbstract() {
return false;
}
@Override
public boolean isConcrete() {
return false;
}
@Override
public boolean isThrowable() {
return false;
}
@Override
public boolean isArrayType() {
return false;
}
@Override
public boolean isEnumType() {
return false;
}
@Override
public boolean isInterface() {
return false;
}
@Override
public boolean isPrimitive() {
return false;
}
@Override
public boolean isFinal() {
return false;
}
@Override
public boolean isContainerType() {
return false;
}
@Override
public boolean isCollectionLikeType() {
return false;
}
@Override
public boolean isMapLikeType() {
return false;
}
@Override
public boolean hasGenericTypes() {
return false;
}
@Override
public ResolvedType getKeyType() {
return null;
}
@Override
public ResolvedType getContentType() {
return null;
}
@Override
public ResolvedType getReferencedType() {
if (_refType) {
return this;
}
return null;
}
@Override
public int containedTypeCount() {
return 0;
}
@Override
public ResolvedType containedType(int index) {
return null;
}
@Override
public String toCanonical() {
return null;
}
}
@Test
void simple()
{
TypeReference<?> ref = new TypeReference<List<String>>() { };
assertNotNull(ref);
ref.equals(null);
}
@SuppressWarnings("rawtypes")
@Test
void invalid()
{
try {
Object ob = new TypeReference() { };
fail("Should not pass, got: "+ob);
} catch (IllegalArgumentException e) {
verifyException(e, "without actual type information");
}
}
@Test
void resolvedType() {
ResolvedType type1 = new BogusResolvedType(false);
assertFalse(type1.isReferenceType());
ResolvedType type2 = new BogusResolvedType(true);
assertTrue(type2.isReferenceType());
}
}
|
BogusResolvedType
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/concurrent/ConcurrentUtilsTest.java
|
{
"start": 1653,
"end": 17200
}
|
class ____ extends AbstractLangTest {
/**
* Tests constant future.
*
* @throws Exception so we don't have to catch it
*/
@Test
void testConstantFuture_Integer() throws Exception {
final Integer value = Integer.valueOf(5);
final Future<Integer> test = ConcurrentUtils.constantFuture(value);
assertTrue(test.isDone());
assertSame(value, test.get());
assertSame(value, test.get(1000, TimeUnit.SECONDS));
assertSame(value, test.get(1000, null));
assertFalse(test.isCancelled());
assertFalse(test.cancel(true));
assertFalse(test.cancel(false));
}
/**
* Tests constant future.
*
* @throws Exception so we don't have to catch it
*/
@Test
void testConstantFuture_null() throws Exception {
final Integer value = null;
final Future<Integer> test = ConcurrentUtils.constantFuture(value);
assertTrue(test.isDone());
assertSame(value, test.get());
assertSame(value, test.get(1000, TimeUnit.SECONDS));
assertSame(value, test.get(1000, null));
assertFalse(test.isCancelled());
assertFalse(test.cancel(true));
assertFalse(test.cancel(false));
}
/**
* Tests createIfAbsent() if the map does not contain the key in question.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testCreateIfAbsentKeyNotPresent() throws ConcurrentException {
final ConcurrentInitializer<Integer> init = EasyMock.createMock(ConcurrentInitializer.class);
final String key = "testKey";
final Integer value = 42;
EasyMock.expect(init.get()).andReturn(value);
EasyMock.replay(init);
final ConcurrentMap<String, Integer> map = new ConcurrentHashMap<>();
assertEquals(value, ConcurrentUtils.createIfAbsent(map, key, init), "Wrong result");
assertEquals(value, map.get(key), "Wrong value in map");
EasyMock.verify(init);
}
/**
* Tests createIfAbsent() if the key is found in the map.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testCreateIfAbsentKeyPresent() throws ConcurrentException {
final ConcurrentInitializer<Integer> init = EasyMock.createMock(ConcurrentInitializer.class);
EasyMock.replay(init);
final String key = "testKey";
final Integer value = 42;
final ConcurrentMap<String, Integer> map = new ConcurrentHashMap<>();
map.put(key, value);
assertEquals(value, ConcurrentUtils.createIfAbsent(map, key, init), "Wrong result");
assertEquals(value, map.get(key), "Wrong value in map");
EasyMock.verify(init);
}
/**
* Tests createIfAbsent() if a null initializer is passed in.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testCreateIfAbsentNullInit() throws ConcurrentException {
final ConcurrentMap<String, Integer> map = new ConcurrentHashMap<>();
final String key = "testKey";
final Integer value = 42;
map.put(key, value);
assertNull(ConcurrentUtils.createIfAbsent(map, key, null), "Wrong result");
assertEquals(value, map.get(key), "Map was changed");
}
/**
* Tests createIfAbsent() if a null map is passed in.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testCreateIfAbsentNullMap() throws ConcurrentException {
final ConcurrentInitializer<Integer> init = EasyMock.createMock(ConcurrentInitializer.class);
EasyMock.replay(init);
assertNull(ConcurrentUtils.createIfAbsent(null, "test", init), "Wrong result");
EasyMock.verify(init);
}
/**
* Tests createIfAbsentUnchecked() if an exception is thrown.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testCreateIfAbsentUncheckedException() throws ConcurrentException {
final ConcurrentInitializer<Integer> init = EasyMock.createMock(ConcurrentInitializer.class);
final Exception ex = new Exception();
EasyMock.expect(init.get()).andThrow(new ConcurrentException(ex));
EasyMock.replay(init);
final ConcurrentRuntimeException crex = assertThrows(ConcurrentRuntimeException.class,
() -> ConcurrentUtils.createIfAbsentUnchecked(new ConcurrentHashMap<>(), "test", init));
assertEquals(ex, crex.getCause(), "Wrong cause");
EasyMock.verify(init);
}
/**
* Tests createIfAbsentUnchecked() if no exception is thrown.
*/
@Test
void testCreateIfAbsentUncheckedSuccess() {
final String key = "testKey";
final Integer value = 42;
final ConcurrentMap<String, Integer> map = new ConcurrentHashMap<>();
assertEquals(value, ConcurrentUtils.createIfAbsentUnchecked(map, key, new ConstantInitializer<>(value)), "Wrong result");
assertEquals(value, map.get(key), "Wrong value in map");
}
/**
* Tests extractCause() if the cause is a checked exception.
*/
@Test
void testExtractCauseChecked() {
final Exception ex = new Exception("Test");
final ConcurrentException cex = ConcurrentUtils.extractCause(new ExecutionException(ex));
assertSame(ex, cex.getCause(), "Wrong cause");
}
/**
* Tests extractCause() if the cause is an error.
*/
@Test
void testExtractCauseError() {
final Error err = new AssertionError("Test");
final AssertionError e = assertThrows(AssertionError.class, () -> ConcurrentUtils.extractCause(new ExecutionException(err)));
assertEquals(err, e, "Wrong error");
}
/**
* Tests extractCause() for a null exception.
*/
@Test
void testExtractCauseNull() {
assertNull(ConcurrentUtils.extractCause(null), "Non null result");
}
/**
* Tests extractCause() if the cause of the passed in exception is null.
*/
@Test
void testExtractCauseNullCause() {
assertNull(ConcurrentUtils.extractCause(new ExecutionException("Test", null)), "Non null result");
}
/**
* Tests extractCauseUnchecked() if the cause is a checked exception.
*/
@Test
void testExtractCauseUncheckedChecked() {
final Exception ex = new Exception("Test");
final ConcurrentRuntimeException cex = ConcurrentUtils.extractCauseUnchecked(new ExecutionException(ex));
assertSame(ex, cex.getCause(), "Wrong cause");
}
/**
* Tests extractCauseUnchecked() if the cause is an error.
*/
@Test
void testExtractCauseUncheckedError() {
final Error err = new AssertionError("Test");
final Error e = assertThrows(Error.class, () -> ConcurrentUtils.extractCauseUnchecked(new ExecutionException(err)));
assertEquals(err, e, "Wrong error");
}
/**
* Tests extractCause() if the cause is an unchecked exception.
*/
@Test
void testExtractCauseUncheckedException() {
final RuntimeException rex = new RuntimeException("Test");
assertThrows(RuntimeException.class, () -> ConcurrentUtils.extractCause(new ExecutionException(rex)));
}
/**
* Tests extractCauseUnchecked() for a null exception.
*/
@Test
void testExtractCauseUncheckedNull() {
assertNull(ConcurrentUtils.extractCauseUnchecked(null), "Non null result");
}
/**
* Tests extractCauseUnchecked() if the cause of the passed in exception is null.
*/
@Test
void testExtractCauseUncheckedNullCause() {
assertNull(ConcurrentUtils.extractCauseUnchecked(new ExecutionException("Test", null)), "Non null result");
}
/**
* Tests extractCauseUnchecked() if the cause is an unchecked exception.
*/
@Test
void testExtractCauseUncheckedUncheckedException() {
final RuntimeException rex = new RuntimeException("Test");
final RuntimeException r = assertThrows(RuntimeException.class, () -> ConcurrentUtils.extractCauseUnchecked(new ExecutionException(rex)));
assertEquals(rex, r, "Wrong exception");
}
/**
* Tests handleCause() if the cause is a checked exception.
*/
@Test
void testHandleCauseChecked() {
final Exception ex = new Exception("Test");
final ConcurrentException cex = assertThrows(ConcurrentException.class, () -> ConcurrentUtils.handleCause(new ExecutionException(ex)));
assertEquals(ex, cex.getCause(), "Wrong cause");
}
/**
* Tests handleCause() if the cause is an error.
*/
@Test
void testHandleCauseError() {
final Error err = new AssertionError("Test");
final Error e = assertThrows(Error.class, () -> ConcurrentUtils.handleCause(new ExecutionException(err)));
assertEquals(err, e, "Wrong error");
}
/**
* Tests handleCause() for a null parameter or a null cause. In this case the method should do nothing. We can only test
* that no exception is thrown.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testHandleCauseNull() throws ConcurrentException {
ConcurrentUtils.handleCause(null);
ConcurrentUtils.handleCause(new ExecutionException("Test", null));
}
/**
* Tests handleCauseUnchecked() if the cause is a checked exception.
*/
@Test
void testHandleCauseUncheckedChecked() {
final Exception ex = new Exception("Test");
final ConcurrentRuntimeException crex = assertThrows(ConcurrentRuntimeException.class,
() -> ConcurrentUtils.handleCauseUnchecked(new ExecutionException(ex)));
assertEquals(ex, crex.getCause(), "Wrong cause");
}
/**
* Tests handleCauseUnchecked() if the cause is an error.
*/
@Test
void testHandleCauseUncheckedError() {
final Error err = new AssertionError("Test");
final Error e = assertThrows(Error.class, () -> ConcurrentUtils.handleCauseUnchecked(new ExecutionException(err)));
assertEquals(err, e, "Wrong error");
}
/**
* Tests handleCause() if the cause is an unchecked exception.
*/
@Test
void testHandleCauseUncheckedException() {
final RuntimeException rex = new RuntimeException("Test");
final RuntimeException r = assertThrows(RuntimeException.class, () -> ConcurrentUtils.handleCause(new ExecutionException(rex)));
assertEquals(rex, r, "Wrong exception");
}
/**
* Tests handleCauseUnchecked() for a null parameter or a null cause. In this case the method should do nothing. We can
* only test that no exception is thrown.
*/
@Test
void testHandleCauseUncheckedNull() {
ConcurrentUtils.handleCauseUnchecked(null);
ConcurrentUtils.handleCauseUnchecked(new ExecutionException("Test", null));
}
/**
* Tests handleCauseUnchecked() if the cause is an unchecked exception.
*/
@Test
void testHandleCauseUncheckedUncheckedException() {
final RuntimeException rex = new RuntimeException("Test");
final RuntimeException r = assertThrows(RuntimeException.class, () -> ConcurrentUtils.handleCauseUnchecked(new ExecutionException(rex)));
assertEquals(rex, r, "Wrong exception");
}
/**
* Tests a successful initialize() operation.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testInitialize() throws ConcurrentException {
final ConcurrentInitializer<Object> init = EasyMock.createMock(ConcurrentInitializer.class);
final Object result = new Object();
EasyMock.expect(init.get()).andReturn(result);
EasyMock.replay(init);
assertSame(result, ConcurrentUtils.initialize(init), "Wrong result object");
EasyMock.verify(init);
}
/**
* Tests initialize() for a null argument.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testInitializeNull() throws ConcurrentException {
assertNull(ConcurrentUtils.initialize(null), "Got a result");
}
/**
* Tests a successful initializeUnchecked() operation.
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testInitializeUnchecked() throws ConcurrentException {
final ConcurrentInitializer<Object> init = EasyMock.createMock(ConcurrentInitializer.class);
final Object result = new Object();
EasyMock.expect(init.get()).andReturn(result);
EasyMock.replay(init);
assertSame(result, ConcurrentUtils.initializeUnchecked(init), "Wrong result object");
EasyMock.verify(init);
}
/**
* Tests whether exceptions are correctly handled by initializeUnchecked().
*
* @throws org.apache.commons.lang3.concurrent.ConcurrentException so we don't have to catch it
*/
@Test
void testInitializeUncheckedEx() throws ConcurrentException {
final ConcurrentInitializer<Object> init = EasyMock.createMock(ConcurrentInitializer.class);
final Exception cause = new Exception();
EasyMock.expect(init.get()).andThrow(new ConcurrentException(cause));
EasyMock.replay(init);
final ConcurrentRuntimeException crex = assertThrows(ConcurrentRuntimeException.class, () -> ConcurrentUtils.initializeUnchecked(init));
assertSame(cause, crex.getCause(), "Wrong cause");
EasyMock.verify(init);
}
/**
* Tests initializeUnchecked() for a null argument.
*/
@Test
void testInitializeUncheckedNull() {
assertNull(ConcurrentUtils.initializeUnchecked(null), "Got a result");
}
/**
* Tests putIfAbsent() if the map does not contain the key in question.
*/
@Test
void testPutIfAbsentKeyNotPresent() {
final String key = "testKey";
final Integer value = 42;
final ConcurrentMap<String, Integer> map = new ConcurrentHashMap<>();
assertEquals(value, ConcurrentUtils.putIfAbsent(map, key, value), "Wrong result");
assertEquals(value, map.get(key), "Wrong value in map");
}
/**
* Tests putIfAbsent() if the map contains the key in question.
*/
@Test
void testPutIfAbsentKeyPresent() {
final String key = "testKey";
final Integer value = 42;
final ConcurrentMap<String, Integer> map = new ConcurrentHashMap<>();
map.put(key, value);
assertEquals(value, ConcurrentUtils.putIfAbsent(map, key, 0), "Wrong result");
assertEquals(value, map.get(key), "Wrong value in map");
}
/**
* Tests putIfAbsent() if a null map is passed in.
*/
@Test
void testPutIfAbsentNullMap() {
assertNull(ConcurrentUtils.putIfAbsent(null, "test", 100), "Wrong result");
}
/**
* Tests creating ConcurrentRuntimeException with no arguments.
*/
@Test
void testUninitializedConcurrentRuntimeException() {
assertNotNull(new ConcurrentRuntimeException(), "Error creating empty ConcurrentRuntimeException");
}
}
|
ConcurrentUtilsTest
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/PropertyBindingSupport.java
|
{
"start": 86589,
"end": 88092
}
|
class ____ extends LinkedHashMap<String, Object> {
private final String optionPrefix;
private final Map<String, Object> originalMap;
public OptionPrefixMap(Map<String, Object> map, String optionPrefix) {
this.originalMap = map;
this.optionPrefix = optionPrefix;
// copy from original map into our map without the option prefix
map.forEach((k, v) -> {
if (startsWithIgnoreCase(k, optionPrefix)) {
put(k.substring(optionPrefix.length()), v);
} else if (startsWithIgnoreCase(k, "?" + optionPrefix)) {
put(k.substring(optionPrefix.length() + 1), v);
}
});
}
@Override
public Object remove(Object key) {
// we only need to care about the remove method,
// so we can remove the corresponding key from the original map
Set<String> toBeRemoved = new HashSet<>();
originalMap.forEach((k, v) -> {
if (startsWithIgnoreCase(k, optionPrefix)) {
toBeRemoved.add(k);
} else if (startsWithIgnoreCase(k, "?" + optionPrefix)) {
toBeRemoved.add(k);
}
});
toBeRemoved.forEach(originalMap::remove);
return super.remove(key);
}
}
/**
* Used for flatten properties when they are a map of maps
*/
private static
|
OptionPrefixMap
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/http/converter/json/Jackson2ObjectMapperBuilderTests.java
|
{
"start": 4427,
"end": 4607
}
|
class ____ {@link Jackson2ObjectMapperBuilder}.
*
* @author Sebastien Deleuze
* @author Eddú Meléndez
* @author Hyoungjune Kim
*/
@SuppressWarnings({"deprecation", "removal"})
|
for
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/function/FailableDoubleSupplier.java
|
{
"start": 912,
"end": 1088
}
|
interface ____ {@link DoubleSupplier} that declares a {@link Throwable}.
*
* @param <E> The kind of thrown exception or error.
* @since 3.11
*/
@FunctionalInterface
public
|
like
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanWithInterfacePresentTwiceTests.java
|
{
"start": 1390,
"end": 1740
}
|
interface ____ <em>present</em> twice in the hierarchy.
*
* @author Sam Brannen
* @since 6.2.7
* @see MockitoBeanNestedAndTypeHierarchiesWithEnclosingClassPresentTwiceTests
* @see MockitoBeanNestedAndTypeHierarchiesWithSuperclassPresentTwiceTests
* @see <a href="https://github.com/spring-projects/spring-framework/issues/34844">gh-34844</a>
*/
|
is
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/authentication/OAuth2AuthenticationTokenTests.java
|
{
"start": 1385,
"end": 4207
}
|
class ____ {
private OAuth2User principal;
private Collection<? extends GrantedAuthority> authorities;
private String authorizedClientRegistrationId;
@BeforeEach
public void setUp() {
this.principal = mock(OAuth2User.class);
this.authorities = Collections.emptyList();
this.authorizedClientRegistrationId = "client-registration-1";
}
@Test
public void constructorWhenPrincipalIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(
() -> new OAuth2AuthenticationToken(null, this.authorities, this.authorizedClientRegistrationId));
}
@Test
public void constructorWhenAuthoritiesIsNullThenCreated() {
new OAuth2AuthenticationToken(this.principal, null, this.authorizedClientRegistrationId);
}
@Test
public void constructorWhenAuthoritiesIsEmptyThenCreated() {
new OAuth2AuthenticationToken(this.principal, Collections.emptyList(), this.authorizedClientRegistrationId);
}
@Test
public void constructorWhenAuthorizedClientRegistrationIdIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OAuth2AuthenticationToken(this.principal, this.authorities, null));
}
@Test
public void constructorWhenAllParametersProvidedAndValidThenCreated() {
OAuth2AuthenticationToken authentication = new OAuth2AuthenticationToken(this.principal, this.authorities,
this.authorizedClientRegistrationId);
assertThat(authentication.getPrincipal()).isEqualTo(this.principal);
assertThat(authentication.getCredentials()).isEqualTo("");
assertThat(authentication.getAuthorities()).isEqualTo(this.authorities);
assertThat(authentication.getAuthorizedClientRegistrationId()).isEqualTo(this.authorizedClientRegistrationId);
assertThat(authentication.isAuthenticated()).isEqualTo(true);
}
@Test
public void toBuilderWhenApplyThenCopies() {
OAuth2AuthenticationToken factorOne = new OAuth2AuthenticationToken(TestOAuth2Users.create(),
AuthorityUtils.createAuthorityList("FACTOR_ONE"), "alice");
OAuth2AuthenticationToken factorTwo = new OAuth2AuthenticationToken(TestOAuth2Users.create(),
AuthorityUtils.createAuthorityList("FACTOR_TWO"), "bob");
OAuth2AuthenticationToken result = factorOne.toBuilder()
.authorities((a) -> a.addAll(factorTwo.getAuthorities()))
.principal(factorTwo.getPrincipal())
.authorizedClientRegistrationId(factorTwo.getAuthorizedClientRegistrationId())
.build();
Set<String> authorities = AuthorityUtils.authorityListToSet(result.getAuthorities());
assertThat(result.getPrincipal()).isSameAs(factorTwo.getPrincipal());
assertThat(result.getAuthorizedClientRegistrationId()).isSameAs(factorTwo.getAuthorizedClientRegistrationId());
assertThat(authorities).containsExactlyInAnyOrder("FACTOR_ONE", "FACTOR_TWO");
}
}
|
OAuth2AuthenticationTokenTests
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/placement/QueueMapping.java
|
{
"start": 1187,
"end": 1238
}
|
class ____ QueueMapping.
*
*/
public static
|
for
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/TypeConverterAllowNullTest.java
|
{
"start": 1017,
"end": 1616
}
|
class ____ {
@Test
public void testMissThenAddTypeConverter() {
DefaultCamelContext context = new DefaultCamelContext();
context.getTypeConverterRegistry().addTypeConverter(MyOrder.class, String.class, new MyOrderTypeConverter());
MyOrder order = context.getTypeConverter().convertTo(MyOrder.class, "0");
assertNull(order);
// this time it should work
order = context.getTypeConverter().convertTo(MyOrder.class, "123");
assertNotNull(order);
assertEquals(123, order.getId());
}
private static
|
TypeConverterAllowNullTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-data-r2dbc-test/src/main/java/org/springframework/boot/data/r2dbc/test/autoconfigure/DataR2dbcTypeExcludeFilter.java
|
{
"start": 968,
"end": 1151
}
|
class ____ extends StandardAnnotationCustomizableTypeExcludeFilter<DataR2dbcTest> {
DataR2dbcTypeExcludeFilter(Class<?> testClass) {
super(testClass);
}
}
|
DataR2dbcTypeExcludeFilter
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/support/AutowireCandidateResolver.java
|
{
"start": 4579,
"end": 4925
}
|
class ____ lazy resolution of the dependency target,
* if demanded by the injection point.
* <p>The default implementation simply returns {@code null}.
* @param descriptor the descriptor for the target method parameter or field
* @param beanName the name of the bean that contains the injection point
* @return the lazy resolution proxy
|
for
|
java
|
spring-projects__spring-boot
|
module/spring-boot-hazelcast/src/main/java/org/springframework/boot/hazelcast/autoconfigure/HazelcastServerConfiguration.java
|
{
"start": 4468,
"end": 4930
}
|
class ____ {
@Bean
@Order(0)
HazelcastConfigCustomizer loggingHazelcastConfigCustomizer() {
return (config) -> {
if (!config.getProperties().containsKey(HAZELCAST_LOGGING_TYPE)) {
config.setProperty(HAZELCAST_LOGGING_TYPE, "slf4j");
}
};
}
}
/**
* {@link HazelcastConfigResourceCondition} that checks if the
* {@code spring.hazelcast.config} configuration key is defined.
*/
static
|
HazelcastLoggingConfigCustomizerConfiguration
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/producer/internals/ProducerBatch.java
|
{
"start": 20203,
"end": 25178
}
|
class ____ {
final Callback callback;
final FutureRecordMetadata future;
Thunk(Callback callback, FutureRecordMetadata future) {
this.callback = callback;
this.future = future;
}
}
@Override
public String toString() {
return "ProducerBatch(topicPartition=" + topicPartition + ", recordCount=" + recordCount + ")";
}
boolean hasReachedDeliveryTimeout(long deliveryTimeoutMs, long now) {
return deliveryTimeoutMs <= now - this.createdMs;
}
public FinalState finalState() {
return this.finalState.get();
}
int attempts() {
return attempts.get();
}
void reenqueued(long now) {
attempts.getAndIncrement();
lastAttemptMs = Math.max(lastAppendTime, now);
lastAppendTime = Math.max(lastAppendTime, now);
retry = true;
}
long queueTimeMs() {
return drainedMs - createdMs;
}
long waitedTimeMs(long nowMs) {
return Math.max(0, nowMs - lastAttemptMs);
}
void drained(long nowMs) {
this.drainedMs = Math.max(drainedMs, nowMs);
}
boolean isSplitBatch() {
return isSplitBatch;
}
/**
* Returns if the batch is been retried for sending to kafka
*/
public boolean inRetry() {
return this.retry;
}
public MemoryRecords records() {
return recordsBuilder.build();
}
public int estimatedSizeInBytes() {
return recordsBuilder.estimatedSizeInBytes();
}
public double compressionRatio() {
return recordsBuilder.compressionRatio();
}
public boolean isFull() {
return recordsBuilder.isFull();
}
public void setProducerState(ProducerIdAndEpoch producerIdAndEpoch, int baseSequence, boolean isTransactional) {
recordsBuilder.setProducerState(producerIdAndEpoch.producerId, producerIdAndEpoch.epoch, baseSequence, isTransactional);
}
public void resetProducerState(ProducerIdAndEpoch producerIdAndEpoch, int baseSequence) {
log.info("Resetting sequence number of batch with current sequence {} for partition {} to {}",
this.baseSequence(), this.topicPartition, baseSequence);
reopened = true;
recordsBuilder.reopenAndRewriteProducerState(producerIdAndEpoch.producerId, producerIdAndEpoch.epoch, baseSequence, isTransactional());
}
/**
* Release resources required for record appends (e.g. compression buffers). Once this method is called, it's only
* possible to update the RecordBatch header.
*/
public void closeForRecordAppends() {
recordsBuilder.closeForRecordAppends();
}
public void close() {
recordsBuilder.close();
if (!recordsBuilder.isControlBatch()) {
CompressionRatioEstimator.updateEstimation(topicPartition.topic(),
recordsBuilder.compression().type(),
(float) recordsBuilder.compressionRatio());
}
reopened = false;
}
/**
* Abort the record builder and reset the state of the underlying buffer. This is used prior to aborting
* the batch with {@link #abort(RuntimeException)} and ensures that no record previously appended can be
* read. This is used in scenarios where we want to ensure a batch ultimately gets aborted, but in which
* it is not safe to invoke the completion callbacks (e.g. because we are holding a lock, such as
* when aborting batches in {@link RecordAccumulator}).
*/
public void abortRecordAppends() {
recordsBuilder.abort();
}
public boolean isClosed() {
return recordsBuilder.isClosed();
}
public ByteBuffer buffer() {
return recordsBuilder.buffer();
}
public int initialCapacity() {
return recordsBuilder.initialCapacity();
}
public boolean isWritable() {
return !recordsBuilder.isClosed();
}
public byte magic() {
return recordsBuilder.magic();
}
public long producerId() {
return recordsBuilder.producerId();
}
public short producerEpoch() {
return recordsBuilder.producerEpoch();
}
public int baseSequence() {
return recordsBuilder.baseSequence();
}
public int lastSequence() {
return recordsBuilder.baseSequence() + recordsBuilder.numRecords() - 1;
}
public boolean hasSequence() {
return baseSequence() != RecordBatch.NO_SEQUENCE;
}
public boolean isTransactional() {
return recordsBuilder.isTransactional();
}
public boolean sequenceHasBeenReset() {
return reopened;
}
// VisibleForTesting
OptionalInt currentLeaderEpoch() {
return currentLeaderEpoch;
}
// VisibleForTesting
int attemptsWhenLeaderLastChanged() {
return attemptsWhenLeaderLastChanged;
}
}
|
Thunk
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationRequestInfo.java
|
{
"start": 1145,
"end": 1285
}
|
class ____ a reservation request.
*/
@XmlRootElement(name = "reservation-definition")
@XmlAccessorType(XmlAccessType.FIELD)
public
|
representing
|
java
|
apache__rocketmq
|
client/src/main/java/org/apache/rocketmq/client/consumer/AckResult.java
|
{
"start": 856,
"end": 1549
}
|
class ____ {
private AckStatus status;
private String extraInfo;
private long popTime;
public void setPopTime(long popTime) {
this.popTime = popTime;
}
public long getPopTime() {
return popTime;
}
public AckStatus getStatus() {
return status;
}
public void setStatus(AckStatus status) {
this.status = status;
}
public void setExtraInfo(String extraInfo) {
this.extraInfo = extraInfo;
}
public String getExtraInfo() {
return extraInfo;
}
@Override
public String toString() {
return "AckResult [AckStatus=" + status + ",extraInfo=" + extraInfo + "]";
}
}
|
AckResult
|
java
|
micronaut-projects__micronaut-core
|
inject/src/main/java/io/micronaut/context/event/ApplicationEventListener.java
|
{
"start": 728,
"end": 913
}
|
interface ____ receivers of application events.
*
* @param <E> An event
* @author Graeme Rocher
* @since 1.0
*/
@Indexed(ApplicationEventListener.class)
@FunctionalInterface
public
|
for
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
|
{
"start": 1760,
"end": 1902
}
|
class ____ {
private static RpcController NULL_CONTROLLER = null;
private static final int PRIME = 16777619;
private static
|
RpcClientUtil
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/jmx/export/assembler/AbstractMBeanInfoAssembler.java
|
{
"start": 7840,
"end": 8597
}
|
interface ____ the managed resource.
* <p>Default implementation returns an empty array of {@code ModelMBeanNotificationInfo}.
* @param managedBean the bean instance (might be an AOP proxy)
* @param beanKey the key associated with the MBean in the beans map
* of the {@code MBeanExporter}
* @return the notification metadata
* @throws JMException in case of errors
*/
protected ModelMBeanNotificationInfo[] getNotificationInfo(Object managedBean, String beanKey)
throws JMException {
return new ModelMBeanNotificationInfo[0];
}
/**
* Get the attribute metadata for the MBean resource. Subclasses should implement
* this method to return the appropriate metadata for all the attributes that should
* be exposed in the management
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/QlIllegalArgumentException.java
|
{
"start": 478,
"end": 1271
}
|
class ____ extends QlServerException {
public QlIllegalArgumentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
public QlIllegalArgumentException(String message, Throwable cause) {
super(message, cause);
}
public QlIllegalArgumentException(String message, Object... args) {
super(message, args);
}
public QlIllegalArgumentException(Throwable cause, String message, Object... args) {
super(cause, message, args);
}
public QlIllegalArgumentException(String message) {
super(message);
}
public QlIllegalArgumentException(Throwable cause) {
super(cause);
}
}
|
QlIllegalArgumentException
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Absent.java
|
{
"start": 1817,
"end": 5186
}
|
class ____ extends AggregateFunction implements SurrogateExpression {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Absent", Absent::new);
@FunctionInfo(
returnType = "boolean",
description = "Returns true if the input expression yields no non-null values within the current aggregation context. "
+ "Otherwise it returns false.",
type = FunctionType.AGGREGATE,
appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.GA, version = "9.2.0") },
examples = {
@Example(file = "absent", tag = "absent"),
@Example(
description = "To check for the absence inside a group use `ABSENT()` and `BY` clauses",
file = "absent",
tag = "absent-by"
),
@Example(
description = "To check for the absence and return 1 when it's true and 0 when it's false you can use to_integer()",
file = "absent",
tag = "absent-as-integer"
) }
)
public Absent(
Source source,
@Param(
name = "field",
type = {
"aggregate_metric_double",
"boolean",
"cartesian_point",
"cartesian_shape",
"date",
"date_nanos",
"double",
"geo_point",
"geo_shape",
"geohash",
"geotile",
"geohex",
"integer",
"ip",
"keyword",
"long",
"text",
"unsigned_long",
"version" },
description = "Expression that outputs values to be checked for absence."
) Expression field
) {
this(source, field, Literal.TRUE, NO_WINDOW);
}
public Absent(Source source, Expression field, Expression filter, Expression window) {
super(source, field, filter, window, emptyList());
}
private Absent(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
@Override
protected NodeInfo<Absent> info() {
return NodeInfo.create(this, Absent::new, field(), filter(), window());
}
@Override
public AggregateFunction withFilter(Expression filter) {
return new Absent(source(), field(), filter, window());
}
@Override
public Absent replaceChildren(List<Expression> newChildren) {
return new Absent(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2));
}
@Override
public DataType dataType() {
return DataType.BOOLEAN;
}
@Override
public Nullability nullable() {
return Nullability.FALSE;
}
@Override
protected TypeResolution resolveType() {
return isType(
field(),
dt -> dt.isCounter() == false && dt != DataType.DENSE_VECTOR,
sourceText(),
DEFAULT,
"any type except counter types or dense_vector"
);
}
@Override
public Expression surrogate() {
return new Not(source(), new Present(source(), field(), filter(), window()));
}
}
|
Absent
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/serde/ExecNodeVersionUpgradeSerdeTest.java
|
{
"start": 4373,
"end": 5543
}
|
class ____ extends ExecNodeBase<RowData> {
private static final String FIELD_NAME_NEW_ADDED = "newProperty";
/** DummyExecNode gets an additional property in Flink 1.18. */
private final Integer newProperty;
@JsonCreator
protected DummyExecNode(
@JsonProperty(FIELD_NAME_ID) int id,
@JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context,
@JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig,
@JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
@JsonProperty(FIELD_NAME_OUTPUT_TYPE) LogicalType outputType,
@JsonProperty(FIELD_NAME_DESCRIPTION) String description,
@JsonProperty(FIELD_NAME_NEW_ADDED) Integer newProperty) {
super(id, context, persistedConfig, inputProperties, outputType, description);
this.newProperty = newProperty;
}
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
return null;
}
}
}
|
DummyExecNode
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/internal/util/beans/BeanIntrospectionException.java
|
{
"start": 329,
"end": 554
}
|
class ____ extends HibernateException {
public BeanIntrospectionException(String string, Throwable root) {
super( string, root );
}
public BeanIntrospectionException(String s) {
super( s );
}
}
|
BeanIntrospectionException
|
java
|
apache__camel
|
components/camel-bean-validator/src/test/java/org/apache/camel/component/bean/validator/CustomValidationProviderResolverTest.java
|
{
"start": 1431,
"end": 2513
}
|
class ____ extends CamelTestSupport {
// Routing fixtures
@BindToRegistry("myValidationProviderResolver")
ValidationProviderResolver validationProviderResolver = mock(ValidationProviderResolver.class);
@Override
protected void doPreSetup() throws Exception {
List<ValidationProvider<?>> validationProviders = asList(new HibernateValidator());
given(validationProviderResolver.getValidationProviders()).willReturn(validationProviders);
super.doPreSetup();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:test").to(
"bean-validator://ValidationProviderResolverTest?validationProviderResolver=#myValidationProviderResolver");
}
};
}
// Tests
@Test
void shouldResolveCustomValidationProviderResolver() {
verify(validationProviderResolver, atLeastOnce()).getValidationProviders();
}
}
|
CustomValidationProviderResolverTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java
|
{
"start": 12424,
"end": 28242
}
|
class ____ extends SubjectInheritingThread {
private boolean isInterrupted = false;
@Override
public void work() {
ApplicationSubmissionContext context =
mock(ApplicationSubmissionContext.class);
ApplicationId applicationId = ApplicationId.newInstance(
System.currentTimeMillis(), 1);
when(context.getApplicationId()).thenReturn(applicationId);
((MockYarnClient) client).setYarnApplicationState(
YarnApplicationState.NEW);
try {
client.submitApplication(context);
} catch (YarnException | IOException e) {
if (e instanceof YarnException && e.getCause() != null &&
e.getCause() instanceof InterruptedException) {
isInterrupted = true;
}
}
}
}
SubmitThread appSubmitThread = new SubmitThread();
appSubmitThread.start();
try {
// Wait for thread to start and begin to sleep
// (enter TIMED_WAITING state).
while (appSubmitThread.getState() != State.TIMED_WAITING) {
Thread.sleep(pollIntervalMs / 2);
}
// Interrupt the thread.
appSubmitThread.interrupt();
appSubmitThread.join();
} catch (InterruptedException e) {
}
assertTrue(appSubmitThread.isInterrupted,
"Expected an InterruptedException wrapped inside a YarnException");
}
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 30)
public void testSubmitIncorrectQueueToCapacityScheduler(SchedulerType type) throws IOException {
initTestYarnClient(type);
MiniYARNCluster cluster = new MiniYARNCluster("testMRAMTokens", 1, 1, 1);
YarnClient rmClient = null;
try {
YarnConfiguration conf = new YarnConfiguration();
conf.set(YarnConfiguration.RM_SCHEDULER,
CapacityScheduler.class.getName());
cluster.init(conf);
cluster.start();
final Configuration yarnConf = cluster.getConfig();
rmClient = YarnClient.createYarnClient();
rmClient.init(yarnConf);
rmClient.start();
YarnClientApplication newApp = rmClient.createApplication();
ApplicationId appId = newApp.getNewApplicationResponse().getApplicationId();
// Create launch context for app master
ApplicationSubmissionContext appContext
= Records.newRecord(ApplicationSubmissionContext.class);
// set the application id
appContext.setApplicationId(appId);
// set the application name
appContext.setApplicationName("test");
// Set the queue to which this application is to be submitted in the RM
appContext.setQueue("nonexist");
// Set up the container launch context for the application master
ContainerLaunchContext amContainer
= Records.newRecord(ContainerLaunchContext.class);
appContext.setAMContainerSpec(amContainer);
appContext.setResource(Resource.newInstance(1024, 1));
// appContext.setUnmanagedAM(unmanaged);
// Submit the application to the applications manager
rmClient.submitApplication(appContext);
fail("Job submission should have thrown an exception");
} catch (YarnException e) {
assertTrue(e.getMessage().contains("Failed to submit"));
} finally {
if (rmClient != null) {
rmClient.stop();
}
cluster.stop();
}
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 30)
public void testKillApplication(SchedulerType type) throws Exception {
initTestYarnClient(type);
MockRM rm = new MockRM();
rm.start();
RMApp app = MockRMAppSubmitter.submitWithMemory(2000, rm);
@SuppressWarnings("resource")
final YarnClient client = new MockYarnClient();
client.init(getConf());
client.start();
client.killApplication(app.getApplicationId());
verify(((MockYarnClient) client).getRMClient(), times(2))
.forceKillApplication(any(KillApplicationRequest.class));
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 30)
public void testApplicationType(SchedulerType type) throws Exception {
initTestYarnClient(type);
GenericTestUtils.setRootLogLevel(Level.DEBUG);
MockRM rm = new MockRM();
rm.start();
RMApp app = MockRMAppSubmitter.submitWithMemory(2000, rm);
RMApp app1 =
MockRMAppSubmitter.submit(rm,
MockRMAppSubmissionData.Builder.createWithMemory(200, rm)
.withAppName("name")
.withUser("user")
.withAcls(new HashMap<>())
.withUnmanagedAM(false)
.withQueue("default")
.withMaxAppAttempts(-1)
.withCredentials(null)
.withAppType("MAPREDUCE")
.build());
assertEquals("YARN", app.getApplicationType());
assertEquals("MAPREDUCE", app1.getApplicationType());
rm.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 30)
public void testApplicationTypeLimit(SchedulerType type) throws Exception {
initTestYarnClient(type);
GenericTestUtils.setRootLogLevel(Level.DEBUG);
MockRM rm = new MockRM();
rm.start();
RMApp app1 =
MockRMAppSubmitter.submit(rm,
MockRMAppSubmissionData.Builder.createWithMemory(200, rm)
.withAppName("name")
.withUser("user")
.withAcls(new HashMap<>())
.withUnmanagedAM(false)
.withQueue("default")
.withMaxAppAttempts(-1)
.withCredentials(null)
.withAppType("MAPREDUCE-LENGTH-IS-20")
.build());
assertEquals("MAPREDUCE-LENGTH-IS-", app1.getApplicationType());
rm.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 10)
public void testGetApplications(SchedulerType type) throws YarnException, IOException {
initTestYarnClient(type);
final YarnClient client = new MockYarnClient();
client.init(getConf());
client.start();
List<ApplicationReport> expectedReports = ((MockYarnClient)client).getReports();
List<ApplicationReport> reports = client.getApplications();
assertThat(reports).isEqualTo(expectedReports);
Set<String> appTypes = new HashSet<>();
appTypes.add("YARN");
appTypes.add("NON-YARN");
reports =
client.getApplications(appTypes, null);
assertThat(reports).hasSize(2);
assertTrue((reports.get(0).getApplicationType().equals("YARN") && reports
.get(1).getApplicationType().equals("NON-YARN"))
|| (reports.get(1).getApplicationType().equals("YARN") && reports
.get(0).getApplicationType().equals("NON-YARN")));
for(ApplicationReport report : reports) {
assertTrue(expectedReports.contains(report));
}
EnumSet<YarnApplicationState> appStates =
EnumSet.noneOf(YarnApplicationState.class);
appStates.add(YarnApplicationState.FINISHED);
appStates.add(YarnApplicationState.FAILED);
reports = client.getApplications(null, appStates);
assertThat(reports).hasSize(2);
assertTrue((reports.get(0).getApplicationType().equals("NON-YARN") && reports
.get(1).getApplicationType().equals("NON-MAPREDUCE"))
|| (reports.get(1).getApplicationType().equals("NON-YARN") && reports
.get(0).getApplicationType().equals("NON-MAPREDUCE")));
for (ApplicationReport report : reports) {
assertTrue(expectedReports.contains(report));
}
reports = client.getApplications(appTypes, appStates);
assertEquals(1, reports.size());
assertEquals("NON-YARN", reports.get(0).getApplicationType());
for (ApplicationReport report : reports) {
assertTrue(expectedReports.contains(report));
}
client.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 10)
public void testGetApplicationAttempts(SchedulerType type) throws YarnException, IOException {
initTestYarnClient(type);
final YarnClient client = new MockYarnClient();
client.init(getConf());
client.start();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
List<ApplicationAttemptReport> reports = client
.getApplicationAttempts(applicationId);
assertNotNull(reports);
assertThat(reports.get(0).getApplicationAttemptId()).isEqualTo(
ApplicationAttemptId.newInstance(applicationId, 1));
assertThat(reports.get(1).getApplicationAttemptId()).isEqualTo(
ApplicationAttemptId.newInstance(applicationId, 2));
client.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 10)
public void testGetApplicationAttempt(SchedulerType type) throws YarnException, IOException {
initTestYarnClient(type);
Configuration conf = new Configuration();
final YarnClient client = new MockYarnClient();
client.init(conf);
client.start();
List<ApplicationReport> expectedReports = ((MockYarnClient) client)
.getReports();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
applicationId, 1);
ApplicationAttemptReport report = client
.getApplicationAttemptReport(appAttemptId);
assertNotNull(report);
assertThat(report.getApplicationAttemptId().toString()).isEqualTo(
expectedReports.get(0).getCurrentApplicationAttemptId().toString());
client.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 10)
public void testGetContainers(SchedulerType type) throws YarnException, IOException {
initTestYarnClient(type);
Configuration conf = getConf();
conf.setBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,
true);
final YarnClient client = new MockYarnClient();
client.init(conf);
client.start();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
applicationId, 1);
List<ContainerReport> reports = client.getContainers(appAttemptId);
assertNotNull(reports);
assertThat(reports.get(0).getContainerId()).isEqualTo(
(ContainerId.newContainerId(appAttemptId, 1)));
assertThat(reports.get(1).getContainerId()).isEqualTo(
(ContainerId.newContainerId(appAttemptId, 2)));
assertThat(reports.get(2).getContainerId()).isEqualTo(
(ContainerId.newContainerId(appAttemptId, 3)));
//First2 containers should come from RM with updated state information and
// 3rd container is not there in RM and should
assertEquals(ContainerState.RUNNING,
(reports.get(0).getContainerState()));
assertEquals(ContainerState.RUNNING,
(reports.get(1).getContainerState()));
assertEquals(ContainerState.COMPLETE,
(reports.get(2).getContainerState()));
client.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 10)
public void testGetContainersOnAHSFail(SchedulerType type) throws YarnException, IOException {
initTestYarnClient(type);
Configuration conf = getConf();
conf.setBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,
true);
final YarnClient client = new MockYarnClient() {
@Override
public List<ContainerReport> getContainers(
ApplicationAttemptId appAttemptId) throws YarnException,
IOException {
return getContainersOnAHSFail(appAttemptId);
}
};
client.init(conf);
client.start();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
applicationId, 1);
List<ContainerReport> reports = client.getContainers(appAttemptId);
assertNotNull(reports);
assertTrue(reports.size() == 2);
assertThat(reports.get(0).getContainerId()).isEqualTo(
(ContainerId.newContainerId(appAttemptId, 1)));
assertThat(reports.get(1).getContainerId()).isEqualTo(
(ContainerId.newContainerId(appAttemptId, 2)));
//Only 2 running containers from RM are present when AHS throws exception
assertEquals(ContainerState.RUNNING,
(reports.get(0).getContainerState()));
assertEquals(ContainerState.RUNNING,
(reports.get(1).getContainerState()));
client.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 10)
public void testGetContainerReport(SchedulerType type) throws YarnException, IOException {
initTestYarnClient(type);
Configuration conf = getConf();
conf.setBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,
true);
final YarnClient client = new MockYarnClient();
client.init(conf);
client.start();
List<ApplicationReport> expectedReports = ((MockYarnClient) client)
.getReports();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
applicationId, 1);
ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
ContainerReport report = client.getContainerReport(containerId);
assertNotNull(report);
assertThat(report.getContainerId().toString()).isEqualTo(
(ContainerId.newContainerId(expectedReports.get(0)
.getCurrentApplicationAttemptId(), 1)).toString());
containerId = ContainerId.newContainerId(appAttemptId, 3);
report = client.getContainerReport(containerId);
assertNotNull(report);
assertThat(report.getContainerId().toString()).isEqualTo(
(ContainerId.newContainerId(expectedReports.get(0)
.getCurrentApplicationAttemptId(), 3)).toString());
assertNotNull(report.getExecutionType());
client.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 10)
public void testGetLabelsToNodes(SchedulerType type) throws YarnException, IOException {
initTestYarnClient(type);
final YarnClient client = new MockYarnClient();
client.init(getConf());
client.start();
// Get labels to nodes mapping
Map<String, Set<NodeId>> expectedLabelsToNodes =
((MockYarnClient)client).getLabelsToNodesMap();
Map<String, Set<NodeId>> labelsToNodes = client.getLabelsToNodes();
assertThat(labelsToNodes).isEqualTo(expectedLabelsToNodes);
assertThat(labelsToNodes).hasSize(3);
// Get labels to nodes for selected labels
Set<String> setLabels = new HashSet<>(Arrays.asList("x", "z"));
expectedLabelsToNodes =
((MockYarnClient)client).getLabelsToNodesMap(setLabels);
labelsToNodes = client.getLabelsToNodes(setLabels);
assertThat(labelsToNodes).isEqualTo(expectedLabelsToNodes);
assertThat(labelsToNodes).hasSize(2);
client.stop();
client.close();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(value = 10)
public void testGetNodesToLabels(SchedulerType type) throws YarnException, IOException {
initTestYarnClient(type);
final YarnClient client = new MockYarnClient();
client.init(getConf());
client.start();
// Get labels to nodes mapping
Map<NodeId, Set<String>> expectedNodesToLabels = ((MockYarnClient) client)
.getNodeToLabelsMap();
Map<NodeId, Set<String>> nodesToLabels = client.getNodeToLabels();
assertThat(nodesToLabels).isEqualTo(expectedNodesToLabels);
assertThat(nodesToLabels).hasSize(1);
client.stop();
client.close();
}
private static
|
SubmitThread
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/criteria/CriteriaTest.java
|
{
"start": 2049,
"end": 14345
}
|
class ____ {
@BeforeEach
public void init(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
Person person1 = new Person( "John Doe" );
person1.setNickName( "JD" );
person1.setAddress( "Earth" );
person1.setCreatedOn( LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ) );
person1.getAddresses().put( AddressType.HOME, "Home address" );
person1.getAddresses().put( AddressType.OFFICE, "Office address" );
entityManager.persist( person1 );
Person person2 = new Person( "Mrs. John Doe" );
person2.setAddress( "Earth" );
person2.setCreatedOn( LocalDateTime.of( 2000, 1, 2, 12, 0, 0 ) );
entityManager.persist( person2 );
Person person3 = new Person( "Dr_ John Doe" );
entityManager.persist( person3 );
Phone phone1 = new Phone( "123-456-7890" );
phone1.setId( 1L );
phone1.setType( PhoneType.MOBILE );
person1.addPhone( phone1 );
phone1.getRepairTimestamps().add( LocalDateTime.of( 2005, 1, 1, 12, 0, 0 ) );
phone1.getRepairTimestamps().add( LocalDateTime.of( 2006, 1, 1, 12, 0, 0 ) );
Call call11 = new Call();
call11.setDuration( 12 );
call11.setTimestamp( LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ) );
Call call12 = new Call();
call12.setDuration( 33 );
call12.setTimestamp( LocalDateTime.of( 2000, 1, 1, 1, 0, 0 ) );
phone1.addCall( call11 );
phone1.addCall( call12 );
Phone phone2 = new Phone( "098_765-4321" );
phone2.setId( 2L );
phone2.setType( PhoneType.LAND_LINE );
Phone phone3 = new Phone( "098-765-4320" );
phone3.setId( 3L );
phone3.setType( PhoneType.LAND_LINE );
person2.addPhone( phone2 );
person2.addPhone( phone3 );
CreditCardPayment creditCardPayment = new CreditCardPayment();
creditCardPayment.setCompleted( true );
creditCardPayment.setAmount( BigDecimal.ZERO );
creditCardPayment.setPerson( person1 );
WireTransferPayment wireTransferPayment = new WireTransferPayment();
wireTransferPayment.setCompleted( true );
wireTransferPayment.setAmount( BigDecimal.valueOf( 100 ) );
wireTransferPayment.setPerson( person2 );
entityManager.persist( creditCardPayment );
entityManager.persist( wireTransferPayment );
Partner partner = new Partner( "John Doe" );
entityManager.persist( partner );
} );
}
@AfterEach
public void cleanup(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().unwrap( SessionFactory.class ).getSchemaManager().truncateMappedObjects();
}
// Demonstrates a typed criteria query that selects a whole entity and
// restricts it with an equality predicate on the name attribute. The
// tag::...[] comments delimit the snippet embedded in the documentation.
@Test
public void test_criteria_typedquery_entity_example(EntityManagerFactoryScope scope) {
    scope.inTransaction( entityManager -> {
        //tag::criteria-typedquery-entity-example[]
        CriteriaBuilder builder = entityManager.getCriteriaBuilder();
        CriteriaQuery<Person> criteria = builder.createQuery( Person.class );
        Root<Person> root = criteria.from( Person.class );
        criteria.select( root );
        criteria.where( builder.equal( root.get( Person_.name ), "John Doe" ) );
        List<Person> persons = entityManager.createQuery( criteria ).getResultList();
        //end::criteria-typedquery-entity-example[]
        // Exactly one fixture person is named "John Doe" (see init()).
        assertThat( persons ).hasSize( 1 );
    } );
}
@Test
public void test_criteria_typedquery_expression_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-typedquery-expression-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<String> criteria = builder.createQuery( String.class );
Root<Person> root = criteria.from( Person.class );
criteria.select( root.get( Person_.nickName ) );
criteria.where( builder.equal( root.get( Person_.name ), "John Doe" ) );
List<String> nickNames = entityManager.createQuery( criteria ).getResultList();
//end::criteria-typedquery-expression-example[]
assertThat( nickNames ).hasSize( 1 );
} );
}
@Test
public void test_criteria_typedquery_multiselect_explicit_array_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-typedquery-multiselect-array-explicit-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Object[]> criteria = builder.createQuery( Object[].class );
Root<Person> root = criteria.from( Person.class );
Path<Long> idPath = root.get( Person_.id );
Path<String> nickNamePath = root.get( Person_.nickName );
criteria.select( builder.array( idPath, nickNamePath ) );
criteria.where( builder.equal( root.get( Person_.name ), "John Doe" ) );
List<Object[]> idAndNickNames = entityManager.createQuery( criteria ).getResultList();
//end::criteria-typedquery-multiselect-array-explicit-example[]
assertThat( idAndNickNames ).hasSize( 1 );
} );
}
@Test
public void test_criteria_typedquery_multiselect_implicit_array_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-typedquery-multiselect-array-implicit-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Object[]> criteria = builder.createQuery( Object[].class );
Root<Person> root = criteria.from( Person.class );
Path<Long> idPath = root.get( Person_.id );
Path<String> nickNamePath = root.get( Person_.nickName );
criteria.multiselect( idPath, nickNamePath );
criteria.where( builder.equal( root.get( Person_.name ), "John Doe" ) );
List<Object[]> idAndNickNames = entityManager.createQuery( criteria ).getResultList();
//end::criteria-typedquery-multiselect-array-implicit-example[]
assertThat( idAndNickNames ).hasSize( 1 );
} );
}
@Test
public void test_criteria_typedquery_wrapper_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-typedquery-wrapper-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<PersonWrapper> criteria = builder.createQuery( PersonWrapper.class );
Root<Person> root = criteria.from( Person.class );
Path<Long> idPath = root.get( Person_.id );
Path<String> nickNamePath = root.get( Person_.nickName );
criteria.select( builder.construct( PersonWrapper.class, idPath, nickNamePath ) );
criteria.where( builder.equal( root.get( Person_.name ), "John Doe" ) );
List<PersonWrapper> wrappers = entityManager.createQuery( criteria ).getResultList();
//end::criteria-typedquery-wrapper-example[]
assertThat( wrappers ).hasSize( 1 );
} );
}
@Test
public void test_criteria_tuple_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-tuple-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Tuple> criteria = builder.createQuery( Tuple.class );
Root<Person> root = criteria.from( Person.class );
Path<Long> idPath = root.get( Person_.id );
Path<String> nickNamePath = root.get( Person_.nickName );
criteria.multiselect( idPath, nickNamePath );
criteria.where( builder.equal( root.get( Person_.name ), "John Doe" ) );
List<Tuple> tuples = entityManager.createQuery( criteria ).getResultList();
for ( Tuple tuple : tuples ) {
Long id = tuple.get( idPath );
String nickName = tuple.get( nickNamePath );
}
//or using indices
for ( Tuple tuple : tuples ) {
Long id = (Long) tuple.get( 0 );
String nickName = (String) tuple.get( 1 );
}
//end::criteria-tuple-example[]
assertThat( tuples ).hasSize( 1 );
} );
}
@Test
public void test_criteria_from_root_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-from-root-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Person> criteria = builder.createQuery( Person.class );
Root<Person> root = criteria.from( Person.class );
//end::criteria-from-root-example[]
} );
}
@Test
public void test_criteria_from_multiple_root_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
String address = "Earth";
String prefix = "J%";
//tag::criteria-from-multiple-root-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Tuple> criteria = builder.createQuery( Tuple.class );
Root<Person> personRoot = criteria.from( Person.class );
Root<Partner> partnerRoot = criteria.from( Partner.class );
criteria.multiselect( personRoot, partnerRoot );
Predicate personRestriction = builder.and(
builder.equal( personRoot.get( Person_.address ), address ),
builder.isNotEmpty( personRoot.get( Person_.phones ) )
);
Predicate partnerRestriction = builder.and(
builder.like( partnerRoot.get( Partner_.name ), prefix ),
builder.equal( partnerRoot.get( Partner_.version ), 0 )
);
criteria.where( builder.and( personRestriction, partnerRestriction ) );
List<Tuple> tuples = entityManager.createQuery( criteria ).getResultList();
//end::criteria-from-multiple-root-example[]
assertThat( tuples ).hasSize( 2 );
} );
}
@Test
public void test_criteria_from_join_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-from-join-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Phone> criteria = builder.createQuery( Phone.class );
Root<Phone> root = criteria.from( Phone.class );
// Phone.person is a @ManyToOne
Join<Phone, Person> personJoin = root.join( Phone_.person );
// Person.addresses is an @ElementCollection
Join<Person, String> addressesJoin = personJoin.join( Person_.addresses );
criteria.where( builder.isNotEmpty( root.get( Phone_.calls ) ) );
List<Phone> phones = entityManager.createQuery( criteria ).getResultList();
//end::criteria-from-join-example[]
assertThat( phones ).hasSize( 1 );
} );
}
@Test
public void test_criteria_from_fetch_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-from-fetch-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Phone> criteria = builder.createQuery( Phone.class );
Root<Phone> root = criteria.from( Phone.class );
// Phone.person is a @ManyToOne
Fetch<Phone, Person> personFetch = root.fetch( Phone_.person );
// Person.addresses is an @ElementCollection
Fetch<Person, String> addressesJoin = personFetch.fetch( Person_.addresses );
criteria.where( builder.isNotEmpty( root.get( Phone_.calls ) ) );
List<Phone> phones = entityManager.createQuery( criteria ).getResultList();
//end::criteria-from-fetch-example[]
assertThat( phones ).hasSize( 1 );
} );
}
@Test
public void test_criteria_param_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-param-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Person> criteria = builder.createQuery( Person.class );
Root<Person> root = criteria.from( Person.class );
ParameterExpression<String> nickNameParameter = builder.parameter( String.class );
criteria.where( builder.equal( root.get( Person_.nickName ), nickNameParameter ) );
TypedQuery<Person> query = entityManager.createQuery( criteria );
query.setParameter( nickNameParameter, "JD" );
List<Person> persons = query.getResultList();
//end::criteria-param-example[]
assertThat( persons ).hasSize( 1 );
} );
}
@Test
public void test_criteria_group_by_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::criteria-group-by-example[]
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Tuple> criteria = builder.createQuery( Tuple.class );
Root<Person> root = criteria.from( Person.class );
criteria.groupBy( root.get( "address" ) );
criteria.multiselect( root.get( "address" ), builder.count( root ) );
List<Tuple> tuples = entityManager.createQuery( criteria ).getResultList();
for ( Tuple tuple : tuples ) {
String name = (String) tuple.get( 0 );
Long count = (Long) tuple.get( 1 );
}
//end::criteria-group-by-example[]
assertThat( tuples ).hasSize( 2 );
} );
}
@Entity(name = "ApplicationEvent")
public static
|
CriteriaTest
|
java
|
apache__spark
|
examples/src/main/java/org/apache/spark/examples/ml/JavaVectorSizeHintExample.java
|
{
"start": 1399,
"end": 3047
}
|
// Spark ML example: VectorSizeHint declares the expected size of a vector
// column so downstream stages such as VectorAssembler can validate rows
// eagerly. With handleInvalid="skip", rows whose vector has the wrong
// size are filtered out instead of failing the pipeline.
class ____ {
    public static void main(String[] args) {
        SparkSession spark = SparkSession
            .builder()
            .appName("JavaVectorSizeHintExample")
            .getOrCreate();
        // $example on$
        StructType schema = createStructType(new StructField[]{
            createStructField("id", IntegerType, false),
            createStructField("hour", IntegerType, false),
            createStructField("mobile", DoubleType, false),
            createStructField("userFeatures", new VectorUDT(), false),
            createStructField("clicked", DoubleType, false)
        });
        // row1's userFeatures has length 2, not the declared 3 — it will be skipped.
        Row row0 = RowFactory.create(0, 18, 1.0, Vectors.dense(0.0, 10.0, 0.5), 1.0);
        Row row1 = RowFactory.create(0, 18, 1.0, Vectors.dense(0.0, 10.0), 0.0);
        Dataset<Row> dataset = spark.createDataFrame(Arrays.asList(row0, row1), schema);
        VectorSizeHint sizeHint = new VectorSizeHint()
            .setInputCol("userFeatures")
            .setHandleInvalid("skip")
            .setSize(3);
        Dataset<Row> datasetWithSize = sizeHint.transform(dataset);
        System.out.println("Rows where 'userFeatures' is not the right size are filtered out");
        datasetWithSize.show(false);
        VectorAssembler assembler = new VectorAssembler()
            .setInputCols(new String[]{"hour", "mobile", "userFeatures"})
            .setOutputCol("features");
        // This dataframe can be used by downstream transformers as before
        Dataset<Row> output = assembler.transform(datasetWithSize);
        System.out.println("Assembled columns 'hour', 'mobile', 'userFeatures' to vector column " +
            "'features'");
        output.select("features", "clicked").show(false);
        // $example off$
        spark.stop();
    }
}
|
JavaVectorSizeHintExample
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java
|
{
"start": 10362,
"end": 12891
}
|
// Maps BytesRef text values to category ordinal ints via the enclosing
// categorizer. Null inputs (and values the categorizer cannot place —
// TODO confirm: computeCategory returning null appears to mean that)
// map to NULL_ORD; real categories are shifted by +1 so ordinal 0 stays
// reserved for null.
class ____ implements Releasable {
    private final CategorizationAnalyzer analyzer;
    CategorizeEvaluator(CategorizationAnalyzer analyzer) {
        this.analyzer = analyzer;
    }
    // Dispatch: use the dense vector fast path when the block has no
    // nulls/multivalues, otherwise the general block path.
    Block eval(BytesRefBlock vBlock) {
        BytesRefVector vVector = vBlock.asVector();
        if (vVector == null) {
            return eval(vBlock.getPositionCount(), vBlock);
        }
        IntVector vector = eval(vBlock.getPositionCount(), vVector);
        return vector.asBlock();
    }
    // General path: handles null positions and multi-valued positions.
    IntBlock eval(int positionCount, BytesRefBlock vBlock) {
        try (IntBlock.Builder result = blockFactory.newIntBlockBuilder(positionCount)) {
            BytesRef vScratch = new BytesRef();
            for (int p = 0; p < positionCount; p++) {
                if (vBlock.isNull(p)) {
                    seenNull = true;
                    result.appendInt(NULL_ORD);
                    continue;
                }
                int first = vBlock.getFirstValueIndex(p);
                int count = vBlock.getValueCount(p);
                if (count == 1) {
                    // Single-valued position: no entry bracketing needed.
                    result.appendInt(process(vBlock.getBytesRef(first, vScratch)));
                    continue;
                }
                int end = first + count;
                result.beginPositionEntry();
                for (int i = first; i < end; i++) {
                    result.appendInt(process(vBlock.getBytesRef(i, vScratch)));
                }
                result.endPositionEntry();
            }
            return result.build();
        }
    }
    // Fast path: every position holds exactly one non-null value.
    IntVector eval(int positionCount, BytesRefVector vVector) {
        try (IntVector.FixedBuilder result = blockFactory.newIntVectorFixedBuilder(positionCount)) {
            BytesRef vScratch = new BytesRef();
            for (int p = 0; p < positionCount; p++) {
                result.appendInt(p, process(vVector.getBytesRef(p, vScratch)));
            }
            return result.build();
        }
    }
    // Categorize one value; +1 keeps ordinal 0 (NULL_ORD) reserved.
    int process(BytesRef v) {
        var category = categorizer.computeCategory(v.utf8ToString(), analyzer);
        if (category == null) {
            seenNull = true;
            return NULL_ORD;
        }
        return category.getId() + 1;
    }
    @Override
    public void close() {
        analyzer.close();
    }
}
}
|
CategorizeEvaluator
|
java
|
apache__flink
|
flink-metrics/flink-metrics-otel/src/test/java/org/apache/flink/traces/otel/OpenTelemetryTraceReporterITCase.java
|
{
"start": 1749,
"end": 11808
}
|
class ____ extends OpenTelemetryTestBase {
private OpenTelemetryTraceReporter reporter;
// Fresh reporter per test; closed again in tearDownEach.
@BeforeEach
public void setUp() {
    reporter = new OpenTelemetryTraceReporter();
}
// Safety net: tests also close the reporter in a finally block, but
// close here as well in case a test fails before reaching it.
@AfterEach
public void tearDownEach() {
    reporter.close();
}
@Test
public void testReportSpan() throws Exception {
MetricConfig metricConfig = createMetricConfig();
String scope = this.getClass().getCanonicalName();
String attribute1KeyRoot = "foo";
String attribute1ValueRoot = "bar";
String attribute2KeyRoot = "<variable>";
String attribute2ValueRoot = "value";
String name = "root";
Duration startTs = Duration.ofMillis(42);
Duration endTs = Duration.ofMillis(64);
reporter.open(metricConfig);
try {
SpanBuilder span =
Span.builder(this.getClass(), name)
.setAttribute(attribute1KeyRoot, attribute1ValueRoot)
.setAttribute(attribute2KeyRoot, attribute2ValueRoot)
.setStartTsMillis(startTs.toMillis())
.setEndTsMillis(endTs.toMillis());
reporter.notifyOfAddedSpan(span.build());
} finally {
reporter.close();
}
eventuallyConsumeJson(
(json) -> {
JsonNode scopeSpans = json.findPath("resourceSpans").findPath("scopeSpans");
assertThat(scopeSpans.findPath("scope").findPath("name").asText())
.isEqualTo(scope);
JsonNode spans = scopeSpans.findPath("spans");
assertThat(spans.findPath("name").asText()).isEqualTo(name);
assertThat(spans.findPath("startTimeUnixNano").asText())
.isEqualTo(Long.toString(startTs.toNanos()));
assertThat(spans.findPath("endTimeUnixNano").asText())
.isEqualTo(Long.toString(endTs.toNanos()));
assertThat(spans.findPath("name").asText()).isEqualTo(name);
JsonNode attributes = spans.findPath("attributes");
List<String> attributeKeys =
attributes.findValues("key").stream()
.map(JsonNode::asText)
.collect(Collectors.toList());
assertThat(attributeKeys)
.contains(
attribute1KeyRoot,
VariableNameUtil.getVariableName(attribute2KeyRoot));
attributes.forEach(
attribute -> {
if (attribute.get("key").asText().equals(attribute1KeyRoot)) {
assertThat(attribute.at("/value/stringValue").asText())
.isEqualTo(attribute1ValueRoot);
} else if (attribute
.get("key")
.asText()
.equals(
VariableNameUtil.getVariableName(
attribute2KeyRoot))) {
assertThat(attribute.at("/value/stringValue").asText())
.isEqualTo(attribute2ValueRoot);
}
});
});
}
@Test
public void testReportNestedSpan() throws Exception {
String scope = this.getClass().getCanonicalName();
String attribute1KeyRoot = "foo";
String attribute1ValueRoot = "bar";
String attribute2KeyRoot = "<variable>";
String attribute2ValueRoot = "value";
String spanRoot = "root";
String spanL1N1 = "1_1";
String attribute1KeyL1N1 = "foo_" + spanL1N1;
String attribute1ValueL1N1 = "bar_" + spanL1N1;
String spanL1N2 = "1_2";
String attribute1KeyL1N2 = "foo_" + spanL1N2;
String attribute1ValueL1N2 = "bar_" + spanL1N2;
String spanL2N1 = "2_1";
String attribute1KeyL2N1 = "foo_" + spanL2N1;
String attribute1ValueL2N1 = "bar_" + spanL2N1;
reporter.open(createMetricConfig());
try {
SpanBuilder childLeveL2N1 =
Span.builder(this.getClass(), spanL2N1)
.setAttribute(attribute1KeyL2N1, attribute1ValueL2N1)
.setStartTsMillis(44)
.setEndTsMillis(46);
SpanBuilder childL1N1 =
Span.builder(this.getClass(), spanL1N1)
.setAttribute(attribute1KeyL1N1, attribute1ValueL1N1)
.setStartTsMillis(43)
.setEndTsMillis(48)
.addChild(childLeveL2N1);
SpanBuilder childL1N2 =
Span.builder(this.getClass(), spanL1N2)
.setAttribute(attribute1KeyL1N2, attribute1ValueL1N2)
.setStartTsMillis(44)
.setEndTsMillis(46);
SpanBuilder rootSpan =
Span.builder(this.getClass(), spanRoot)
.setAttribute(attribute1KeyRoot, attribute1ValueRoot)
.setAttribute(attribute2KeyRoot, attribute2ValueRoot)
.setStartTsMillis(42)
.setEndTsMillis(64)
.addChildren(Arrays.asList(childL1N1, childL1N2));
reporter.notifyOfAddedSpan(rootSpan.build());
} finally {
reporter.close();
}
eventuallyConsumeJson(
(json) -> {
JsonNode scopeSpans = json.findPath("resourceSpans").findPath("scopeSpans");
assertThat(scopeSpans.findPath("scope").findPath("name").asText())
.isEqualTo(scope);
JsonNode spans = scopeSpans.findPath("spans");
Map<String, ActualSpan> actualSpanSummaries = convertToSummaries(spans);
assertThat(actualSpanSummaries.keySet())
.containsExactlyInAnyOrder(spanRoot, spanL1N1, spanL1N2, spanL2N1);
ActualSpan root = actualSpanSummaries.get(spanRoot);
ActualSpan l1n1 = actualSpanSummaries.get(spanL1N1);
ActualSpan l1n2 = actualSpanSummaries.get(spanL1N2);
ActualSpan l2n1 = actualSpanSummaries.get(spanL2N1);
assertThat(root.parentSpanId).isEmpty();
assertThat(root.attributes)
.containsEntry(attribute1KeyRoot, attribute1ValueRoot);
assertThat(root.attributes)
.containsEntry(
VariableNameUtil.getVariableName(attribute2KeyRoot),
attribute2ValueRoot);
assertThat(l1n1.attributes)
.containsEntry(attribute1KeyL1N1, attribute1ValueL1N1);
assertThat(l1n2.attributes)
.containsEntry(attribute1KeyL1N2, attribute1ValueL1N2);
assertThat(l2n1.attributes)
.containsEntry(attribute1KeyL2N1, attribute1ValueL2N1);
assertThat(root.traceId).isEqualTo(l1n1.traceId);
assertThat(root.traceId).isEqualTo(l1n2.traceId);
assertThat(root.traceId).isEqualTo(l2n1.traceId);
assertThat(root.spanId).isNotEmpty();
assertThat(root.spanId).isEqualTo(l1n1.parentSpanId);
assertThat(root.spanId).isEqualTo(l1n2.parentSpanId);
assertThat(root.children).containsExactlyInAnyOrder(l1n1, l1n2);
assertThat(l1n1.children).containsExactlyInAnyOrder(l2n1);
assertThat(l1n2.children).isEmpty();
assertThat(l2n1.children).isEmpty();
});
}
// Rebuilds the span tree from the flat OTLP-JSON span array: first index
// spans by spanId, then attach each span to its parent via parentSpanId,
// and finally key the result by span name (test span names are unique).
private Map<String, ActualSpan> convertToSummaries(JsonNode spans) {
    Map<String, ActualSpan> spanIdToSpan = new HashMap<>();
    for (int i = 0; spans.get(i) != null; i++) {
        ActualSpan actualSpan = convertToActualSpan(spans.get(i));
        spanIdToSpan.put(actualSpan.spanId, actualSpan);
    }
    Map<String, ActualSpan> nameToSpan = new HashMap<>();
    spanIdToSpan.forEach(
        (spanId, actualSpan) -> {
            // An empty parentSpanId marks the root span.
            if (!actualSpan.parentSpanId.isEmpty()) {
                ActualSpan parentSpan = spanIdToSpan.get(actualSpan.parentSpanId);
                parentSpan.addChild(actualSpan);
            }
            nameToSpan.put(actualSpan.name, actualSpan);
        });
    return nameToSpan;
}
// Extracts the fields of interest (trace/span/parent ids, name, and the
// string-valued attributes) from a single OTLP-JSON span node.
private ActualSpan convertToActualSpan(JsonNode span) {
    String name = span.findPath("name").asText();
    String traceId = span.findPath("traceId").asText();
    String spanId = span.findPath("spanId").asText();
    String parentSpanId = span.findPath("parentSpanId").asText();
    Map<String, String> attributeMap = new HashMap<>();
    JsonNode attributes = span.findPath("attributes");
    // Iterate the attribute array until the index runs off the end.
    for (int j = 0; attributes.get(j) != null; j++) {
        JsonNode attribute = attributes.get(j);
        String key = attribute.get("key").asText();
        String value = attribute.at("/value/stringValue").asText();
        attributeMap.put(key, value);
    }
    return new ActualSpan(traceId, spanId, name, parentSpanId, attributeMap);
}
private static
|
OpenTelemetryTraceReporterITCase
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/filter/reactive/HiddenHttpMethodFilterTests.java
|
{
"start": 1347,
"end": 3161
}
|
class ____ {
private final HiddenHttpMethodFilter filter = new HiddenHttpMethodFilter();
private final TestWebFilterChain filterChain = new TestWebFilterChain();
@Test
void filterWithParameter() {
postForm("_method=DELETE").block(Duration.ZERO);
assertThat(this.filterChain.getHttpMethod()).isEqualTo(HttpMethod.DELETE);
}
@Test
void filterWithParameterMethodNotAllowed() {
postForm("_method=TRACE").block(Duration.ZERO);
assertThat(this.filterChain.getHttpMethod()).isEqualTo(HttpMethod.POST);
}
@Test
void filterWithNoParameter() {
postForm("").block(Duration.ZERO);
assertThat(this.filterChain.getHttpMethod()).isEqualTo(HttpMethod.POST);
}
@Test
void filterWithEmptyStringParameter() {
postForm("_method=").block(Duration.ZERO);
assertThat(this.filterChain.getHttpMethod()).isEqualTo(HttpMethod.POST);
}
@Test
void filterWithDifferentMethodParam() {
this.filter.setMethodParamName("_foo");
postForm("_foo=DELETE").block(Duration.ZERO);
assertThat(this.filterChain.getHttpMethod()).isEqualTo(HttpMethod.DELETE);
}
// The hidden-method override only applies to POST requests: a PUT whose
// body carries _method=DELETE must keep its original PUT method.
@Test
void filterWithHttpPut() {
    ServerWebExchange exchange = MockServerWebExchange.from(
        MockServerHttpRequest.put("/")
            .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED_VALUE)
            .body("_method=DELETE"));
    this.filter.filter(exchange, this.filterChain).block(Duration.ZERO);
    assertThat(this.filterChain.getHttpMethod()).isEqualTo(HttpMethod.PUT);
}
// Helper: runs the filter over a form-encoded POST carrying the given body.
private Mono<Void> postForm(String body) {
    MockServerWebExchange exchange = MockServerWebExchange.from(
        MockServerHttpRequest.post("/")
            .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED_VALUE)
            .body(body));
    return this.filter.filter(exchange, this.filterChain);
}
private static
|
HiddenHttpMethodFilterTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessarilyFullyQualifiedTest.java
|
{
"start": 7583,
"end": 7799
}
|
interface ____ {}
}
""")
.expectUnchanged()
.addInputLines(
"Test.java",
"""
import org.immutables.value.Value.Immutable;
|
Immutable
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionTestCase.java
|
{
"start": 848,
"end": 1649
}
|
// Base test case for async bulk-by-scroll actions: provides a fresh
// thread pool and an infinitely-throttled BulkByScrollTask per test, and
// shuts the pool down afterwards. Subclasses supply the concrete request.
class ____<
    Request extends AbstractBulkByScrollRequest<Request>,
    Response extends BulkByScrollResponse> extends ESTestCase {
    protected ThreadPool threadPool;
    protected BulkByScrollTask task;
    @Before
    public void setupForTest() {
        threadPool = new TestThreadPool(getTestName());
        task = new BulkByScrollTask(1, "test", "test", "test", TaskId.EMPTY_TASK_ID, Collections.emptyMap());
        // POSITIVE_INFINITY disables throttling for the worker.
        task.setWorker(Float.POSITIVE_INFINITY, null);
    }
    @After
    @Override
    public void tearDown() throws Exception {
        super.tearDown();
        threadPool.shutdown();
    }
    // The concrete request under test, supplied by each subclass.
    protected abstract Request request();
    protected PlainActionFuture<Response> listener() {
        return new PlainActionFuture<>();
    }
}
|
AbstractAsyncBulkByScrollActionTestCase
|
java
|
quarkusio__quarkus
|
core/runtime/src/main/java/io/quarkus/runtime/StartupContext.java
|
{
"start": 256,
"end": 3396
}
|
class ____ implements Closeable {
public static final String RAW_COMMAND_LINE_ARGS = StartupContext.class.getName() + ".raw-command-line-args";
private static final Logger LOG = Logger.getLogger(StartupContext.class);
// Holds values for returned proxies
// These values are usually returned from recorder methods but can be also set explicitly
// For example, the raw command line args and ShutdownContext are set when the StartupContext is created
private final Map<String, Object> values = new HashMap<>();
private final Deque<Runnable> shutdownTasks = new ConcurrentLinkedDeque<>();
private final Deque<Runnable> lastShutdownTasks = new ConcurrentLinkedDeque<>();
private String[] commandLineArgs;
private String currentBuildStepName;
public StartupContext() {
ShutdownContext shutdownContext = new ShutdownContext() {
@Override
public void addShutdownTask(Runnable runnable) {
if (runnable != null) {
shutdownTasks.addFirst(runnable);
} else {
throw new IllegalArgumentException("Extension passed an invalid shutdown handler");
}
}
@Override
public void addLastShutdownTask(Runnable runnable) {
if (runnable != null) {
lastShutdownTasks.addFirst(runnable);
} else {
throw new IllegalArgumentException("Extension passed an invalid last shutdown handler");
}
}
};
values.put(ShutdownContext.class.getName(), shutdownContext);
values.put(RAW_COMMAND_LINE_ARGS, new Supplier<String[]>() {
@Override
public String[] get() {
if (commandLineArgs == null) {
throw new RuntimeException("Command line arguments not available during static init");
}
return commandLineArgs;
}
});
}
public void putValue(String name, Object value) {
values.put(name, value);
}
public Object getValue(String name) {
return values.get(name);
}
@Override
public void close() {
runAllAndClear(shutdownTasks);
runAllAndClear(lastShutdownTasks);
values.clear();
}
private void runAllAndClear(Deque<Runnable> tasks) {
while (!tasks.isEmpty()) {
try {
var runnable = tasks.remove();
runnable.run();
} catch (Throwable ex) {
LOG.error("Running a shutdown task failed", ex);
}
}
}
@SuppressWarnings("unused")
public void setCommandLineArguments(String[] commandLineArguments) {
this.commandLineArgs = commandLineArguments;
}
@SuppressWarnings("unused")
public String getCurrentBuildStepName() {
return currentBuildStepName;
}
@SuppressWarnings("unused")
public void setCurrentBuildStepName(String currentBuildStepName) {
this.currentBuildStepName = currentBuildStepName;
}
}
|
StartupContext
|
java
|
quarkusio__quarkus
|
extensions/amazon-lambda/deployment/src/test/java/io/quarkus/amazon/lambda/deployment/testing/AbstractInputCollectionOutputCollection.java
|
{
"start": 380,
"end": 793
}
|
/**
 * Lambda handler mapping each incoming {@link InputPerson} to an
 * {@link OutputPerson} carrying the same name, preserving input order.
 */
class AbstractInputCollectionOutputCollection implements RequestHandler<List<InputPerson>, List<OutputPerson>> {
    @Override
    public List<OutputPerson> handleRequest(List<InputPerson> inputPersonList, Context context) {
        // Presize the result: the output size equals the input size.
        // (Also fixes the misspelled "inputPeronList" parameter name.)
        List<OutputPerson> personList = new ArrayList<>(inputPersonList.size());
        inputPersonList.forEach(person -> personList.add(new OutputPerson(person.getName())));
        return personList;
    }
}
|
AbstractInputCollectionOutputCollection
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/NotMasterException.java
|
{
"start": 1324,
"end": 1842
}
|
// Thrown when a node that is not the elected master receives a request
// that only the master may handle.
class ____ extends ElasticsearchException {
    public NotMasterException(String msg) {
        super(msg);
    }
    // Stream constructor used for transport-level deserialization.
    public NotMasterException(StreamInput in) throws IOException {
        super(in);
    }
    public NotMasterException(String msg, Object... args) {
        super(msg, args);
    }
    public NotMasterException(String msg, Throwable cause, Object... args) {
        super(msg, cause, args);
    }
    // Skip the (expensive) stack trace capture: this exception signals a
    // cluster-state condition, not a programming error, so the trace adds
    // no diagnostic value.
    @Override
    public Throwable fillInStackTrace() {
        return this;
    }
}
|
NotMasterException
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1826/SourceChild.java
|
{
"start": 198,
"end": 500
}
|
// Source bean whose hasContent flag records whether setContent was ever
// invoked — the flag flips on the assignment itself, even for null.
class SourceChild {

    // Payload value; may be null even after a set.
    private String content;
    // Starts out false; becomes true on the first setContent call.
    private Boolean hasContent = Boolean.FALSE;

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        // Record the assignment, not the value: a null set still flips the flag.
        this.content = content;
        this.hasContent = Boolean.TRUE;
    }

    public Boolean hasContent() {
        return hasContent;
    }
}
|
SourceChild
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/observable/ObservableDoOnTest.java
|
{
"start": 884,
"end": 3101
}
|
// Tests for the Observable doOn* side-effect operators: each test wires a
// callback that writes into an atomic holder, runs the chain blocking,
// and asserts both the chain's result and the recorded side effect.
class ____ extends RxJavaTest {
    @Test
    public void doOnEach() {
        final AtomicReference<String> r = new AtomicReference<>();
        String output = Observable.just("one").doOnNext(new Consumer<String>() {
            @Override
            public void accept(String v) {
                r.set(v);
            }
        }).blockingSingle();
        assertEquals("one", output);
        // The callback observed the same value the chain emitted.
        assertEquals("one", r.get());
    }
    @Test
    public void doOnError() {
        final AtomicReference<Throwable> r = new AtomicReference<>();
        Throwable t = null;
        try {
            Observable.<String> error(new RuntimeException("an error"))
                .doOnError(new Consumer<Throwable>() {
                    @Override
                    public void accept(Throwable v) {
                        r.set(v);
                    }
                }).blockingSingle();
            fail("expected exception, not a return value");
        } catch (Throwable e) {
            t = e;
        }
        assertNotNull(t);
        // The thrown error is the same instance the callback saw.
        assertEquals(t, r.get());
    }
    @Test
    public void doOnCompleted() {
        final AtomicBoolean r = new AtomicBoolean();
        String output = Observable.just("one").doOnComplete(new Action() {
            @Override
            public void run() {
                r.set(true);
            }
        }).blockingSingle();
        assertEquals("one", output);
        assertTrue(r.get());
    }
    @Test
    public void doOnTerminateComplete() {
        // doOnTerminate fires on normal completion...
        final AtomicBoolean r = new AtomicBoolean();
        String output = Observable.just("one").doOnTerminate(new Action() {
            @Override
            public void run() {
                r.set(true);
            }
        }).blockingSingle();
        assertEquals("one", output);
        assertTrue(r.get());
    }
    @Test
    public void doOnTerminateError() {
        // ...and also on error termination.
        final AtomicBoolean r = new AtomicBoolean();
        Observable.<String>error(new TestException()).doOnTerminate(new Action() {
            @Override
            public void run() {
                r.set(true);
            }
        })
        .test()
        .assertFailure(TestException.class);
        assertTrue(r.get());
    }
}
|
ObservableDoOnTest
|
java
|
quarkusio__quarkus
|
extensions/security/deployment/src/test/java/io/quarkus/security/test/permissionsallowed/MethodLevelStringPermissionsAllowedTest.java
|
{
"start": 3057,
"end": 6238
}
|
// CDI bean exercising @PermissionsAllowed with plain "name:action" string
// permissions, in both blocking and Uni (non-blocking) variants: single
// permission, deliberately unmatchable permission, multiple permissions
// (OR semantics by default), multiple actions per name, mixed name-only /
// name:action combinations, and inclusive=true (AND semantics).
class ____ implements PermissionsAllowedNameAndActionsOnlyBeanI {
    @PermissionsAllowed(WRITE_PERMISSION_BEAN)
    public final String write() {
        return WRITE_PERMISSION_BEAN;
    }
    @PermissionsAllowed(READ_PERMISSION_BEAN)
    public final String read() {
        return READ_PERMISSION_BEAN;
    }
    @PermissionsAllowed(WRITE_PERMISSION_BEAN)
    public final Uni<String> writeNonBlocking() {
        return Uni.createFrom().item(WRITE_PERMISSION_BEAN);
    }
    @PermissionsAllowed(READ_PERMISSION_BEAN)
    public final Uni<String> readNonBlocking() {
        return Uni.createFrom().item(READ_PERMISSION_BEAN);
    }
    // No test identity is granted "prohibited:bean" — always denied.
    @PermissionsAllowed("prohibited:bean")
    public final void prohibited() {
    }
    @PermissionsAllowed("prohibited:bean")
    public final Uni<Void> prohibitedNonBlocking() {
        return Uni.createFrom().nullItem();
    }
    @PermissionsAllowed({ "one:a", "two:b", "three:c", READ_PERMISSION_BEAN })
    public final String multiple() {
        return MULTIPLE_BEAN;
    }
    @PermissionsAllowed({ "one:a", "two:b", "three:c", READ_PERMISSION_BEAN })
    public final Uni<String> multipleNonBlocking() {
        return Uni.createFrom().item(MULTIPLE_BEAN);
    }
    // Same permission name repeated with different actions.
    @PermissionsAllowed({ "one:a", "two:b", "three:c", "one:b", "two:a", "three:a", READ_PERMISSION_BEAN, "read:meal" })
    public final String multipleActions() {
        return MULTIPLE_BEAN;
    }
    @PermissionsAllowed({ "one:a", "two:b", "three:c", "one:b", "two:a", "three:a", READ_PERMISSION_BEAN, "read:meal" })
    public final Uni<String> multipleNonBlockingActions() {
        return Uni.createFrom().item(MULTIPLE_BEAN);
    }
    // Mix of name-only permissions and name:action permissions.
    @PermissionsAllowed({ "one", "two", "three:c", "three", READ_PERMISSION })
    public final String combination() {
        return MULTIPLE_BEAN;
    }
    @PermissionsAllowed({ "one", "two", "three:c", "three", READ_PERMISSION })
    public final Uni<String> combinationNonBlockingActions() {
        return Uni.createFrom().item(MULTIPLE_BEAN);
    }
    @PermissionsAllowed({ "one", "two", "three:c", "three", "read:bread", "read:meal" })
    public final String combination2() {
        return "combination2";
    }
    @PermissionsAllowed({ "one", "two", "three:c", "three", "read:bread", "read:meal" })
    public final Uni<String> combination2NonBlockingActions() {
        return Uni.createFrom().item("combination2");
    }
    // inclusive=true: the caller must hold ALL listed permissions.
    @PermissionsAllowed(value = { "one", "two", "three:c", "three", "read:bread", "read:meal" }, inclusive = true)
    @Override
    public User inclusive() {
        return new User("Martin");
    }
    @PermissionsAllowed(value = { "one", "two", "three:c", "three", "read:bread", "read:meal" }, inclusive = true)
    @Override
    public Uni<User> inclusiveNonBlocking() {
        return Uni.createFrom().item(new User("Bruno"));
    }
}
@Singleton
public static
|
PermissionsAllowedNameAndActionsOnlyBean
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/interfaceproxy/DocumentImpl.java
|
{
"start": 228,
"end": 1021
}
|
class ____ extends ItemImpl implements Document {
private Blob content;
private Calendar modified;
private Calendar created;
/**
* @return Returns the created.
*/
public Calendar getCreated() {
return created;
}
/**
* @param created The created to set.
*/
public void setCreated(Calendar created) {
this.created = created;
}
/**
* @return Returns the modified.
*/
public Calendar getModified() {
return modified;
}
/**
* @param modified The modified to set.
*/
public void setModified(Calendar modified) {
this.modified = modified;
}
/**
* @return Returns the content.
*/
public Blob getContent() {
return content;
}
/**
* @param content The content to set.
*/
public void setContent(Blob content) {
this.content = content;
}
}
|
DocumentImpl
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
|
{
"start": 42270,
"end": 48718
}
|
class ____ extends ResultStrategy<
SignificantStringTerms,
SignificantStringTerms.Bucket,
SignificantStringTerms.Bucket> {
private final BackgroundFrequencyForBytes backgroundFrequencies;
private final long supersetSize;
private final SignificanceHeuristic significanceHeuristic;
private LongArray subsetSizes;
SignificantTermsResults(
SignificanceLookup significanceLookup,
SignificanceHeuristic significanceHeuristic,
CardinalityUpperBound cardinality
) {
backgroundFrequencies = significanceLookup.bytesLookup(bigArrays(), cardinality);
supersetSize = significanceLookup.supersetSize();
this.significanceHeuristic = significanceHeuristic;
boolean success = false;
try {
subsetSizes = bigArrays().newLongArray(1, true);
success = true;
} finally {
if (success == false) {
close();
}
}
}
@Override
String describe() {
return "significant_terms";
}
@Override
LeafBucketCollector wrapCollector(LeafBucketCollector primary) {
return new LeafBucketCollectorBase(primary, null) {
@Override
public void collect(int doc, long owningBucketOrd) throws IOException {
super.collect(doc, owningBucketOrd);
subsetSizes = bigArrays().grow(subsetSizes, owningBucketOrd + 1);
subsetSizes.increment(owningBucketOrd, 1);
}
};
}
@Override
ObjectArray<SignificantStringTerms.Bucket[]> buildTopBucketsPerOrd(long size) {
return bigArrays().newObjectArray(size);
}
@Override
SignificantStringTerms.Bucket[] buildBuckets(int size) {
return new SignificantStringTerms.Bucket[size];
}
@Override
SignificantStringTerms.Bucket buildEmptyTemporaryBucket() {
return new SignificantStringTerms.Bucket(new BytesRef(), 0, 0, null, format, 0);
}
private long subsetSize(long owningBucketOrd) {
// if the owningBucketOrd is not in the array that means the bucket is empty so the size has to be 0
return owningBucketOrd < subsetSizes.size() ? subsetSizes.get(owningBucketOrd) : 0;
}
@Override
BucketUpdater<SignificantStringTerms.Bucket> bucketUpdater(long owningBucketOrd, GlobalOrdLookupFunction lookupGlobalOrd) {
long subsetSize = subsetSize(owningBucketOrd);
return (spare, globalOrd, docCount) -> {
oversizedCopy(lookupGlobalOrd.apply(globalOrd), spare.termBytes);
spare.subsetDf = docCount;
spare.supersetDf = backgroundFrequencies.freq(spare.termBytes);
/*
* During shard-local down-selection we use subset/superset stats
* that are for this shard only. Back at the central reducer these
* properties will be updated with global stats.
*/
spare.updateScore(significanceHeuristic, subsetSize, supersetSize);
};
}
@Override
ObjectArrayPriorityQueue<BucketAndOrd<SignificantStringTerms.Bucket>> buildPriorityQueue(int size) {
return new BucketSignificancePriorityQueue<>(size, bigArrays());
}
@Override
SignificantStringTerms.Bucket convertTempBucketToRealBucket(
SignificantStringTerms.Bucket temp,
GlobalOrdLookupFunction lookupGlobalOrd
) throws IOException {
return temp;
}
@Override
void buildSubAggs(ObjectArray<SignificantStringTerms.Bucket[]> topBucketsPreOrd, LongArray ordsArray) throws IOException {
buildSubAggsForAllBuckets(topBucketsPreOrd, ordsArray, (b, aggs) -> b.aggregations = aggs);
}
@Override
SignificantStringTerms buildResult(long owningBucketOrd, long otherDocCount, SignificantStringTerms.Bucket[] topBuckets) {
return new SignificantStringTerms(
name,
bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(),
metadata(),
format,
subsetSize(owningBucketOrd),
supersetSize,
significanceHeuristic,
Arrays.asList(topBuckets)
);
}
@Override
SignificantStringTerms buildEmptyResult() {
return buildEmptySignificantTermsAggregation(0, supersetSize, significanceHeuristic);
}
@Override
SignificantStringTerms buildNoValuesResult(long owningBucketOrdinal) {
return buildEmptySignificantTermsAggregation(subsetSize(owningBucketOrdinal), supersetSize, significanceHeuristic);
}
@Override
public void close() {
Releasables.close(backgroundFrequencies, subsetSizes);
}
/**
* Copies the bytes from {@code from} into {@code to}, oversizing
* the destination array if the bytes won't fit into the array.
* <p>
* This is fairly similar in spirit to
* {@link BytesRef#deepCopyOf(BytesRef)} in that it is a way to read
* bytes from a mutable {@link BytesRef} into
* <strong>something</strong> that won't mutate out from under you.
* Unlike {@linkplain BytesRef#deepCopyOf(BytesRef)} its designed to
* be run over and over again into the same destination. In particular,
* oversizing the destination bytes helps to keep from allocating
* a bunch of little arrays over and over and over again.
*/
private static void oversizedCopy(BytesRef from, BytesRef to) {
if (to.bytes.length < from.length) {
to.bytes = new byte[ArrayUtil.oversize(from.length, 1)];
}
to.offset = 0;
to.length = from.length;
System.arraycopy(from.bytes, from.offset, to.bytes, 0, from.length);
}
}
/**
* Predicate used for {@link #acceptedGlobalOrdinals} if there is no filter.
*/
static final LongPredicate ALWAYS_TRUE = l -> true;
}
|
SignificantTermsResults
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1997/Car.java
|
{
"start": 578,
"end": 823
}
|
class ____ {
private String model;
public Builder model(String model) {
this.model = model;
return this;
}
public Car build() {
return new Car( this );
}
}
}
|
Builder
|
java
|
apache__logging-log4j2
|
log4j-1.2-api/src/test/java/org/apache/log4j/CategoryTest.java
|
{
"start": 16901,
"end": 17429
}
|
class ____ extends Logger {
/**
* Create new instance of MockCategory.
*
* @param name category name
*/
public MockCategory(final String name) {
super(name);
}
/**
* Request an info level message.
*
* @param msg message
*/
public void info(final String msg) {
final Priority info = Level.INFO;
forcedLog(MockCategory.class.toString(), info, msg, null);
}
}
}
|
MockCategory
|
java
|
alibaba__fastjson
|
src/main/java/com/alibaba/fastjson/asm/ClassWriter.java
|
{
"start": 7220,
"end": 9089
}
|
class
____ size = 24 + 2 * interfaceCount;
int nbFields = 0;
FieldWriter fb = firstField;
while (fb != null) {
++nbFields;
size += fb.getSize();
fb = fb.next;
}
int nbMethods = 0;
MethodWriter mb = firstMethod;
while (mb != null) {
++nbMethods;
size += mb.getSize();
mb = mb.next;
}
int attributeCount = 0;
size += pool.length;
// allocates a byte vector of this size, in order to avoid unnecessary
// arraycopy operations in the ByteVector.enlarge() method
ByteVector out = new ByteVector(size);
out.putInt(0xCAFEBABE).putInt(version);
out.putShort(index).putByteArray(pool.data, 0, pool.length);
int mask = 393216; // Opcodes.ACC_DEPRECATED | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
out.putShort(access & ~mask).putShort(name).putShort(superName);
out.putShort(interfaceCount);
for (int i = 0; i < interfaceCount; ++i) {
out.putShort(interfaces[i]);
}
out.putShort(nbFields);
fb = firstField;
while (fb != null) {
fb.put(out);
fb = fb.next;
}
out.putShort(nbMethods);
mb = firstMethod;
while (mb != null) {
mb.put(out);
mb = mb.next;
}
out.putShort(attributeCount);
return out.data;
}
// ------------------------------------------------------------------------
// Utility methods: constant pool management
// ------------------------------------------------------------------------
/**
* Adds a number or string constant to the constant pool of the
|
int
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/HttOptionsNoAuthnIntegTests.java
|
{
"start": 948,
"end": 4624
}
|
class ____ extends SecurityIntegTestCase {
@Override
protected boolean addMockHttpTransport() {
return false; // need real http
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
final Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings));
// needed to test preflight requests
builder.put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), "true")
.put(HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN.getKey(), "*");
return builder.build();
}
public void testNoAuthnForResourceOptionsMethod() throws Exception {
Request requestNoCredentials = new Request(
"OPTIONS",
randomFrom("/", "/_cluster/stats", "/some-index", "/index/_stats", "/_stats/flush")
);
// no "Authorization" request header -> request is unauthenticated
assertThat(requestNoCredentials.getOptions().getHeaders().isEmpty(), is(true));
// WRONG "Authorization" request header
Request requestWrongCredentials = new Request(
"OPTIONS",
randomFrom("/", "/_cluster/stats", "/some-index", "/index/_stats", "/_stats/flush")
);
RequestOptions.Builder options = requestWrongCredentials.getOptions().toBuilder();
options.addHeader(
"Authorization",
UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, new SecureString("WRONG"))
);
requestWrongCredentials.setOptions(options);
for (Request request : List.of(requestNoCredentials, requestWrongCredentials)) {
Response response = getRestClient().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), is(200));
assertThat(response.getHeader("Allow"), notNullValue());
assertThat(response.getHeader("X-elastic-product"), is("Elasticsearch"));
assertThat(response.getHeader("content-length"), is("0"));
}
}
public void testNoAuthnForPreFlightRequest() throws Exception {
Request requestNoCredentials = new Request(
"OPTIONS",
randomFrom("/", "/_cluster/stats", "/some-index", "/index/_stats", "/_stats/flush")
);
RequestOptions.Builder options = requestNoCredentials.getOptions().toBuilder();
options.addHeader(CorsHandler.ORIGIN, "google.com");
options.addHeader(CorsHandler.ACCESS_CONTROL_REQUEST_METHOD, "GET");
requestNoCredentials.setOptions(options);
// no "Authorization" request header -> request is unauthenticated
Request requestWrongCredentials = new Request(
"OPTIONS",
randomFrom("/", "/_cluster/stats", "/some-index", "/index/_stats", "/_stats/flush")
);
options = requestWrongCredentials.getOptions().toBuilder();
// WRONG "Authorization" request header
options.addHeader(
"Authorization",
UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, new SecureString("WRONG"))
);
options.addHeader(CorsHandler.ORIGIN, "google.com");
options.addHeader(CorsHandler.ACCESS_CONTROL_REQUEST_METHOD, "GET");
requestWrongCredentials.setOptions(options);
for (Request request : List.of(requestWrongCredentials)) {
Response response = getRestClient().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), is(200));
assertThat(response.getHeader("content-length"), is("0"));
}
}
}
|
HttOptionsNoAuthnIntegTests
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/qualifiers/AdditionalQualifiersTest.java
|
{
"start": 3900,
"end": 4008
}
|
class ____ {
}
@Target({ TYPE, METHOD, FIELD, PARAMETER })
@Retention(RUNTIME)
public @
|
Charlie
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java
|
{
"start": 2262,
"end": 11459
}
|
class ____ implements Closeable {
private final InferenceDefinition trainedModelDefinition;
private final String modelId;
private final Set<String> fieldNames;
private final Map<String, String> defaultFieldMap;
private final InferenceStats.Accumulator statsAccumulator;
private final TrainedModelStatsService trainedModelStatsService;
private volatile long persistenceQuotient = 100;
private final LongAdder currentInferenceCount;
private final InferenceConfig inferenceConfig;
private final License.OperationMode licenseLevel;
private final CircuitBreaker trainedModelCircuitBreaker;
private final AtomicLong referenceCount;
private final long cachedRamBytesUsed;
private final TrainedModelType trainedModelType;
LocalModel(
String modelId,
String nodeId,
InferenceDefinition trainedModelDefinition,
TrainedModelInput input,
Map<String, String> defaultFieldMap,
InferenceConfig modelInferenceConfig,
License.OperationMode licenseLevel,
TrainedModelType trainedModelType,
TrainedModelStatsService trainedModelStatsService,
CircuitBreaker trainedModelCircuitBreaker
) {
this.trainedModelDefinition = trainedModelDefinition;
this.cachedRamBytesUsed = trainedModelDefinition.ramBytesUsed();
this.modelId = modelId;
this.fieldNames = new HashSet<>(input.getFieldNames());
// the ctor being called means a new instance was created.
// Consequently, it was not loaded from cache and on stats persist we should increment accordingly.
this.statsAccumulator = new InferenceStats.Accumulator(modelId, nodeId, 1L);
this.trainedModelStatsService = trainedModelStatsService;
this.defaultFieldMap = defaultFieldMap == null ? null : new HashMap<>(defaultFieldMap);
this.currentInferenceCount = new LongAdder();
this.inferenceConfig = modelInferenceConfig;
this.licenseLevel = licenseLevel;
this.trainedModelCircuitBreaker = trainedModelCircuitBreaker;
this.referenceCount = new AtomicLong(1);
this.trainedModelType = trainedModelType;
}
long ramBytesUsed() {
// This should always be cached and not calculated on call.
// This is because the caching system calls this method on every promotion call that changes the LRU head
// Consequently, recalculating can cause serious throughput issues due to LRU changes in the cache
return cachedRamBytesUsed;
}
public InferenceConfig getInferenceConfig() {
return inferenceConfig;
}
TrainedModelType getTrainedModelType() {
return trainedModelType;
}
public String getModelId() {
return modelId;
}
public License.OperationMode getLicenseLevel() {
return licenseLevel;
}
public InferenceStats getLatestStatsAndReset() {
return statsAccumulator.currentStatsAndReset();
}
void persistStats(boolean flush) {
trainedModelStatsService.queueStats(getLatestStatsAndReset(), flush);
if (persistenceQuotient < 1000 && currentInferenceCount.sum() > 1000) {
persistenceQuotient = 1000;
}
if (persistenceQuotient < 10_000 && currentInferenceCount.sum() > 10_000) {
persistenceQuotient = 10_000;
}
}
/**
* Infers without updating the stats.
* This is mainly for usage by data frame analytics jobs
* when they do inference against test data.
*/
public InferenceResults inferNoStats(Map<String, Object> fields) {
LocalModel.mapFieldsIfNecessary(fields, defaultFieldMap);
Map<String, Object> flattenedFields = MapHelper.dotCollapse(fields, fieldNames);
if (flattenedFields.isEmpty()) {
new WarningInferenceResults(Messages.getMessage(INFERENCE_WARNING_ALL_FIELDS_MISSING, modelId));
}
return trainedModelDefinition.infer(flattenedFields, inferenceConfig);
}
public Collection<String> inputFields() {
return fieldNames;
}
public void infer(Map<String, Object> fields, InferenceConfigUpdate update, ActionListener<InferenceResults> listener) {
if (update.isSupported(this.inferenceConfig) == false) {
listener.onFailure(
ExceptionsHelper.badRequestException(
"Model [{}] has inference config of type [{}] which is not supported by inference request of type [{}]",
this.modelId,
this.inferenceConfig.getName(),
update.getName()
)
);
return;
}
try {
statsAccumulator.incInference();
currentInferenceCount.increment();
// Needs to happen before collapse as defaultFieldMap might resolve fields to their appropriate name
LocalModel.mapFieldsIfNecessary(fields, defaultFieldMap);
Map<String, Object> flattenedFields = MapHelper.dotCollapse(fields, fieldNames);
boolean shouldPersistStats = ((currentInferenceCount.sum() + 1) % persistenceQuotient == 0);
if (flattenedFields.isEmpty()) {
statsAccumulator.incMissingFields();
if (shouldPersistStats) {
persistStats(false);
}
listener.onResponse(new WarningInferenceResults(Messages.getMessage(INFERENCE_WARNING_ALL_FIELDS_MISSING, modelId)));
return;
}
InferenceResults inferenceResults = trainedModelDefinition.infer(
flattenedFields,
update.isEmpty() ? inferenceConfig : inferenceConfig.apply(update)
);
if (shouldPersistStats) {
persistStats(false);
}
listener.onResponse(inferenceResults);
} catch (Exception e) {
statsAccumulator.incFailure();
listener.onFailure(e);
}
}
public InferenceResults infer(Map<String, Object> fields, InferenceConfigUpdate update) throws Exception {
AtomicReference<InferenceResults> result = new AtomicReference<>();
AtomicReference<Exception> exception = new AtomicReference<>();
ActionListener<InferenceResults> listener = ActionListener.wrap(result::set, exception::set);
infer(fields, update, listener);
if (exception.get() != null) {
throw exception.get();
}
return result.get();
}
public InferenceResults inferLtr(Map<String, Object> fields, InferenceConfig config) {
statsAccumulator.incInference();
currentInferenceCount.increment();
// We should never have nested maps in a LTR context as we retrieve values from source value extractor, queries, or doc_values
assert fields.values().stream().noneMatch(o -> o instanceof Map<?, ?>);
// might resolve fields to their appropriate name
LocalModel.mapFieldsIfNecessary(fields, defaultFieldMap);
boolean shouldPersistStats = ((currentInferenceCount.sum() + 1) % persistenceQuotient == 0);
if (fields.isEmpty()) {
statsAccumulator.incMissingFields();
}
InferenceResults inferenceResults = trainedModelDefinition.infer(fields, config);
if (shouldPersistStats) {
persistStats(false);
}
return inferenceResults;
}
/**
* Used for translating field names in according to the passed `fieldMappings` parameter.
*
* This mutates the `fields` parameter in-place.
*
* Fields are only appended. If the expected field name already exists, it is not created/overwritten.
*
* Original fields are not deleted.
*
* @param fields Fields to map against
* @param fieldMapping Field originalName to expectedName string mapping
*/
public static void mapFieldsIfNecessary(Map<String, Object> fields, Map<String, String> fieldMapping) {
if (fieldMapping != null) {
fieldMapping.forEach((src, dest) -> {
Object srcValue = MapHelper.dig(src, fields);
if (srcValue != null) {
fields.putIfAbsent(dest, srcValue);
}
});
}
}
long acquire() {
long count = referenceCount.incrementAndGet();
// protect against a race where the model could be release to a
// count of zero then the model is quickly re-acquired
if (count == 1) {
trainedModelCircuitBreaker.addEstimateBytesAndMaybeBreak(trainedModelDefinition.ramBytesUsed(), modelId);
}
return count;
}
public long getReferenceCount() {
return referenceCount.get();
}
public long release() {
long count = referenceCount.decrementAndGet();
assert count >= 0;
if (count == 0) {
// no references to this model, it no longer needs to be accounted for
trainedModelCircuitBreaker.addWithoutBreaking(-ramBytesUsed());
}
return referenceCount.get();
}
/**
* Convenience method so the
|
LocalModel
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/IgniteCacheComponentBuilderFactory.java
|
{
"start": 1969,
"end": 7166
}
|
interface ____ extends ComponentBuilder<IgniteCacheComponent> {
/**
* The resource from where to load the configuration. It can be a: URL,
* String or InputStream type.
*
* The option is a: <code>java.lang.Object</code> type.
*
* Group: common
*
* @param configurationResource the value to set
* @return the dsl builder
*/
default IgniteCacheComponentBuilder configurationResource(java.lang.Object configurationResource) {
doSetProperty("configurationResource", configurationResource);
return this;
}
/**
* To use an existing Ignite instance.
*
* The option is a: <code>org.apache.ignite.Ignite</code>
* type.
*
* Group: common
*
* @param ignite the value to set
* @return the dsl builder
*/
default IgniteCacheComponentBuilder ignite(org.apache.ignite.Ignite ignite) {
doSetProperty("ignite", ignite);
return this;
}
/**
* Allows the user to set a programmatic ignite configuration.
*
* The option is a:
* <code>org.apache.ignite.configuration.IgniteConfiguration</code> type.
*
* Group: common
*
* @param igniteConfiguration the value to set
* @return the dsl builder
*/
default IgniteCacheComponentBuilder igniteConfiguration(org.apache.ignite.configuration.IgniteConfiguration igniteConfiguration) {
doSetProperty("igniteConfiguration", igniteConfiguration);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default IgniteCacheComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default IgniteCacheComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default IgniteCacheComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
}
|
IgniteCacheComponentBuilder
|
java
|
junit-team__junit5
|
junit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/FieldSource.java
|
{
"start": 7621,
"end": 7691
}
|
interface ____ {
/**
* The names of fields within the test
|
FieldSource
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/TestExecutionListenersTests.java
|
{
"start": 10110,
"end": 10245
}
|
class ____ {
}
@TestExecutionListeners(QuuxTestExecutionListener.class)
static
|
MergedDefaultListenersWithCustomListenerInsertedTestCase
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.