language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/PlanInterpreter.java | {
"start": 1979,
"end": 4800
} | class ____ {
private static final Logger logger = LogManager.getLogger(PlanInterpreter.class);
private final Map<Class<?>, Object> instances = new LinkedHashMap<>();
PlanInterpreter(Map<Class<?>, Object> existingInstances) {
existingInstances.forEach(this::addInstance);
}
/**
* Main entry point. Contains the implementation logic for each {@link InjectionStep}.
*/
void executePlan(List<InjectionStep> plan) {
int numConstructorCalls = 0;
for (InjectionStep step : plan) {
if (step instanceof InstantiateStep i) {
MethodHandleSpec spec = i.spec();
logger.trace("Instantiating {}", spec.requestedType().getSimpleName());
addInstance(spec.requestedType(), instantiate(spec));
++numConstructorCalls;
} else {
// TODO: switch patterns would make this unnecessary
assert false : "Unexpected step type: " + step.getClass().getSimpleName();
throw new IllegalStateException("Unexpected step type: " + step.getClass().getSimpleName());
}
}
logger.debug("Instantiated {} objects", numConstructorCalls);
}
/**
* @return the list element corresponding to instances.get(type).get(0),
* assuming that instances.get(type) has exactly one element.
* @throws IllegalStateException if instances.get(type) does not have exactly one element
*/
public <T> T theInstanceOf(Class<T> type) {
Object instance = instances.get(type);
if (instance == null) {
throw new IllegalStateException("No object of type " + type.getSimpleName());
}
return type.cast(instance);
}
private void addInstance(Class<?> requestedType, Object instance) {
Object old = instances.put(requestedType, instance);
if (old != null) {
throw new IllegalStateException("Multiple objects for " + requestedType);
}
}
/**
* @throws IllegalStateException if the <code>MethodHandle</code> throws.
*/
@SuppressForbidden(
reason = "Can't call invokeExact because we don't know the method argument types statically, "
+ "since each constructor has a different signature"
)
private Object instantiate(MethodHandleSpec spec) {
Object[] args = spec.parameters().stream().map(this::parameterValue).toArray();
try {
return spec.methodHandle().invokeWithArguments(args);
} catch (Throwable e) {
throw new IllegalStateException("Unexpected exception while instantiating {}" + spec, e);
}
}
private Object parameterValue(ParameterSpec parameterSpec) {
return theInstanceOf(parameterSpec.formalType());
}
}
| PlanInterpreter |
java | apache__camel | components/camel-kafka/src/main/java/org/apache/camel/component/kafka/TaskHealthState.java | {
"start": 1033,
"end": 1237
} | class ____ the health checker. Fields and methods used
* used to build an instance of this class, must be made thread-safe (i.e.: most importantly, read fields
* should be marked as volatile).
*/
public | to |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/delegation/DefaultCalciteContext.java | {
"start": 1559,
"end": 3386
} | class ____ implements CalciteContext {
private final CatalogManager catalogManager;
private final PlannerContext plannerContext;
public DefaultCalciteContext(CatalogManager catalogManager, PlannerContext plannerContext) {
this.catalogManager = catalogManager;
this.plannerContext = plannerContext;
}
public PlannerContext getPlannerContext() {
return plannerContext;
}
public CatalogManager getCatalogManager() {
return catalogManager;
}
@Override
public CatalogRegistry getCatalogRegistry() {
return catalogManager;
}
@Override
public CalciteCatalogReader createCatalogReader(boolean lenientCaseSensitivity) {
return plannerContext.createCatalogReader(lenientCaseSensitivity);
}
@Override
public RelOptCluster getCluster() {
return plannerContext.getCluster();
}
@Override
public FrameworkConfig createFrameworkConfig() {
return plannerContext.createFrameworkConfig();
}
@Override
public RelDataTypeFactory getTypeFactory() {
return plannerContext.getTypeFactory();
}
@Override
public RelBuilder createRelBuilder() {
return plannerContext.createRelBuilder();
}
@Override
public TableConfig getTableConfig() {
return plannerContext.getFlinkContext().getTableConfig();
}
@Override
public ClassLoader getClassLoader() {
return plannerContext.getFlinkContext().getClassLoader();
}
@Override
public FunctionCatalog getFunctionCatalog() {
return plannerContext.getFlinkContext().getFunctionCatalog();
}
@Override
public RelOptTable.ToRelContext createToRelContext() {
return plannerContext.createFlinkPlanner().createToRelContext();
}
}
| DefaultCalciteContext |
java | netty__netty | codec-compression/src/main/java/io/netty/handler/codec/compression/Bzip2Rand.java | {
"start": 742,
"end": 4669
} | class ____ {
/**
* The Bzip2 specification originally included the optional addition of a slight pseudo-random
* perturbation to the input data, in order to work around the block sorting algorithm's non-
* optimal performance on some types of input. The current mainline bzip2 does not require this
* and will not create randomised blocks, but compatibility is still required for old data (and
* third party compressors that haven't caught up). When decompressing a randomised block, for
* each value N in this array, a 1 will be XOR'd onto the output of the Burrows-Wheeler
* transform stage after N bytes, then the next N taken from the following entry.
*/
private static final int[] RNUMS = {
619, 720, 127, 481, 931, 816, 813, 233, 566, 247, 985, 724, 205, 454, 863, 491,
741, 242, 949, 214, 733, 859, 335, 708, 621, 574, 73, 654, 730, 472, 419, 436,
278, 496, 867, 210, 399, 680, 480, 51, 878, 465, 811, 169, 869, 675, 611, 697,
867, 561, 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, 150, 238, 59, 379,
684, 877, 625, 169, 643, 105, 170, 607, 520, 932, 727, 476, 693, 425, 174, 647,
73, 122, 335, 530, 442, 853, 695, 249, 445, 515, 909, 545, 703, 919, 874, 474,
882, 500, 594, 612, 641, 801, 220, 162, 819, 984, 589, 513, 495, 799, 161, 604,
958, 533, 221, 400, 386, 867, 600, 782, 382, 596, 414, 171, 516, 375, 682, 485,
911, 276, 98, 553, 163, 354, 666, 933, 424, 341, 533, 870, 227, 730, 475, 186,
263, 647, 537, 686, 600, 224, 469, 68, 770, 919, 190, 373, 294, 822, 808, 206,
184, 943, 795, 384, 383, 461, 404, 758, 839, 887, 715, 67, 618, 276, 204, 918,
873, 777, 604, 560, 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, 652, 934,
970, 447, 318, 353, 859, 672, 112, 785, 645, 863, 803, 350, 139, 93, 354, 99,
820, 908, 609, 772, 154, 274, 580, 184, 79, 626, 630, 742, 653, 282, 762, 623,
680, 81, 927, 626, 789, 125, 411, 521, 938, 300, 821, 78, 343, 175, 128, 250,
170, 774, 972, 275, 999, 639, 495, 78, 352, 126, 857, 956, 358, 619, 580, 124,
737, 594, 701, 612, 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, 944, 375,
748, 52, 600, 747, 642, 182, 862, 81, 344, 805, 988, 739, 511, 655, 814, 334,
249, 515, 897, 955, 664, 981, 649, 113, 974, 459, 893, 228, 433, 837, 553, 268,
926, 240, 102, 654, 459, 51, 686, 754, 806, 760, 493, 403, 415, 394, 687, 700,
946, 670, 656, 610, 738, 392, 760, 799, 887, 653, 978, 321, 576, 617, 626, 502,
894, 679, 243, 440, 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, 707, 151,
457, 449, 797, 195, 791, 558, 945, 679, 297, 59, 87, 824, 713, 663, 412, 693,
342, 606, 134, 108, 571, 364, 631, 212, 174, 643, 304, 329, 343, 97, 430, 751,
497, 314, 983, 374, 822, 928, 140, 206, 73, 263, 980, 736, 876, 478, 430, 305,
170, 514, 364, 692, 829, 82, 855, 953, 676, 246, 369, 970, 294, 750, 807, 827,
150, 790, 288, 923, 804, 378, 215, 828, 592, 281, 565, 555, 710, 82, 896, 831,
547, 261, 524, 462, 293, 465, 502, 56, 661, 821, 976, 991, 658, 869, 905, 758,
745, 193, 768, 550, 608, 933, 378, 286, 215, 979, 792, 961, 61, 688, 793, 644,
986, 403, 106, 366, 905, 644, 372, 567, 466, 434, 645, 210, 389, 550, 919, 135,
780, 773, 635, 389, 707, 100, 626, 958, 165, 504, 920, 176, 193, 713, 857, 265,
203, 50, 668, 108, 645, 990, 626, 197, 510, 357, 358, 850, 858, 364, 936, 638
};
/**
* Return the random number at a specific index.
*
* @param i the index
* @return the random number
*/
static int rNums(int i) {
return RNUMS[i];
}
private Bzip2Rand() { }
}
| Bzip2Rand |
java | apache__flink | flink-core/src/test/java/org/apache/flink/testutils/DeeplyEqualsChecker.java | {
"start": 1522,
"end": 1736
} | class ____ {
/**
* Checker that compares o1 and o2 objects if they are deeply equal.
*
* <p><b>NOTE:</b> All nested comparisons should be done through checker.
*/
public | DeeplyEqualsChecker |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/instant/InstantPatternDynamicFormatterSequencingBenchmark.java | {
"start": 1538,
"end": 2245
} | class ____ {
static final Locale LOCALE = Locale.US;
static final TimeZone TIME_ZONE = TimeZone.getTimeZone("UTC");
private static final Instant[] INSTANTS = createInstants();
private static Instant[] createInstants() {
final Instant initInstant = Instant.parse("2020-05-14T10:44:23.901Z");
return IntStream.range(0, 1_000)
.mapToObj((final int index) -> Instant.ofEpochSecond(
Math.addExact(initInstant.getEpochSecond(), index),
Math.addExact(initInstant.getNano(), index)))
.toArray(Instant[]::new);
}
@FunctionalInterface
private | InstantPatternDynamicFormatterSequencingBenchmark |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/CoGroupRawDriver.java | {
"start": 4185,
"end": 5359
} | class ____<IN> implements Iterator<IN> {
private IN reuse;
private final MutableObjectIterator<IN> iterator;
private boolean consumed = true;
public SimpleIterator(IN reuse, MutableObjectIterator<IN> iterator) {
this.iterator = iterator;
this.reuse = reuse;
}
@Override
public boolean hasNext() {
try {
if (!consumed) {
return true;
}
IN result = iterator.next(reuse);
consumed = result == null;
return !consumed;
} catch (IOException ioex) {
throw new RuntimeException(
"An error occurred while reading the next record: " + ioex.getMessage(),
ioex);
}
}
@Override
public IN next() {
consumed = true;
return reuse;
}
@Override
public void remove() { // unused
}
}
}
}
| SimpleIterator |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorSupplier.java | {
"start": 860,
"end": 1241
} | interface ____ {
Aggregator build(
String name,
int shardSize,
AggregatorFactories factories,
AggregationContext context,
Aggregator parent,
Map<String, Object> metadata,
ValuesSourceConfig valuesSourceConfig,
int maxDocsPerValue,
String executionHint
) throws IOException;
}
| DiversifiedAggregatorSupplier |
java | apache__camel | components/camel-file/src/main/java/org/apache/camel/component/file/FileConsumer.java | {
"start": 1861,
"end": 18583
} | class ____ extends GenericFileConsumer<File> implements ResumeAware<ResumeStrategy> {
private static final Logger LOG = LoggerFactory.getLogger(FileConsumer.class);
private ResumeStrategy resumeStrategy;
private final String endpointPath;
private Set<String> extendedAttributes;
public FileConsumer(FileEndpoint endpoint, Processor processor, GenericFileOperations<File> operations,
GenericFileProcessStrategy<File> processStrategy) {
super(endpoint, processor, operations, processStrategy);
this.endpointPath = endpoint.getConfiguration().getDirectory();
if (endpoint.getExtendedAttributes() != null) {
List<String> attributes = Arrays.asList(endpoint.getExtendedAttributes().split(","));
this.extendedAttributes = new HashSet<>(attributes);
}
}
@Override
protected Exchange createExchange(GenericFile<File> file) {
Exchange exchange = createExchange(true);
if (file != null) {
file.bindToExchange(exchange, getEndpoint().isProbeContentType());
}
return exchange;
}
private boolean pollDirectory(Exchange dynamic, File directory, List<GenericFile<File>> fileList, int depth) {
depth++;
if (LOG.isTraceEnabled()) {
LOG.trace("Polling directory: {}, absolute path: {}", directory.getPath(), directory.getAbsolutePath());
}
final File[] files = listFiles(directory);
if (files == null || files.length == 0) {
return true;
}
if (getEndpoint().isPreSort()) {
Arrays.sort(files, Comparator.comparing(File::getAbsoluteFile));
}
if (processPolledFiles(dynamic, fileList, depth, files)) {
return false;
}
return true;
}
private boolean processPolledFiles(Exchange dynamic, List<GenericFile<File>> fileList, int depth, File[] files) {
for (File file : files) {
// check if we can continue polling in files
if (!canPollMoreFiles(fileList)) {
return true;
}
// trace log as Windows/Unix can have different views what the file is
if (LOG.isTraceEnabled()) {
LOG.trace("Found file: {} [isAbsolute: {}, isDirectory: {}, isFile: {}, isHidden: {}]", file, file.isAbsolute(),
file.isDirectory(), file.isFile(),
file.isHidden());
}
// creates a generic file
Supplier<GenericFile<File>> gf = Suppliers.memorize(
() -> asGenericFile(endpointPath, file, getEndpoint().getCharset(), getEndpoint().isProbeContentType()));
if (resumeStrategy != null) {
final ResumeAdapter adapter = setupResumeStrategy(gf.get());
if (adapter instanceof DirectoryEntriesResumeAdapter directoryEntriesResumeAdapter) {
LOG.trace("Running the resume process for file {}", file);
if (directoryEntriesResumeAdapter.resume(file)) {
LOG.trace("Skipping file {} because it has been marked previously consumed", file);
continue;
}
}
}
if (processEntry(dynamic, fileList, depth, file, gf, files)) {
return true;
}
}
return false;
}
private boolean processEntry(
Exchange dynamic,
List<GenericFile<File>> fileList, int depth, File file, Supplier<GenericFile<File>> gf, File[] files) {
if (file.isDirectory()) {
return processDirectoryEntry(dynamic, fileList, depth, file, gf, files);
} else {
processFileEntry(dynamic, fileList, depth, file, gf, files);
}
return false;
}
private void processFileEntry(
Exchange dynamic,
List<GenericFile<File>> fileList, int depth, File file, Supplier<GenericFile<File>> gf, File[] files) {
// Windows can report false to a file on a share so regard it
// always as a file (if it is not a directory)
if (depth >= endpoint.minDepth) {
boolean valid
= isValidFile(dynamic, gf, file.getName(), file.getAbsolutePath(),
getRelativeFilePath(endpointPath, null, null, file),
false, files);
if (valid) {
LOG.trace("Adding valid file: {}", file);
if (extendedAttributes != null) {
Path path = file.toPath();
Map<String, Object> allAttributes = new HashMap<>();
for (String attribute : extendedAttributes) {
readAttributes(file, path, allAttributes, attribute);
}
gf.get().setExtendedAttributes(allAttributes);
}
fileList.add(gf.get());
}
}
}
private boolean processDirectoryEntry(
Exchange dynamic,
List<GenericFile<File>> fileList, int depth, File file, Supplier<GenericFile<File>> gf, File[] files) {
if (endpoint.isRecursive() && depth < endpoint.getMaxDepth()) {
boolean valid
= isValidFile(dynamic, gf, file.getName(), file.getAbsolutePath(),
getRelativeFilePath(endpointPath, null, null, file),
true, files);
if (valid) {
boolean canPollMore = pollDirectory(dynamic, file, fileList, depth);
return !canPollMore;
}
}
return false;
}
private ResumeAdapter setupResumeStrategy(GenericFile<File> gf) {
ResumeAdapter adapter = resumeStrategy.getAdapter();
LOG.trace("Checking the resume adapter: {}", adapter);
if (adapter instanceof FileOffsetResumeAdapter fileOffsetResumeAdapter) {
LOG.trace("The resume adapter is for offsets: {}", adapter);
fileOffsetResumeAdapter.setResumePayload(gf);
adapter.resume();
}
return adapter;
}
@Override
protected boolean pollDirectory(Exchange dynamic, String fileName, List<GenericFile<File>> fileList, int depth) {
LOG.trace("pollDirectory from fileName: {}", fileName);
File directory = new File(fileName);
if (!directory.exists() || !directory.isDirectory()) {
LOG.debug("Cannot poll as directory does not exist or its not a directory: {}", directory);
if (getEndpoint().isDirectoryMustExist()) {
throw new GenericFileOperationFailedException("Directory does not exist: " + directory);
}
return true;
}
return pollDirectory(dynamic, directory, fileList, depth);
}
private File[] listFiles(File directory) {
if (!getEndpoint().isIncludeHiddenDirs() && directory.isHidden()) {
return null;
}
final File[] dirFiles = directory.listFiles();
if (dirFiles == null || dirFiles.length == 0) {
// no files in this directory to poll
if (LOG.isTraceEnabled()) {
LOG.trace("No files found in directory: {}", directory.getPath());
}
return null;
} else {
// we found some files
if (LOG.isTraceEnabled()) {
LOG.trace("Found {} in directory: {}", dirFiles.length, directory.getPath());
}
}
return dirFiles;
}
private void readAttributes(File file, Path path, Map<String, Object> allAttributes, String attribute) {
try {
String prefix = null;
if (attribute.endsWith(":*")) {
prefix = attribute.substring(0, attribute.length() - 1);
} else if (attribute.equals("*")) {
prefix = "basic:";
}
if (ObjectHelper.isNotEmpty(prefix)) {
Map<String, Object> attributes = Files.readAttributes(path, attribute);
if (attributes != null) {
for (Map.Entry<String, Object> entry : attributes.entrySet()) {
allAttributes.put(prefix + entry.getKey(), entry.getValue());
}
}
} else if (!attribute.contains(":")) {
allAttributes.put("basic:" + attribute, Files.getAttribute(path, attribute));
} else {
allAttributes.put(attribute, Files.getAttribute(path, attribute));
}
} catch (IOException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Unable to read attribute {} on file {}", attribute, file, e);
}
}
}
@Override
protected boolean isMatched(Supplier<GenericFile<File>> file, String doneFileName, File[] files) {
String onlyName = FileUtil.stripPath(doneFileName);
// the done file name must be among the files
for (File f : files) {
if (f.getName().equals(onlyName)) {
return true;
}
}
LOG.trace("Done file: {} does not exist", doneFileName);
return false;
}
/**
* Creates a new GenericFile<File> based on the given file.
*
* @param endpointPath the starting directory the endpoint was configured with
* @param file the source file
* @param probeContentType whether to probe the content type of the file or not
* @return wrapped as a GenericFile
*/
public static GenericFile<File> asGenericFile(String endpointPath, File file, String charset, boolean probeContentType) {
GenericFile<File> answer = new GenericFile<>(probeContentType);
// use file specific binding
answer.setBinding(new FileBinding());
answer.setCharset(charset);
answer.setEndpointPath(endpointPath);
answer.setFile(file);
answer.setFileNameOnly(file.getName());
answer.setDirectory(file.isDirectory());
// must use FileUtil.isAbsolute to have consistent check for whether the
// file is
// absolute or not. As windows do not consider \ paths as absolute where
// as all
// other OS platforms will consider \ as absolute. The logic in Camel
// mandates
// that we align this for all OS. That is why we must use
// FileUtil.isAbsolute
// to return a consistent answer for all OS platforms.
answer.setAbsolute(FileUtil.isAbsolute(file));
answer.setAbsoluteFilePath(file.getAbsolutePath());
// file length and last modified are loaded lazily
answer.setFileLengthSupplier(file::length);
answer.setLastModifiedSupplier(file::lastModified);
// compute the file path as relative to the starting directory
File path;
String endpointNormalizedSep = FileUtil.normalizePath(endpointPath) + File.separator;
String p = file.getPath();
if (p.startsWith(endpointNormalizedSep)) {
p = p.substring(endpointNormalizedSep.length());
}
path = new File(p);
if (path.getParent() != null) {
answer.setRelativeFilePath(path.getParent() + File.separator + file.getName());
} else {
answer.setRelativeFilePath(path.getName());
}
// the file name should be the relative path
answer.setFileName(answer.getRelativeFilePath());
// use file as body as we have converters if needed as stream
answer.setBody(file);
return answer;
}
@Override
protected Supplier<String> getRelativeFilePath(String endpointPath, String path, String absolutePath, File file) {
return () -> {
File f;
String endpointNormalizedSep = FileUtil.normalizePath(endpointPath) + File.separator;
String p = file.getPath();
if (p.startsWith(endpointNormalizedSep)) {
p = p.substring(endpointNormalizedSep.length());
}
f = new File(p);
String answer;
if (f.getParent() != null) {
answer = f.getParent() + File.separator + file.getName();
} else {
answer = f.getName();
}
return answer;
};
}
@Override
protected void updateFileHeaders(GenericFile<File> file, Message message) {
File upToDateFile = file.getFile();
if (fileHasMoved(file)) {
upToDateFile = new File(file.getAbsoluteFilePath());
}
long length = upToDateFile.length();
long modified = upToDateFile.lastModified();
file.setFileLength(length);
file.setLastModified(modified);
if (length >= 0) {
message.setHeader(FileConstants.FILE_LENGTH, length);
}
if (modified >= 0) {
message.setHeader(FileConstants.FILE_LAST_MODIFIED, modified);
}
message.setHeader(FileConstants.INITIAL_OFFSET, Resumables.of(upToDateFile, file.getLastOffsetValue()));
}
@Override
public FileEndpoint getEndpoint() {
return (FileEndpoint) super.getEndpoint();
}
@Override
protected boolean isMatchedHiddenFile(Supplier<GenericFile<File>> file, String name, boolean isDirectory) {
if (isDirectory) {
if (!name.startsWith(".")) {
return true;
}
return getEndpoint().isIncludeHiddenDirs() && !FileConstants.DEFAULT_SUB_FOLDER.equals(name);
}
if (getEndpoint().isIncludeHiddenFiles()) {
return true;
} else {
return super.isMatchedHiddenFile(file, name, isDirectory);
}
}
private boolean fileHasMoved(GenericFile<File> file) {
// GenericFile's absolute path is always up to date whereas the
// underlying file is not
return !file.getFile().getAbsolutePath().equals(file.getAbsoluteFilePath());
}
@Override
protected void doStart() throws Exception {
if (resumeStrategy != null) {
resumeStrategy.loadCache();
}
// turn off scheduler first, so autoCreate is handled before scheduler
// starts
boolean startScheduler = isStartScheduler();
setStartScheduler(false);
try {
super.doStart();
// auto create starting directory if needed
File file = getEndpoint().getFile();
if (!file.exists() && !file.isDirectory()) {
tryCreateDirectory(file);
}
// ensure directory can be read
tryReadingStartDirectory(file);
} finally {
if (startScheduler) {
setStartScheduler(true);
startScheduler();
}
}
super.doStart();
}
private void tryCreateDirectory(File file) throws FileNotFoundException {
if (getEndpoint().isAutoCreate()) {
doCreateStartDirectory(file);
} else if (getEndpoint().isStartingDirectoryMustExist()) {
throw new FileNotFoundException("Starting directory does not exist: " + file);
}
}
private void doCreateStartDirectory(File file) {
LOG.debug("Creating non existing starting directory: {}", file);
boolean absolute = FileUtil.isAbsolute(file);
boolean created = operations.buildDirectory(file.getPath(), absolute);
if (!created) {
LOG.warn("Cannot auto create starting directory: {}", file);
}
}
private void tryReadingStartDirectory(File file) throws IOException {
if (!getEndpoint().isStartingDirectoryMustExist() && getEndpoint().isStartingDirectoryMustHaveAccess()) {
throw new IllegalArgumentException(
"You cannot set startingDirectoryMustHaveAccess=true without setting startingDirectoryMustExist=true");
} else if (getEndpoint().isStartingDirectoryMustExist() && getEndpoint().isStartingDirectoryMustHaveAccess()) {
if (!file.canRead() || !file.canWrite()) {
throw new IOException("Starting directory permission denied: " + file);
}
}
}
@Override
public ResumeStrategy getResumeStrategy() {
return resumeStrategy;
}
@Override
public void setResumeStrategy(ResumeStrategy resumeStrategy) {
this.resumeStrategy = resumeStrategy;
}
@Override
public String adapterFactoryService() {
return "file-adapter-factory";
}
}
| FileConsumer |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/IterationIntermediateTask.java | {
"start": 2242,
"end": 5960
} | class ____<S extends Function, OT>
extends AbstractIterativeTask<S, OT> {
private static final Logger log = LoggerFactory.getLogger(IterationIntermediateTask.class);
private WorksetUpdateOutputCollector<OT> worksetUpdateOutputCollector;
// --------------------------------------------------------------------------------------------
/**
* Create an Invokable task and set its environment.
*
* @param environment The environment assigned to this invokable.
*/
public IterationIntermediateTask(Environment environment) {
super(environment);
}
// --------------------------------------------------------------------------------------------
@Override
protected void initialize() throws Exception {
super.initialize();
// set the last output collector of this task to reflect the iteration intermediate state
// update
// a) workset update
// b) solution set update
// c) none
Collector<OT> delegate = getLastOutputCollector();
if (isWorksetUpdate) {
// sanity check: we should not have a solution set and workset update at the same time
// in an intermediate task
if (isSolutionSetUpdate) {
throw new IllegalStateException(
"Plan bug: Intermediate task performs workset and solutions set update.");
}
Collector<OT> outputCollector = createWorksetUpdateOutputCollector(delegate);
// we need the WorksetUpdateOutputCollector separately to count the collected elements
if (isWorksetIteration) {
worksetUpdateOutputCollector = (WorksetUpdateOutputCollector<OT>) outputCollector;
}
setLastOutputCollector(outputCollector);
} else if (isSolutionSetUpdate) {
setLastOutputCollector(createSolutionSetUpdateOutputCollector(delegate));
}
}
@Override
public void run() throws Exception {
SuperstepKickoffLatch nextSuperstepLatch =
SuperstepKickoffLatchBroker.instance().get(brokerKey());
while (this.running && !terminationRequested()) {
if (log.isInfoEnabled()) {
log.info(formatLogString("starting iteration [" + currentIteration() + "]"));
}
super.run();
// check if termination was requested
verifyEndOfSuperstepState();
if (isWorksetUpdate && isWorksetIteration) {
long numCollected = worksetUpdateOutputCollector.getElementsCollectedAndReset();
worksetAggregator.aggregate(numCollected);
}
if (log.isInfoEnabled()) {
log.info(formatLogString("finishing iteration [" + currentIteration() + "]"));
}
// let the successors know that the end of this superstep data is reached
sendEndOfSuperstep();
if (isWorksetUpdate) {
// notify iteration head if responsible for workset update
worksetBackChannel.notifyOfEndOfSuperstep();
}
boolean terminated =
nextSuperstepLatch.awaitStartOfSuperstepOrTermination(currentIteration() + 1);
if (terminated) {
requestTermination();
} else {
incrementIterationCounter();
}
}
}
private void sendEndOfSuperstep() throws IOException, InterruptedException {
for (RecordWriter eventualOutput : this.eventualOutputs) {
eventualOutput.broadcastEvent(EndOfSuperstepEvent.INSTANCE);
}
}
}
| IterationIntermediateTask |
java | netty__netty | microbench/src/main/java/io/netty/handler/codec/http2/HpackHeader.java | {
"start": 1539,
"end": 3019
} | class ____ {
private static final String ALPHABET =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_";
final CharSequence name;
final CharSequence value;
private HpackHeader(byte[] name, byte[] value) {
this.name = new AsciiString(name, false);
this.value = new AsciiString(value, false);
}
/**
* Creates a number of random headers with the given name/value lengths.
*/
static List<HpackHeader> createHeaders(int numHeaders, int nameLength, int valueLength,
boolean limitToAscii) {
List<HpackHeader> hpackHeaders = new ArrayList<HpackHeader>(numHeaders);
for (int i = 0; i < numHeaders; ++i) {
// Force always ascii for header names
byte[] name = randomBytes(new byte[nameLength], true);
byte[] value = randomBytes(new byte[valueLength], limitToAscii);
hpackHeaders.add(new HpackHeader(name, value));
}
return hpackHeaders;
}
private static byte[] randomBytes(byte[] bytes, boolean limitToAscii) {
Random r = new Random();
if (limitToAscii) {
for (int index = 0; index < bytes.length; ++index) {
int charIndex = r.nextInt(ALPHABET.length());
bytes[index] = (byte) ALPHABET.charAt(charIndex);
}
} else {
r.nextBytes(bytes);
}
return bytes;
}
}
| HpackHeader |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/JSONTest3.java | {
"start": 875,
"end": 977
} | class ____ {
private Map<String, Object> attributes = new HashMap<String, Object>();
}
}
| Model |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_3200/Issue3281.java | {
"start": 1125,
"end": 1528
} | class ____ {
private HashMap<String, Long> counterMap;
private Date formatDate;
private HashMap<String, TGigest> modelScores;
private String modelName;
private Long modelVersion;
private Long pit;
private String useCaseName;
private String variableName;
}
@Builder
@Data
@AllArgsConstructor
public static | ModelState |
java | apache__camel | components/camel-xj/src/test/java/org/apache/camel/component/xj/J2XOutputBytesTest.java | {
"start": 1040,
"end": 4041
} | class ____ extends CamelTestSupport {
@Test
public void testOutput() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("<?xml version=\"1.0\" encoding=\"UTF-8\"?><hello>world!</hello>");
mock.message(0).body().isInstanceOf(byte[].class);
template.sendBody("direct:start", "{\"hello\": \"world!\"}");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testOutputSourceHeader() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:sourceHeader");
mock.expectedBodiesReceived("<?xml version=\"1.0\" encoding=\"UTF-8\"?><hello>world!</hello>");
mock.message(0).body().isInstanceOf(byte[].class);
template.send("direct:sourceHeader", exchange -> {
exchange.getIn().setHeader("xmlSource", "{\"hello\": \"world!\"}");
});
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testOutputSourceVariable() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:sourceVariable");
mock.expectedBodiesReceived("<?xml version=\"1.0\" encoding=\"UTF-8\"?><hello>world!</hello>");
mock.message(0).body().isInstanceOf(byte[].class);
template.send("direct:sourceVariable", exchange -> {
exchange.setVariable("xmlSource", "{\"hello\": \"world!\"}");
});
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testOutputSourceProperty() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:sourceProperty");
mock.expectedBodiesReceived("<?xml version=\"1.0\" encoding=\"UTF-8\"?><hello>world!</hello>");
mock.message(0).body().isInstanceOf(byte[].class);
template.send("direct:sourceProperty", exchange -> {
exchange.setProperty("xmlSource", "{\"hello\": \"world!\"}");
});
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.to("xj:hellojson2xml.xsl?transformDirection=JSON2XML&output=bytes")
.to("mock:result");
from("direct:sourceHeader")
.to("xj:hellojson2xml.xsl?source=header:xmlSource&transformDirection=JSON2XML&output=bytes")
.to("mock:sourceHeader");
from("direct:sourceVariable")
.to("xj:hellojson2xml.xsl?source=variable:xmlSource&transformDirection=JSON2XML&output=bytes")
.to("mock:sourceVariable");
from("direct:sourceProperty")
.to("xj:hellojson2xml.xsl?source=property:xmlSource&transformDirection=JSON2XML&output=bytes")
.to("mock:sourceProperty");
}
};
}
}
| J2XOutputBytesTest |
java | elastic__elasticsearch | x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java | {
"start": 751,
"end": 1882
} | class ____ extends Attribute {
private final DataType dataType;
protected TypedAttribute(
Source source,
String name,
DataType dataType,
Nullability nullability,
@Nullable NameId id,
boolean synthetic
) {
this(source, null, name, dataType, nullability, id, synthetic);
}
protected TypedAttribute(
Source source,
@Nullable String qualifier,
String name,
DataType dataType,
Nullability nullability,
@Nullable NameId id,
boolean synthetic
) {
super(source, qualifier, name, nullability, id, synthetic);
this.dataType = dataType;
}
@Override
public DataType dataType() {
return dataType;
}
@Override
protected int innerHashCode(boolean ignoreIds) {
return Objects.hash(super.innerHashCode(ignoreIds), dataType);
}
@Override
protected boolean innerEquals(Object o, boolean ignoreIds) {
var other = (TypedAttribute) o;
return super.innerEquals(other, ignoreIds) && dataType == other.dataType;
}
}
| TypedAttribute |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/IndicesTest.java | {
"start": 1100,
"end": 1881
} | class ____ {
@BeforeEach
public void createTestData(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
Project project = new Project(1);
Role role = new Role(1);
session.persist( project );
session.persist( role );
Person person = new Person(1, project, role);
session.persist( person );
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope factoryScope) throws Exception {
factoryScope.dropData();
}
@Test
public void testSelectIndices(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
List<?> result = session.createQuery("select indices(p.roles) from Person p" ).list();
assertThat( result ).hasSize( 1 );
} );
}
@Entity(name = "Person")
public static | IndicesTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/StubComponentBuilderFactory.java | {
"start": 1846,
"end": 11582
} | interface ____ extends ComponentBuilder<StubComponent> {
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default StubComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Sets the default number of concurrent threads processing exchanges.
*
* The option is a: <code>int</code> type.
*
* Default: 1
* Group: consumer
*
* @param concurrentConsumers the value to set
* @return the dsl builder
*/
default StubComponentBuilder concurrentConsumers(int concurrentConsumers) {
doSetProperty("concurrentConsumers", concurrentConsumers);
return this;
}
/**
* The timeout (in milliseconds) used when polling. When a timeout
* occurs, the consumer can check whether it is allowed to continue
* running. Setting a lower value allows the consumer to react more
* quickly upon shutdown.
*
* The option is a: <code>int</code> type.
*
* Default: 1000
* Group: consumer (advanced)
*
* @param defaultPollTimeout the value to set
* @return the dsl builder
*/
default StubComponentBuilder defaultPollTimeout(int defaultPollTimeout) {
doSetProperty("defaultPollTimeout", defaultPollTimeout);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default StubComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether a thread that sends messages to a full SEDA queue will block
* until the queue's capacity is no longer exhausted. By default, an
* exception will be thrown stating that the queue is full. By enabling
* this option, the calling thread will instead block and wait until the
* message can be accepted.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param defaultBlockWhenFull the value to set
* @return the dsl builder
*/
default StubComponentBuilder defaultBlockWhenFull(boolean defaultBlockWhenFull) {
doSetProperty("defaultBlockWhenFull", defaultBlockWhenFull);
return this;
}
/**
* Whether a thread that sends messages to a full SEDA queue will be
* discarded. By default, an exception will be thrown stating that the
* queue is full. By enabling this option, the calling thread will give
* up sending and continue, meaning that the message was not sent to the
* SEDA queue.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param defaultDiscardWhenFull the value to set
* @return the dsl builder
*/
default StubComponentBuilder defaultDiscardWhenFull(boolean defaultDiscardWhenFull) {
doSetProperty("defaultDiscardWhenFull", defaultDiscardWhenFull);
return this;
}
/**
* Whether a thread that sends messages to a full SEDA queue will block
* until the queue's capacity is no longer exhausted. By default, an
* exception will be thrown stating that the queue is full. By enabling
* this option, where a configured timeout can be added to the block
* case. Using the offer(timeout) method of the underlining java queue.
*
* The option is a: <code>long</code> type.
*
* Group: producer (advanced)
*
* @param defaultOfferTimeout the value to set
* @return the dsl builder
*/
default StubComponentBuilder defaultOfferTimeout(long defaultOfferTimeout) {
doSetProperty("defaultOfferTimeout", defaultOfferTimeout);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default StubComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* Sets the default queue factory.
*
* The option is a:
* <code>org.apache.camel.component.seda.BlockingQueueFactory&lt;org.apache.camel.Exchange&gt;</code> type.
*
* Group: advanced
*
* @param defaultQueueFactory the value to set
* @return the dsl builder
*/
default StubComponentBuilder defaultQueueFactory(org.apache.camel.component.seda.BlockingQueueFactory<org.apache.camel.Exchange> defaultQueueFactory) {
doSetProperty("defaultQueueFactory", defaultQueueFactory);
return this;
}
/**
* Sets the default maximum capacity of the SEDA queue (i.e., the number
* of messages it can hold).
*
* The option is a: <code>int</code> type.
*
* Default: 1000
* Group: advanced
*
* @param queueSize the value to set
* @return the dsl builder
*/
default StubComponentBuilder queueSize(int queueSize) {
doSetProperty("queueSize", queueSize);
return this;
}
/**
* If shadow is enabled then the stub component will register a shadow
* endpoint with the actual uri that refers to the stub endpoint,
* meaning you can lookup the endpoint via both stub:kafka:cheese and
* kafka:cheese.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param shadow the value to set
* @return the dsl builder
*/
default StubComponentBuilder shadow(boolean shadow) {
doSetProperty("shadow", shadow);
return this;
}
/**
* If shadow is enabled then this pattern can be used to filter which
* components to match. Multiple patterns can be separated by comma.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*
* @param shadowPattern the value to set
* @return the dsl builder
*/
default StubComponentBuilder shadowPattern(java.lang.String shadowPattern) {
doSetProperty("shadowPattern", shadowPattern);
return this;
}
}
| StubComponentBuilder |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/StateHandleTransferSpec.java | {
"start": 970,
"end": 1117
} | class ____ a transfer specification for the content of one {@link
* IncrementalRemoteKeyedStateHandle} to a target {@link Path}.
*/
public | represents |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/interpolation/reflection/ClassMap.java | {
"start": 11396,
"end": 11726
} | class ____ public.
if ((clazz.getModifiers() & Modifier.PUBLIC) != 0) {
return method;
}
return getPublicMethod(clazz, method.getName(), method.getParameterTypes());
}
/**
* Looks up the method with specified name and signature in the first public
* superclass or implemented | is |
java | quarkusio__quarkus | integration-tests/devmode/src/test/java/io/quarkus/test/qute/QuteErrorPageTest.java | {
"start": 363,
"end": 988
} | class ____ {
@RegisterExtension
static final QuarkusDevModeTest config = new QuarkusDevModeTest()
.withApplicationRoot(
root -> root.addAsResource(new StringAsset("{hello.foo}"), "templates/hello.txt"));
@Test
public void testErrorPage() {
config.modifyResourceFile("templates/hello.txt", file -> "{@java.lang.String hello}{hello.foo}");
RestAssured.when().get("/hello").then()
.body(containsString("hello.txt:1"), containsString("{hello.foo}"))
.statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode());
}
}
| QuteErrorPageTest |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/framework/AdvisedSupport.java | {
"start": 2286,
"end": 2468
} | class ____ used to hold snapshots of proxies.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @author Sam Brannen
* @see org.springframework.aop.framework.AopProxy
*/
public | is |
java | spring-projects__spring-framework | spring-r2dbc/src/test/java/org/springframework/r2dbc/connection/R2dbcTransactionManagerTests.java | {
"start": 2558,
"end": 28416
} | class ____ {
ConnectionFactory connectionFactoryMock = mock();
Connection connectionMock = mock();
private R2dbcTransactionManager tm;
@BeforeEach
@SuppressWarnings({"rawtypes", "unchecked"})
void before() {
when(connectionFactoryMock.create()).thenReturn((Mono) Mono.just(connectionMock));
when(connectionMock.beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class))).thenReturn(Mono.empty());
when(connectionMock.close()).thenReturn(Mono.empty());
tm = new R2dbcTransactionManager(connectionFactoryMock);
}
@Test
void testSimpleTransaction() {
when(connectionMock.isAutoCommit()).thenReturn(false);
AtomicInteger commits = new AtomicInteger();
when(connectionMock.commitTransaction()).thenReturn(
Mono.fromRunnable(commits::incrementAndGet));
TestTransactionSynchronization sync = new TestTransactionSynchronization(
TransactionSynchronization.STATUS_COMMITTED);
TransactionalOperator operator = TransactionalOperator.create(tm);
ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.flatMap(connection -> TransactionSynchronizationManager.forCurrentTransaction()
.doOnNext(synchronizationManager -> synchronizationManager.registerSynchronization(sync)))
.as(operator::transactional)
.as(StepVerifier::create)
.expectNextCount(1)
.verifyComplete();
assertThat(commits).hasValue(1);
verify(connectionMock).isAutoCommit();
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock).commitTransaction();
verify(connectionMock).close();
verifyNoMoreInteractions(connectionMock);
assertThat(sync.beforeCommitCalled).isTrue();
assertThat(sync.afterCommitCalled).isTrue();
assertThat(sync.beforeCompletionCalled).isTrue();
assertThat(sync.afterCompletionCalled).isTrue();
}
@Test
void testBeginFails() {
reset(connectionFactoryMock);
when(connectionFactoryMock.create()).thenReturn(
Mono.error(new R2dbcBadGrammarException("fail")));
when(connectionMock.rollbackTransaction()).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setIsolationLevel(TransactionDefinition.ISOLATION_SERIALIZABLE);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.as(operator::transactional)
.as(StepVerifier::create)
.expectErrorSatisfies(actual -> assertThat(actual).isInstanceOf(
CannotCreateTransactionException.class).hasCauseInstanceOf(R2dbcBadGrammarException.class))
.verify();
}
@Test
void appliesTransactionDefinitionAndAutoCommit() {
when(connectionMock.isAutoCommit()).thenReturn(true, false);
when(connectionMock.commitTransaction()).thenReturn(Mono.empty());
when(connectionMock.setAutoCommit(true)).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setName("my-transaction");
definition.setTimeout(10);
definition.setReadOnly(true);
definition.setIsolationLevel(TransactionDefinition.ISOLATION_SERIALIZABLE);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
operator.execute(tx -> {
assertThat(tx.getTransactionName()).isEqualTo("my-transaction");
assertThat(tx.hasTransaction()).isTrue();
assertThat(tx.isNewTransaction()).isTrue();
assertThat(tx.isNested()).isFalse();
assertThat(tx.isReadOnly()).isTrue();
assertThat(tx.isRollbackOnly()).isFalse();
assertThat(tx.isCompleted()).isFalse();
return Mono.empty();
}).as(StepVerifier::create).verifyComplete();
ArgumentCaptor<io.r2dbc.spi.TransactionDefinition> txCaptor = ArgumentCaptor.forClass(io.r2dbc.spi.TransactionDefinition.class);
verify(connectionMock).beginTransaction(txCaptor.capture());
verify(connectionMock, never()).setTransactionIsolationLevel(any());
verify(connectionMock).commitTransaction();
verify(connectionMock).setAutoCommit(true);
verify(connectionMock).close();
io.r2dbc.spi.TransactionDefinition def = txCaptor.getValue();
assertThat(def.getAttribute(io.r2dbc.spi.TransactionDefinition.NAME)).isEqualTo("my-transaction");
assertThat(def.getAttribute(io.r2dbc.spi.TransactionDefinition.LOCK_WAIT_TIMEOUT)).isEqualTo(Duration.ofSeconds(10));
assertThat(def.getAttribute(io.r2dbc.spi.TransactionDefinition.READ_ONLY)).isTrue();
assertThat(def.getAttribute(io.r2dbc.spi.TransactionDefinition.ISOLATION_LEVEL)).isEqualTo(IsolationLevel.SERIALIZABLE);
}
@Test
void doesNotSetAutoCommitIfRestoredByDriver() {
when(connectionMock.isAutoCommit()).thenReturn(true, true);
when(connectionMock.commitTransaction()).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.as(operator::transactional)
.as(StepVerifier::create)
.expectNextCount(1)
.verifyComplete();
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock, never()).setAutoCommit(anyBoolean());
verify(connectionMock).commitTransaction();
}
@Test
void appliesReadOnly() {
when(connectionMock.isAutoCommit()).thenReturn(false);
when(connectionMock.commitTransaction()).thenReturn(Mono.empty());
when(connectionMock.setTransactionIsolationLevel(any())).thenReturn(Mono.empty());
Statement statement = mock();
when(connectionMock.createStatement(anyString())).thenReturn(statement);
when(statement.execute()).thenReturn(Mono.empty());
tm.setEnforceReadOnly(true);
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setReadOnly(true);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.as(operator::transactional)
.as(StepVerifier::create)
.expectNextCount(1)
.verifyComplete();
verify(connectionMock).isAutoCommit();
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock).createStatement("SET TRANSACTION READ ONLY");
verify(connectionMock).commitTransaction();
verify(connectionMock).close();
verifyNoMoreInteractions(connectionMock);
}
@Test
void testCommitFails() {
when(connectionMock.isAutoCommit()).thenReturn(false);
when(connectionMock.commitTransaction()).thenReturn(Mono.defer(() ->
Mono.error(new R2dbcBadGrammarException("Commit should fail"))));
when(connectionMock.rollbackTransaction()).thenReturn(Mono.empty());
TransactionalOperator operator = TransactionalOperator.create(tm);
ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.doOnNext(connection -> connection.createStatement("foo")).then()
.as(operator::transactional)
.as(StepVerifier::create)
.verifyError(BadSqlGrammarException.class);
verify(connectionMock).isAutoCommit();
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock).createStatement("foo");
verify(connectionMock).commitTransaction();
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
verifyNoMoreInteractions(connectionMock);
}
@Test
void testRollback() {
when(connectionMock.isAutoCommit()).thenReturn(false);
AtomicInteger commits = new AtomicInteger();
when(connectionMock.commitTransaction()).thenReturn(
Mono.fromRunnable(commits::incrementAndGet));
AtomicInteger rollbacks = new AtomicInteger();
when(connectionMock.rollbackTransaction()).thenReturn(
Mono.fromRunnable(rollbacks::incrementAndGet));
TransactionalOperator operator = TransactionalOperator.create(tm);
ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.doOnNext(connection -> { throw new IllegalStateException(); })
.as(operator::transactional)
.as(StepVerifier::create).verifyError(IllegalStateException.class);
assertThat(commits).hasValue(0);
assertThat(rollbacks).hasValue(1);
verify(connectionMock).isAutoCommit();
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
verifyNoMoreInteractions(connectionMock);
}
@Test
@SuppressWarnings("unchecked")
void testRollbackFails() {
when(connectionMock.rollbackTransaction()).thenReturn(Mono.defer(() ->
Mono.error(new R2dbcBadGrammarException("Commit should fail"))), Mono.empty());
TransactionalOperator operator = TransactionalOperator.create(tm);
operator.execute(reactiveTransaction -> {
reactiveTransaction.setRollbackOnly();
return ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.doOnNext(connection -> connection.createStatement("foo")).then();
}).as(StepVerifier::create).verifyError(BadSqlGrammarException.class);
verify(connectionMock).isAutoCommit();
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock).createStatement("foo");
verify(connectionMock, never()).commitTransaction();
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
verifyNoMoreInteractions(connectionMock);
}
@Test
@SuppressWarnings("unchecked")
void testConnectionReleasedWhenRollbackFails() {
when(connectionMock.rollbackTransaction()).thenReturn(Mono.defer(() ->
Mono.error(new R2dbcBadGrammarException("Rollback should fail"))), Mono.empty());
when(connectionMock.setTransactionIsolationLevel(any())).thenReturn(Mono.empty());
TransactionalOperator operator = TransactionalOperator.create(tm);
operator.execute(reactiveTransaction -> ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.doOnNext(connection -> {
throw new IllegalStateException("Intentional error to trigger rollback");
}).then()).as(StepVerifier::create)
.verifyErrorSatisfies(ex -> assertThat(ex)
.isInstanceOf(BadSqlGrammarException.class)
.hasCause(new R2dbcBadGrammarException("Rollback should fail"))
);
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock, never()).commitTransaction();
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
}
@Test
void testCommitAndRollbackFails() {
when(connectionMock.isAutoCommit()).thenReturn(false);
when(connectionMock.commitTransaction()).thenReturn(Mono.defer(() ->
Mono.error(new R2dbcBadGrammarException("Commit should fail"))));
when(connectionMock.rollbackTransaction()).thenReturn(Mono.defer(() ->
Mono.error(new R2dbcTransientResourceException("Rollback should also fail"))));
TransactionalOperator operator = TransactionalOperator.create(tm);
ConnectionFactoryUtils.getConnection(connectionFactoryMock)
.doOnNext(connection -> connection.createStatement("foo")).then()
.as(operator::transactional)
.as(StepVerifier::create)
.verifyError(TransientDataAccessResourceException.class);
verify(connectionMock).isAutoCommit();
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock).createStatement("foo");
verify(connectionMock).commitTransaction();
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
verifyNoMoreInteractions(connectionMock);
}
@Test
void testTransactionSetRollbackOnly() {
when(connectionMock.isAutoCommit()).thenReturn(false);
when(connectionMock.rollbackTransaction()).thenReturn(Mono.empty());
TestTransactionSynchronization sync = new TestTransactionSynchronization(
TransactionSynchronization.STATUS_ROLLED_BACK);
TransactionalOperator operator = TransactionalOperator.create(tm);
operator.execute(tx -> {
assertThat(tx.getTransactionName()).isEmpty();
assertThat(tx.hasTransaction()).isTrue();
assertThat(tx.isNewTransaction()).isTrue();
assertThat(tx.isNested()).isFalse();
assertThat(tx.isReadOnly()).isFalse();
assertThat(tx.isRollbackOnly()).isFalse();
tx.setRollbackOnly();
assertThat(tx.isRollbackOnly()).isTrue();
assertThat(tx.isCompleted()).isFalse();
return TransactionSynchronizationManager.forCurrentTransaction().doOnNext(
synchronizationManager -> {
assertThat(synchronizationManager.hasResource(connectionFactoryMock)).isTrue();
synchronizationManager.registerSynchronization(sync);
}).then();
}).as(StepVerifier::create).verifyComplete();
verify(connectionMock).isAutoCommit();
verify(connectionMock).beginTransaction(any(io.r2dbc.spi.TransactionDefinition.class));
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
verifyNoMoreInteractions(connectionMock);
assertThat(sync.beforeCommitCalled).isFalse();
assertThat(sync.afterCommitCalled).isFalse();
assertThat(sync.beforeCompletionCalled).isTrue();
assertThat(sync.afterCompletionCalled).isTrue();
}
@Test
void testPropagationNeverWithExistingTransaction() {
when(connectionMock.rollbackTransaction()).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
operator.execute(tx1 -> {
assertThat(tx1.getTransactionName()).isEmpty();
assertThat(tx1.hasTransaction()).isTrue();
assertThat(tx1.isNewTransaction()).isTrue();
assertThat(tx1.isNested()).isFalse();
assertThat(tx1.isReadOnly()).isFalse();
assertThat(tx1.isRollbackOnly()).isFalse();
assertThat(tx1.isCompleted()).isFalse();
definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_NEVER);
return operator.execute(tx2 -> {
fail("Should have thrown IllegalTransactionStateException");
return Mono.empty();
});
}).as(StepVerifier::create).verifyError(IllegalTransactionStateException.class);
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
}
@Test
void testPropagationNestedWithExistingTransaction() {
when(connectionMock.createSavepoint(anyString())).thenReturn(Mono.empty());
when(connectionMock.rollbackTransactionToSavepoint(anyString())).thenReturn(Mono.empty());
when(connectionMock.releaseSavepoint(anyString())).thenReturn(Mono.empty());
when(connectionMock.commitTransaction()).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
operator.execute(tx -> {
assertThat(tx.hasTransaction()).isTrue();
assertThat(tx.isNewTransaction()).isTrue();
assertThat(tx.isNested()).isFalse();
definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_NESTED);
return Flux.concat(
TransactionalOperator.create(tm, definition).execute(ntx1 -> {
assertThat(ntx1.hasTransaction()).as("ntx1.hasTransaction()").isTrue();
assertThat(ntx1.isNewTransaction()).as("ntx1.isNewTransaction()").isTrue();
assertThat(ntx1.isNested()).as("ntx1.isNested()").isTrue();
assertThat(ntx1.isRollbackOnly()).as("ntx1.isRollbackOnly()").isFalse();
return Mono.empty();
}),
TransactionalOperator.create(tm, definition).execute(ntx2 -> {
assertThat(ntx2.hasTransaction()).as("ntx2.hasTransaction()").isTrue();
assertThat(ntx2.isNewTransaction()).as("ntx2.isNewTransaction()").isTrue();
assertThat(ntx2.isNested()).as("ntx2.isNested()").isTrue();
assertThat(ntx2.isRollbackOnly()).as("ntx2.isRollbackOnly()").isFalse();
ntx2.setRollbackOnly();
assertThat(ntx2.isRollbackOnly()).isTrue();
return Mono.empty();
}),
TransactionalOperator.create(tm, definition).execute(ntx3 -> {
assertThat(ntx3.hasTransaction()).as("ntx3.hasTransaction()").isTrue();
assertThat(ntx3.isNewTransaction()).as("ntx3.isNewTransaction()").isTrue();
assertThat(ntx3.isNested()).as("ntx3.isNested()").isTrue();
assertThat(ntx3.isRollbackOnly()).as("ntx3.isRollbackOnly()").isFalse();
return Mono.empty();
}),
TransactionalOperator.create(tm, definition).execute(ntx4 -> {
assertThat(ntx4.hasTransaction()).as("ntx4.hasTransaction()").isTrue();
assertThat(ntx4.isNewTransaction()).as("ntx4.isNewTransaction()").isTrue();
assertThat(ntx4.isNested()).as("ntx4.isNested()").isTrue();
assertThat(ntx4.isRollbackOnly()).as("ntx4.isRollbackOnly()").isFalse();
ntx4.setRollbackOnly();
assertThat(ntx4.isRollbackOnly()).isTrue();
return Flux.concat(
TransactionalOperator.create(tm, definition).execute(ntx4n1 -> {
assertThat(ntx4n1.hasTransaction()).as("ntx4n1.hasTransaction()").isTrue();
assertThat(ntx4n1.isNewTransaction()).as("ntx4n1.isNewTransaction()").isTrue();
assertThat(ntx4n1.isNested()).as("ntx4n1.isNested()").isTrue();
assertThat(ntx4n1.isRollbackOnly()).as("ntx4n1.isRollbackOnly()").isFalse();
return Mono.empty();
}),
TransactionalOperator.create(tm, definition).execute(ntx4n2 -> {
assertThat(ntx4n2.hasTransaction()).as("ntx4n2.hasTransaction()").isTrue();
assertThat(ntx4n2.isNewTransaction()).as("ntx4n2.isNewTransaction()").isTrue();
assertThat(ntx4n2.isNested()).as("ntx4n2.isNested()").isTrue();
assertThat(ntx4n2.isRollbackOnly()).as("ntx4n2.isRollbackOnly()").isFalse();
ntx4n2.setRollbackOnly();
assertThat(ntx4n2.isRollbackOnly()).isTrue();
return Mono.empty();
})
);
}),
TransactionalOperator.create(tm, definition).execute(ntx5 -> {
assertThat(ntx5.hasTransaction()).as("ntx5.hasTransaction()").isTrue();
assertThat(ntx5.isNewTransaction()).as("ntx5.isNewTransaction()").isTrue();
assertThat(ntx5.isNested()).as("ntx5.isNested()").isTrue();
assertThat(ntx5.isRollbackOnly()).as("ntx5.isRollbackOnly()").isFalse();
ntx5.setRollbackOnly();
assertThat(ntx5.isRollbackOnly()).isTrue();
return Flux.concat(
TransactionalOperator.create(tm, definition).execute(ntx5n1 -> {
assertThat(ntx5n1.hasTransaction()).as("ntx5n1.hasTransaction()").isTrue();
assertThat(ntx5n1.isNewTransaction()).as("ntx5n1.isNewTransaction()").isTrue();
assertThat(ntx5n1.isNested()).as("ntx5n1.isNested()").isTrue();
assertThat(ntx5n1.isRollbackOnly()).as("ntx5n1.isRollbackOnly()").isFalse();
return Mono.empty();
}),
TransactionalOperator.create(tm, definition).execute(ntx5n2 -> {
assertThat(ntx5n2.hasTransaction()).as("ntx5n2.hasTransaction()").isTrue();
assertThat(ntx5n2.isNewTransaction()).as("ntx5n2.isNewTransaction()").isTrue();
assertThat(ntx5n2.isNested()).as("ntx5n2.isNested()").isTrue();
assertThat(ntx5n2.isRollbackOnly()).as("ntx5n2.isRollbackOnly()").isFalse();
ntx5n2.setRollbackOnly();
assertThat(ntx5n2.isRollbackOnly()).isTrue();
return Mono.empty();
})
);
})
);
}).as(StepVerifier::create).verifyComplete();
InOrder inOrder = inOrder(connectionMock);
// ntx1
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_1");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_1");
// ntx2
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_2");
inOrder.verify(connectionMock).rollbackTransactionToSavepoint("SAVEPOINT_2");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_2");
// ntx3
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_3");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_3");
// ntx4
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_4");
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_5");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_5");
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_6");
inOrder.verify(connectionMock).rollbackTransactionToSavepoint("SAVEPOINT_6");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_6");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_4");
// ntx5
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_7");
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_8");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_8");
inOrder.verify(connectionMock).createSavepoint("SAVEPOINT_9");
inOrder.verify(connectionMock).rollbackTransactionToSavepoint("SAVEPOINT_9");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_9");
inOrder.verify(connectionMock).rollbackTransactionToSavepoint("SAVEPOINT_7");
inOrder.verify(connectionMock).releaseSavepoint("SAVEPOINT_7");
// tx
inOrder.verify(connectionMock).commitTransaction();
inOrder.verify(connectionMock).close();
}
@Test
void testPropagationSupportsAndNested() {
when(connectionMock.commitTransaction()).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_SUPPORTS);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
operator.execute(tx1 -> {
assertThat(tx1.hasTransaction()).isFalse();
assertThat(tx1.isNewTransaction()).isFalse();
assertThat(tx1.isNested()).isFalse();
DefaultTransactionDefinition innerDef = new DefaultTransactionDefinition();
innerDef.setPropagationBehavior(TransactionDefinition.PROPAGATION_NESTED);
TransactionalOperator inner = TransactionalOperator.create(tm, innerDef);
return inner.execute(tx2 -> {
assertThat(tx2.hasTransaction()).isTrue();
assertThat(tx2.isNewTransaction()).isTrue();
assertThat(tx2.isNested()).isFalse();
return Mono.empty();
});
}).as(StepVerifier::create).verifyComplete();
verify(connectionMock).commitTransaction();
verify(connectionMock).close();
}
@Test
void testPropagationSupportsAndNestedWithRollback() {
when(connectionMock.rollbackTransaction()).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_SUPPORTS);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
operator.execute(tx1 -> {
assertThat(tx1.hasTransaction()).isFalse();
assertThat(tx1.isNewTransaction()).isFalse();
assertThat(tx1.isNested()).isFalse();
DefaultTransactionDefinition innerDef = new DefaultTransactionDefinition();
innerDef.setPropagationBehavior(TransactionDefinition.PROPAGATION_NESTED);
TransactionalOperator inner = TransactionalOperator.create(tm, innerDef);
return inner.execute(tx2 -> {
assertThat(tx2.hasTransaction()).isTrue();
assertThat(tx2.isNewTransaction()).isTrue();
assertThat(tx2.isNested()).isFalse();
assertThat(tx2.isRollbackOnly()).isFalse();
tx2.setRollbackOnly();
assertThat(tx2.isRollbackOnly()).isTrue();
return Mono.empty();
});
}).as(StepVerifier::create).verifyComplete();
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
}
@Test
void testPropagationSupportsAndRequiresNew() {
when(connectionMock.commitTransaction()).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_SUPPORTS);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
operator.execute(tx1 -> {
assertThat(tx1.hasTransaction()).isFalse();
assertThat(tx1.isNewTransaction()).isFalse();
assertThat(tx1.isNested()).isFalse();
DefaultTransactionDefinition innerDef = new DefaultTransactionDefinition();
innerDef.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionalOperator inner = TransactionalOperator.create(tm, innerDef);
return inner.execute(tx2 -> {
assertThat(tx2.hasTransaction()).isTrue();
assertThat(tx2.isNewTransaction()).isTrue();
assertThat(tx2.isNested()).isFalse();
return Mono.empty();
});
}).as(StepVerifier::create).verifyComplete();
verify(connectionMock).commitTransaction();
verify(connectionMock).close();
}
@Test
void testPropagationSupportsAndRequiresNewWithRollback() {
when(connectionMock.rollbackTransaction()).thenReturn(Mono.empty());
DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_SUPPORTS);
TransactionalOperator operator = TransactionalOperator.create(tm, definition);
operator.execute(tx1 -> {
assertThat(tx1.hasTransaction()).isFalse();
assertThat(tx1.isNewTransaction()).isFalse();
assertThat(tx1.isNested()).isFalse();
DefaultTransactionDefinition innerDef = new DefaultTransactionDefinition();
innerDef.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionalOperator inner = TransactionalOperator.create(tm, innerDef);
return inner.execute(tx2 -> {
assertThat(tx2.hasTransaction()).isTrue();
assertThat(tx2.isNewTransaction()).isTrue();
assertThat(tx2.isNested()).isFalse();
assertThat(tx2.isRollbackOnly()).isFalse();
tx2.setRollbackOnly();
assertThat(tx2.isRollbackOnly()).isTrue();
return Mono.empty();
});
}).as(StepVerifier::create).verifyComplete();
verify(connectionMock).rollbackTransaction();
verify(connectionMock).close();
}
private static | R2dbcTransactionManagerTests |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoCollection.java | {
"start": 19535,
"end": 20408
} | class ____ decode each document into
* @param <D> the target document type of the iterable.
* @return the stream of changes
*/
<D> Multi<ChangeStreamDocument<D>> watch(ClientSession clientSession, Class<D> clazz);
/**
* Creates a change stream for this collection.
*
* @param clientSession the client session with which to associate this operation
* @param pipeline the aggregation pipeline to apply to the change stream
* @return the stream of changes
*/
Multi<ChangeStreamDocument<Document>> watch(ClientSession clientSession, List<? extends Bson> pipeline);
/**
* Creates a change stream for this collection.
*
* @param clientSession the client session with which to associate this operation
* @param pipeline the aggregation pipeline to apply to the change stream
* @param clazz the | to |
java | netty__netty | resolver-dns/src/main/java/io/netty/resolver/dns/MultiDnsServerAddressStreamProvider.java | {
"start": 888,
"end": 2018
} | class ____ implements DnsServerAddressStreamProvider {
private final DnsServerAddressStreamProvider[] providers;
/**
* Create a new instance.
* @param providers The providers to use for DNS resolution. They will be queried in order.
*/
public MultiDnsServerAddressStreamProvider(List<DnsServerAddressStreamProvider> providers) {
this.providers = providers.toArray(new DnsServerAddressStreamProvider[0]);
}
/**
* Create a new instance.
* @param providers The providers to use for DNS resolution. They will be queried in order.
*/
public MultiDnsServerAddressStreamProvider(DnsServerAddressStreamProvider... providers) {
this.providers = providers.clone();
}
@Override
public DnsServerAddressStream nameServerAddressStream(String hostname) {
for (DnsServerAddressStreamProvider provider : providers) {
DnsServerAddressStream stream = provider.nameServerAddressStream(hostname);
if (stream != null) {
return stream;
}
}
return null;
}
}
| MultiDnsServerAddressStreamProvider |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/DelegatingReactiveMessageService.java | {
"start": 1079,
"end": 4655
} | class ____ implements ReactiveMessageService {
private final ReactiveMessageService delegate;
public DelegatingReactiveMessageService(ReactiveMessageService delegate) {
this.delegate = delegate;
}
@Override
@PreAuthorize("denyAll")
public String notPublisherPreAuthorizeFindById(long id) {
return this.delegate.notPublisherPreAuthorizeFindById(id);
}
@Override
public Mono<String> monoFindById(long id) {
return this.delegate.monoFindById(id);
}
@Override
@PreAuthorize("hasRole('ADMIN')")
public Mono<String> monoPreAuthorizeHasRoleFindById(long id) {
return this.delegate.monoPreAuthorizeHasRoleFindById(id);
}
@Override
@PostAuthorize("returnObject?.contains(authentication?.name)")
public Mono<String> monoPostAuthorizeFindById(long id) {
return this.delegate.monoPostAuthorizeFindById(id);
}
@Override
@PreAuthorize("@authz.check(#id)")
public Mono<String> monoPreAuthorizeBeanFindById(long id) {
return this.delegate.monoPreAuthorizeBeanFindById(id);
}
@Override
@PreAuthorize("@authz.checkReactive(#id)")
public Mono<String> monoPreAuthorizeBeanFindByIdReactiveExpression(long id) {
return this.delegate.monoPreAuthorizeBeanFindByIdReactiveExpression(id);
}
@Override
@PostAuthorize("@authz.check(authentication, returnObject)")
public Mono<String> monoPostAuthorizeBeanFindById(long id) {
return this.delegate.monoPostAuthorizeBeanFindById(id);
}
@Override
public Flux<String> fluxFindById(long id) {
return this.delegate.fluxFindById(id);
}
@Override
@PreAuthorize("hasRole('ADMIN')")
public Flux<String> fluxPreAuthorizeHasRoleFindById(long id) {
return this.delegate.fluxPreAuthorizeHasRoleFindById(id);
}
@Override
@PostAuthorize("returnObject?.contains(authentication?.name)")
public Flux<String> fluxPostAuthorizeFindById(long id) {
return this.delegate.fluxPostAuthorizeFindById(id);
}
@Override
@PreAuthorize("@authz.check(#id)")
public Flux<String> fluxPreAuthorizeBeanFindById(long id) {
return this.delegate.fluxPreAuthorizeBeanFindById(id);
}
@Override
@PostAuthorize("@authz.check(authentication, returnObject)")
public Flux<String> fluxPostAuthorizeBeanFindById(long id) {
return this.delegate.fluxPostAuthorizeBeanFindById(id);
}
@PreFilter("filterObject.length > 3")
@PreAuthorize("hasRole('ADMIN')")
@PostFilter("filterObject.length > 5")
@PostAuthorize("returnObject == 'harold' or returnObject == 'jonathan'")
@Override
public Flux<String> fluxManyAnnotations(Flux<String> flux) {
return flux;
}
@PostFilter("filterObject.length > 5")
public Flux<String> fluxPostFilter(Flux<String> flux) {
return flux;
}
@Override
public Publisher<String> publisherFindById(long id) {
return this.delegate.publisherFindById(id);
}
@Override
@PreAuthorize("hasRole('ADMIN')")
public Publisher<String> publisherPreAuthorizeHasRoleFindById(long id) {
return this.delegate.publisherPreAuthorizeHasRoleFindById(id);
}
@Override
@PostAuthorize("returnObject?.contains(authentication?.name)")
public Publisher<String> publisherPostAuthorizeFindById(long id) {
return this.delegate.publisherPostAuthorizeFindById(id);
}
@Override
@PreAuthorize("@authz.check(#id)")
public Publisher<String> publisherPreAuthorizeBeanFindById(long id) {
return this.delegate.publisherPreAuthorizeBeanFindById(id);
}
@Override
@PostAuthorize("@authz.check(authentication, returnObject)")
public Publisher<String> publisherPostAuthorizeBeanFindById(long id) {
return this.delegate.publisherPostAuthorizeBeanFindById(id);
}
}
| DelegatingReactiveMessageService |
java | google__guava | android/guava/src/com/google/common/graph/Network.java | {
"start": 1349,
"end": 4460
} | interface ____ satisfies your use case. See the <a
* href="https://github.com/google/guava/wiki/GraphsExplained#choosing-the-right-graph-type">
* "Choosing the right graph type"</a> section of the Guava User Guide for more details.
*
* <h3>Capabilities</h3>
*
* <p>{@code Network} supports the following use cases (<a
* href="https://github.com/google/guava/wiki/GraphsExplained#definitions">definitions of
* terms</a>):
*
* <ul>
* <li>directed graphs
* <li>undirected graphs
* <li>graphs that do/don't allow parallel edges
* <li>graphs that do/don't allow self-loops
* <li>graphs whose nodes/edges are insertion-ordered, sorted, or unordered
* <li>graphs whose edges are <a
* href="https://github.com/google/guava/wiki/GraphsExplained#uniqueness">unique</a> objects
* </ul>
*
* <h3>Building a {@code Network}</h3>
*
* <p>The implementation classes that {@code common.graph} provides are not public, by design. To
* create an instance of one of the built-in implementations of {@code Network}, use the {@link
* NetworkBuilder} class:
*
* {@snippet :
* MutableNetwork<Integer, MyEdge> network = NetworkBuilder.directed().build();
* }
*
* <p>{@link NetworkBuilder#build()} returns an instance of {@link MutableNetwork}, which is a
* subtype of {@code Network} that provides methods for adding and removing nodes and edges. If you
* do not need to mutate a network (e.g. if you write a method than runs a read-only algorithm on
* the network), you should use the non-mutating {@link Network} interface, or an {@link
* ImmutableNetwork}.
*
* <p>You can create an immutable copy of an existing {@code Network} using {@link
* ImmutableNetwork#copyOf(Network)}:
*
* {@snippet :
* ImmutableNetwork<Integer, MyEdge> immutableGraph = ImmutableNetwork.copyOf(network);
* }
*
* <p>Instances of {@link ImmutableNetwork} do not implement {@link MutableNetwork} (obviously!) and
* are contractually guaranteed to be unmodifiable and thread-safe.
*
* <p>The Guava User Guide has <a
* href="https://github.com/google/guava/wiki/GraphsExplained#building-graph-instances">more
* information on (and examples of) building graphs</a>.
*
* <h3>Additional documentation</h3>
*
* <p>See the Guava User Guide for the {@code common.graph} package (<a
* href="https://github.com/google/guava/wiki/GraphsExplained">"Graphs Explained"</a>) for
* additional documentation, including:
*
* <ul>
* <li><a
* href="https://github.com/google/guava/wiki/GraphsExplained#equals-hashcode-and-graph-equivalence">
* {@code equals()}, {@code hashCode()}, and graph equivalence</a>
* <li><a href="https://github.com/google/guava/wiki/GraphsExplained#synchronization">
* Synchronization policy</a>
* <li><a href="https://github.com/google/guava/wiki/GraphsExplained#notes-for-implementors">Notes
* for implementors</a>
* </ul>
*
* @author James Sexton
* @author Joshua O'Madadhain
* @param <N> Node parameter type
* @param <E> Edge parameter type
* @since 20.0
*/
@Beta
@DoNotMock("Use NetworkBuilder to create a real instance")
public | that |
java | apache__camel | components/camel-cxf/camel-cxf-spring-rest/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsSslAsyncProducerTest.java | {
"start": 1626,
"end": 3405
} | class ____ extends CamelSpringTestSupport {
private static int port1 = CXFTestSupport.getSslPort();
public int getPort1() {
return port1;
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/jaxrs/CxfRsSpringSslAsyncProducer.xml");
}
@Test
public void testCorrectTrustStore() {
Exchange exchange = template.send("direct://trust", new MyProcessor());
// get the response message
Customer response = (Customer) exchange.getMessage().getBody();
assertNotNull(response, "The response should not be null");
assertEquals("123", String.valueOf(response.getId()), "Get a wrong customer id");
assertEquals("John", response.getName(), "Get a wrong customer name");
assertEquals(200, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE), "Get a wrong response code");
assertEquals("value", exchange.getMessage().getHeader("key"), "Get a wrong header value");
}
@Test
public void testNoTrustStore() {
Exchange exchange = template.send("direct://noTrust", new MyProcessor());
assertTrue(exchange.isFailed());
Exception e = exchange.getException();
assertEquals("javax.net.ssl.SSLHandshakeException", e.getCause().getClass().getCanonicalName());
}
@Test
public void testWrongTrustStore() {
Exchange exchange = template.send("direct://wrongTrust", new MyProcessor());
assertTrue(exchange.isFailed());
Exception e = exchange.getException();
assertEquals("javax.net.ssl.SSLHandshakeException", e.getCause().getClass().getCanonicalName());
}
private | CxfRsSslAsyncProducerTest |
java | quarkusio__quarkus | extensions/jackson/deployment/src/test/java/io/quarkus/jackson/deployment/JacksonFailOnEmptyBeansNotSetTest.java | {
"start": 377,
"end": 770
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest();
@Inject
ObjectMapper objectMapper;
@Test
public void testFailOnEmptyBeans() {
Assertions.assertThrows(InvalidDefinitionException.class,
() -> objectMapper.writeValueAsString(new Pojo("dummy")));
}
public static | JacksonFailOnEmptyBeansNotSetTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/collection/adder/_target/AdderUsageObserver.java | {
"start": 245,
"end": 518
} | class ____ {
private AdderUsageObserver() {
}
private static boolean used = false;
public static boolean isUsed() {
return used;
}
public static void setUsed(boolean used) {
AdderUsageObserver.used = used;
}
}
| AdderUsageObserver |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/context/internal/GarbageFreeSortedArrayThreadContextMapTest.java | {
"start": 1325,
"end": 4096
} | class ____ extends ThreadContextMapSuite {
private GarbageFreeSortedArrayThreadContextMap createThreadContextMap() {
return new GarbageFreeSortedArrayThreadContextMap();
}
private ThreadContextMap createInheritableThreadContextMap() {
final Properties props = new Properties();
props.setProperty("log4j2.isThreadContextMapInheritable", "true");
final PropertiesUtil util = new PropertiesUtil(props);
return new GarbageFreeSortedArrayThreadContextMap(util);
}
@Test
void singleValue() {
singleValue(createThreadContextMap());
}
@Test
void testPutAll() {
final GarbageFreeSortedArrayThreadContextMap map = createThreadContextMap();
assertTrue(map.isEmpty());
assertFalse(map.containsKey("key"));
final int mapSize = 10;
final Map<String, String> newMap = new HashMap<>(mapSize);
for (int i = 1; i <= mapSize; i++) {
newMap.put("key" + i, "value" + i);
}
map.putAll(newMap);
assertFalse(map.isEmpty());
for (int i = 1; i <= mapSize; i++) {
assertTrue(map.containsKey("key" + i));
assertEquals("value" + i, map.get("key" + i));
}
}
@Test
void testClear() {
final GarbageFreeSortedArrayThreadContextMap map = createMap();
map.clear();
assertTrue(map.isEmpty());
assertFalse(map.containsKey("key"));
assertFalse(map.containsKey("key2"));
}
private GarbageFreeSortedArrayThreadContextMap createMap() {
final GarbageFreeSortedArrayThreadContextMap map = createThreadContextMap();
assertTrue(map.isEmpty());
map.put("key", "value");
map.put("key2", "value2");
assertEquals("value", map.get("key"));
assertEquals("value2", map.get("key2"));
return map;
}
@Test
void getCopyReturnsMutableCopy() {
getCopyReturnsMutableCopy(createThreadContextMap());
}
@Test
void getImmutableMapReturnsNullIfEmpty() {
getImmutableMapReturnsNullIfEmpty(createThreadContextMap());
}
@Test
void getImmutableMapReturnsImmutableMapIfNonEmpty() {
getImmutableMapReturnsImmutableMapIfNonEmpty(createThreadContextMap());
}
@Test
void getImmutableMapCopyNotAffectedByContextMapChanges() {
getImmutableMapCopyNotAffectedByContextMapChanges(createThreadContextMap());
}
@Test
void threadLocalNotInheritableByDefault() {
threadLocalNotInheritableByDefault(createThreadContextMap());
}
@Test
void threadLocalInheritableIfConfigured() {
threadLocalInheritableIfConfigured(createInheritableThreadContextMap());
}
}
| GarbageFreeSortedArrayThreadContextMapTest |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTemplate.java | {
"start": 9710,
"end": 17287
} | class ____ {
public static HttpRequestTemplate parse(XContentParser parser) throws IOException {
assert parser.currentToken() == XContentParser.Token.START_OBJECT;
Builder builder = new Builder();
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (HttpRequest.Field.PROXY.match(currentFieldName, parser.getDeprecationHandler())) {
builder.proxy(HttpProxy.parse(parser));
} else if (HttpRequest.Field.PATH.match(currentFieldName, parser.getDeprecationHandler())) {
builder.path(parseFieldTemplate(currentFieldName, parser));
} else if (HttpRequest.Field.HEADERS.match(currentFieldName, parser.getDeprecationHandler())) {
builder.putHeaders(parseFieldTemplates(currentFieldName, parser));
} else if (HttpRequest.Field.PARAMS.match(currentFieldName, parser.getDeprecationHandler())) {
builder.putParams(parseFieldTemplates(currentFieldName, parser));
} else if (HttpRequest.Field.BODY.match(currentFieldName, parser.getDeprecationHandler())) {
builder.body(parseFieldTemplate(currentFieldName, parser));
} else if (HttpRequest.Field.URL.match(currentFieldName, parser.getDeprecationHandler())) {
builder.fromUrl(parser.text());
} else if (HttpRequest.Field.CONNECTION_TIMEOUT.match(currentFieldName, parser.getDeprecationHandler())) {
builder.connectionTimeout(TimeValue.timeValueMillis(parser.longValue()));
} else if (HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN.match(currentFieldName, parser.getDeprecationHandler())) {
// Users and 2.x specify the timeout this way
try {
builder.connectionTimeout(
WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.CONNECTION_TIMEOUT.toString())
);
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException(
"could not parse http request template. invalid time value for [{}] field",
pe,
currentFieldName
);
}
} else if (HttpRequest.Field.READ_TIMEOUT.match(currentFieldName, parser.getDeprecationHandler())) {
builder.readTimeout(TimeValue.timeValueMillis(parser.longValue()));
} else if (HttpRequest.Field.READ_TIMEOUT_HUMAN.match(currentFieldName, parser.getDeprecationHandler())) {
// Users and 2.x specify the timeout this way
try {
builder.readTimeout(WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.READ_TIMEOUT.toString()));
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException(
"could not parse http request template. invalid time value for [{}] field",
pe,
currentFieldName
);
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (HttpRequest.Field.AUTH.match(currentFieldName, parser.getDeprecationHandler())) {
builder.auth(BasicAuth.parse(parser));
} else {
throw new ElasticsearchParseException(
"could not parse http request template. unexpected object field [{}]",
currentFieldName
);
}
} else if (token == XContentParser.Token.VALUE_STRING) {
if (HttpRequest.Field.SCHEME.match(currentFieldName, parser.getDeprecationHandler())) {
builder.scheme(Scheme.parse(parser.text()));
} else if (HttpRequest.Field.METHOD.match(currentFieldName, parser.getDeprecationHandler())) {
builder.method(HttpMethod.parse(parser.text()));
} else if (HttpRequest.Field.HOST.match(currentFieldName, parser.getDeprecationHandler())) {
builder.host = parser.text();
} else {
throw new ElasticsearchParseException(
"could not parse http request template. unexpected string field [{}]",
currentFieldName
);
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (HttpRequest.Field.PORT.match(currentFieldName, parser.getDeprecationHandler())) {
builder.port = parser.intValue();
} else {
throw new ElasticsearchParseException(
"could not parse http request template. unexpected numeric field [{}]",
currentFieldName
);
}
} else {
throw new ElasticsearchParseException(
"could not parse http request template. unexpected token [{}] for field [{}]",
token,
currentFieldName
);
}
}
if (builder.host == null) {
throw new ElasticsearchParseException(
"could not parse http request template. missing required [{}] string field",
HttpRequest.Field.HOST.getPreferredName()
);
}
if (builder.port <= 0) {
throw new ElasticsearchParseException(
"could not parse http request template. wrong port for [{}]",
HttpRequest.Field.PORT.getPreferredName()
);
}
return builder.build();
}
private static TextTemplate parseFieldTemplate(String field, XContentParser parser) throws IOException {
try {
return TextTemplate.parse(parser);
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException(
"could not parse http request template. could not parse value for [{}] field",
pe,
field
);
}
}
private static Map<String, TextTemplate> parseFieldTemplates(String field, XContentParser parser) throws IOException {
Map<String, TextTemplate> templates = new HashMap<>();
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
templates.put(currentFieldName, parseFieldTemplate(field, parser));
}
}
return templates;
}
}
public static | Parser |
java | grpc__grpc-java | examples/src/test/java/io/grpc/examples/helloworld/HelloWorldClientTest.java | {
"start": 1688,
"end": 3810
} | class ____ {
/**
* This rule manages automatic graceful shutdown for the registered servers and channels at the
* end of test.
*/
@Rule
public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule();
private final GreeterGrpc.GreeterImplBase serviceImpl =
mock(GreeterGrpc.GreeterImplBase.class, delegatesTo(
new GreeterGrpc.GreeterImplBase() {
// By default the client will receive Status.UNIMPLEMENTED for all RPCs.
// You might need to implement necessary behaviors for your test here, like this:
//
// @Override
// public void sayHello(HelloRequest request, StreamObserver<HelloReply> respObserver) {
// respObserver.onNext(HelloReply.getDefaultInstance());
// respObserver.onCompleted();
// }
}));
private HelloWorldClient client;
@Before
public void setUp() throws Exception {
// Generate a unique in-process server name.
String serverName = InProcessServerBuilder.generateName();
// Create a server, add service, start, and register for automatic graceful shutdown.
grpcCleanup.register(InProcessServerBuilder
.forName(serverName).directExecutor().addService(serviceImpl).build().start());
// Create a client channel and register for automatic graceful shutdown.
ManagedChannel channel = grpcCleanup.register(
InProcessChannelBuilder.forName(serverName).directExecutor().build());
// Create a HelloWorldClient using the in-process channel;
client = new HelloWorldClient(channel);
}
/**
* To test the client, call from the client against the fake server, and verify behaviors or state
* changes from the server side.
*/
@Test
public void greet_messageDeliveredToServer() {
ArgumentCaptor<HelloRequest> requestCaptor = ArgumentCaptor.forClass(HelloRequest.class);
client.greet("test name");
verify(serviceImpl)
.sayHello(requestCaptor.capture(), ArgumentMatchers.<StreamObserver<HelloReply>>any());
assertEquals("test name", requestCaptor.getValue().getName());
}
}
| HelloWorldClientTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/SyncEventHandler.java | {
"start": 1348,
"end": 3583
} | class ____ implements EventListener<TaskEvent> {
private final ClassLoader userCodeClassLoader;
private final Map<String, Aggregator<?>> aggregators;
private final int numberOfEventsUntilEndOfSuperstep;
private int workerDoneEventCounter;
private boolean endOfSuperstep;
public SyncEventHandler(
int numberOfEventsUntilEndOfSuperstep,
Map<String, Aggregator<?>> aggregators,
ClassLoader userCodeClassLoader) {
Preconditions.checkArgument(numberOfEventsUntilEndOfSuperstep > 0);
this.userCodeClassLoader = userCodeClassLoader;
this.numberOfEventsUntilEndOfSuperstep = numberOfEventsUntilEndOfSuperstep;
this.aggregators = aggregators;
}
@Override
public void onEvent(TaskEvent event) {
if (WorkerDoneEvent.class.equals(event.getClass())) {
onWorkerDoneEvent((WorkerDoneEvent) event);
return;
}
throw new IllegalStateException("Unable to handle event " + event.getClass().getName());
}
private void onWorkerDoneEvent(WorkerDoneEvent workerDoneEvent) {
if (this.endOfSuperstep) {
throw new RuntimeException(
"Encountered WorderDoneEvent when still in End-of-Superstep status.");
}
workerDoneEventCounter++;
String[] aggNames = workerDoneEvent.getAggregatorNames();
Value[] aggregates = workerDoneEvent.getAggregates(userCodeClassLoader);
if (aggNames.length != aggregates.length) {
throw new RuntimeException("Inconsistent WorkerDoneEvent received!");
}
for (int i = 0; i < aggNames.length; i++) {
@SuppressWarnings("unchecked")
Aggregator<Value> aggregator = (Aggregator<Value>) this.aggregators.get(aggNames[i]);
aggregator.aggregate(aggregates[i]);
}
if (workerDoneEventCounter % numberOfEventsUntilEndOfSuperstep == 0) {
endOfSuperstep = true;
Thread.currentThread().interrupt();
}
}
public boolean isEndOfSuperstep() {
return this.endOfSuperstep;
}
public void resetEndOfSuperstep() {
this.endOfSuperstep = false;
}
}
| SyncEventHandler |
java | apache__camel | components/camel-fhir/camel-fhir-component/src/generated/java/org/apache/camel/component/fhir/FhirTransactionEndpointConfiguration.java | {
"start": 1569,
"end": 3845
} | class ____ extends FhirConfiguration {
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "withBundle", description="Bundle to use in the transaction")})
private org.hl7.fhir.instance.model.api.IBaseBundle bundle;
@UriParam
@ApiParam(optional = true, apiMethods = {@ApiMethod(methodName = "withBundle", description="See ExtraParameters for a full list of parameters that can be passed, may be NULL"), @ApiMethod(methodName = "withBundle", description="See ExtraParameters for a full list of parameters that can be passed, may be NULL"), @ApiMethod(methodName = "withResources", description="See ExtraParameters for a full list of parameters that can be passed, may be NULL")})
private java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> extraParameters;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "withResources", description="Resources to use in the transaction")})
private java.util.List<org.hl7.fhir.instance.model.api.IBaseResource> resources;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "withBundle", description="Bundle to use in the transaction")})
private String stringBundle;
public org.hl7.fhir.instance.model.api.IBaseBundle getBundle() {
return bundle;
}
public void setBundle(org.hl7.fhir.instance.model.api.IBaseBundle bundle) {
this.bundle = bundle;
}
public java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> getExtraParameters() {
return extraParameters;
}
public void setExtraParameters(java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> extraParameters) {
this.extraParameters = extraParameters;
}
public java.util.List<org.hl7.fhir.instance.model.api.IBaseResource> getResources() {
return resources;
}
public void setResources(java.util.List<org.hl7.fhir.instance.model.api.IBaseResource> resources) {
this.resources = resources;
}
public String getStringBundle() {
return stringBundle;
}
public void setStringBundle(String stringBundle) {
this.stringBundle = stringBundle;
}
}
| FhirTransactionEndpointConfiguration |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java | {
"start": 149159,
"end": 150931
} | class ____ extends ArrayinitializerContext {
public TerminalNode NEW() {
return getToken(PainlessParser.NEW, 0);
}
public TypeContext type() {
return getRuleContext(TypeContext.class, 0);
}
public List<TerminalNode> LBRACE() {
return getTokens(PainlessParser.LBRACE);
}
public TerminalNode LBRACE(int i) {
return getToken(PainlessParser.LBRACE, i);
}
public List<ExpressionContext> expression() {
return getRuleContexts(ExpressionContext.class);
}
public ExpressionContext expression(int i) {
return getRuleContext(ExpressionContext.class, i);
}
public List<TerminalNode> RBRACE() {
return getTokens(PainlessParser.RBRACE);
}
public TerminalNode RBRACE(int i) {
return getToken(PainlessParser.RBRACE, i);
}
public PostdotContext postdot() {
return getRuleContext(PostdotContext.class, 0);
}
public List<PostfixContext> postfix() {
return getRuleContexts(PostfixContext.class);
}
public PostfixContext postfix(int i) {
return getRuleContext(PostfixContext.class, i);
}
public NewstandardarrayContext(ArrayinitializerContext ctx) {
copyFrom(ctx);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor<? extends T>) visitor).visitNewstandardarray(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static | NewstandardarrayContext |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/throwable/ThrowableClassifierTest.java | {
"start": 1058,
"end": 4165
} | class ____ {
@Test
void testThrowableType_NonRecoverable() {
assertThat(
ThrowableClassifier.getThrowableType(
new SuppressRestartsException(new Exception(""))))
.isEqualTo(ThrowableType.NonRecoverableError);
}
@Test
void testThrowableType_Recoverable() {
assertThat(ThrowableClassifier.getThrowableType(new Exception("")))
.isEqualTo(ThrowableType.RecoverableError);
assertThat(ThrowableClassifier.getThrowableType(new TestRecoverableErrorException()))
.isEqualTo(ThrowableType.RecoverableError);
}
@Test
void testThrowableType_EnvironmentError() {
assertThat(ThrowableClassifier.getThrowableType(new TestEnvironmentErrorException()))
.isEqualTo(ThrowableType.EnvironmentError);
}
@Test
void testThrowableType_PartitionDataMissingError() {
assertThat(
ThrowableClassifier.getThrowableType(
new TestPartitionDataMissingErrorException()))
.isEqualTo(ThrowableType.PartitionDataMissingError);
}
@Test
void testThrowableType_InheritError() {
assertThat(
ThrowableClassifier.getThrowableType(
new TestPartitionDataMissingErrorSubException()))
.isEqualTo(ThrowableType.PartitionDataMissingError);
}
@Test
void testFindThrowableOfThrowableType() {
// no throwable type
assertThat(
ThrowableClassifier.findThrowableOfThrowableType(
new Exception(), ThrowableType.RecoverableError))
.isNotPresent();
// no recoverable throwable type
assertThat(
ThrowableClassifier.findThrowableOfThrowableType(
new TestPartitionDataMissingErrorException(),
ThrowableType.RecoverableError))
.isNotPresent();
// direct recoverable throwable
assertThat(
ThrowableClassifier.findThrowableOfThrowableType(
new TestRecoverableErrorException(),
ThrowableType.RecoverableError))
.isPresent();
// nested recoverable throwable
assertThat(
ThrowableClassifier.findThrowableOfThrowableType(
new Exception(new TestRecoverableErrorException()),
ThrowableType.RecoverableError))
.isPresent();
// inherit recoverable throwable
assertThat(
ThrowableClassifier.findThrowableOfThrowableType(
new TestRecoverableFailureSubException(),
ThrowableType.RecoverableError))
.isPresent();
}
@ThrowableAnnotation(ThrowableType.PartitionDataMissingError)
private static | ThrowableClassifierTest |
java | apache__camel | components/camel-jcache/src/main/java/org/apache/camel/component/jcache/JCacheConsumer.java | {
"start": 1381,
"end": 4138
} | class ____ extends DefaultConsumer {
private CacheEntryListenerConfiguration<Object, Object> entryListenerConfiguration;
public JCacheConsumer(final JCacheEndpoint endpoint, Processor processor) {
super(endpoint, processor);
this.entryListenerConfiguration = null;
}
@Override
protected void doStart() throws Exception {
if (entryListenerConfiguration != null) {
getCache().deregisterCacheEntryListener(entryListenerConfiguration);
}
entryListenerConfiguration = createEntryListenerConfiguration();
getCache().registerCacheEntryListener(entryListenerConfiguration);
super.doStart();
}
@Override
protected void doStop() throws Exception {
if (entryListenerConfiguration != null) {
getCache().deregisterCacheEntryListener(entryListenerConfiguration);
entryListenerConfiguration = null;
}
super.doStop();
}
private JCacheEndpoint getJCacheEndpoint() {
return (JCacheEndpoint) getEndpoint();
}
private Cache getCache() throws Exception {
return getJCacheEndpoint().getManager().getCache();
}
private CacheEntryListenerConfiguration<Object, Object> createEntryListenerConfiguration() {
Factory<CacheEntryListener<Object, Object>> listenerFactory = () -> new JCacheEntryEventListener() {
@Override
protected void onEvents(Iterable<CacheEntryEvent<?, ?>> events) {
for (CacheEntryEvent<?, ?> event : events) {
Exchange exchange = createExchange(true);
Message message = exchange.getIn();
message.setHeader(JCacheConstants.EVENT_TYPE, event.getEventType().name());
message.setHeader(JCacheConstants.KEY, event.getKey());
message.setBody(event.getValue());
if (event.isOldValueAvailable()) {
message.setHeader(JCacheConstants.OLD_VALUE, event.getOldValue());
}
try {
getProcessor().process(exchange);
} catch (Exception e) {
getExceptionHandler().handleException(e);
}
}
}
};
Factory<CacheEntryEventFilter<Object, Object>> filterFactory = () -> getJCacheEndpoint().getManager().getEventFilter();
return new MutableCacheEntryListenerConfiguration<>(
listenerFactory, filterFactory,
getJCacheEndpoint().getManager().getConfiguration().isOldValueRequired(),
getJCacheEndpoint().getManager().getConfiguration().isSynchronous());
}
}
| JCacheConsumer |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RTransferQueueAsync.java | {
"start": 1563,
"end": 2581
} | class ____ the specified element
* prevents it from being added to this queue
* @throws NullPointerException if the specified element is null
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
RFuture<Void> transferAsync(V e);
/**
* Transfers the element to waiting consumer
* which invoked {@link #takeAsync} or {@link #pollAsync} method
* at the moment of transfer.
* Waits up to defined <code>timeout</code> if necessary for a consumer.
*
* @param e the element to transfer
* @param timeout the maximum time to wait
* @param unit the time unit
* @return <code>true</code> if the element was transferred and <code>false</code>
* otherwise
*/
RFuture<Boolean> tryTransferAsync(V e, long timeout, TimeUnit unit);
/**
* Returns all queue elements at once
*
* @return elements
*/
List<V> readAll();
}
| of |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DuplicateDateFormatFieldTest.java | {
"start": 6027,
"end": 6165
} | class ____ {
SimpleDateFormat format = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm[':'ss][XXX][X]");
}
""")
.doTest();
}
}
| Test |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/ttl/mock/MockInternalAggregatingState.java | {
"start": 1281,
"end": 2485
} | class ____<K, N, IN, ACC, OUT>
extends MockInternalMergingState<K, N, IN, ACC, OUT>
implements InternalAggregatingState<K, N, IN, ACC, OUT> {
private final AggregateFunction<IN, ACC, OUT> aggregateFunction;
private MockInternalAggregatingState(AggregateFunction<IN, ACC, OUT> aggregateFunction) {
this.aggregateFunction = aggregateFunction;
}
@Override
public OUT get() {
return aggregateFunction.getResult(getInternal());
}
@Override
public void add(IN value) {
updateInternal(aggregateFunction.add(value, getInternal()));
}
@Override
ACC mergeState(ACC acc, ACC nAcc) {
return aggregateFunction.merge(acc, nAcc);
}
@SuppressWarnings({"unchecked", "unused"})
static <IN, OUT, N, ACC, S extends State, IS extends S> IS createState(
TypeSerializer<N> namespaceSerializer, StateDescriptor<S, ACC> stateDesc) {
AggregatingStateDescriptor<IN, ACC, OUT> aggregatingStateDesc =
(AggregatingStateDescriptor<IN, ACC, OUT>) stateDesc;
return (IS) new MockInternalAggregatingState<>(aggregatingStateDesc.getAggregateFunction());
}
}
| MockInternalAggregatingState |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/stereotypes/StereotypeAlternativeTest.java | {
"start": 2098,
"end": 2169
} | interface ____ {
}
@Dependent
static | BeAlternativeWithPriority |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerModelBuilderTests.java | {
"start": 1310,
"end": 13107
} | class ____ extends ESTestCase {
private static final String inferenceId = "inferenceId";
private static final TaskType taskType = TaskType.ANY;
private static final String service = "service";
private SageMakerModelBuilder builder;
@Before
public void setUp() throws Exception {
super.setUp();
builder = new SageMakerModelBuilder(SageMakerSchemasTests.mockSchemas());
}
public void testFromRequestWithRequiredFields() {
var model = fromRequest("""
{
"service_settings": {
"access_key": "test-access-key",
"secret_key": "test-secret-key",
"region": "us-east-1",
"api": "test-api",
"endpoint_name": "test-endpoint"
}
}
""");
assertNotNull(model);
assertTrue(model.awsSecretSettings().isPresent());
assertThat(model.awsSecretSettings().get().accessKey().toString(), equalTo("test-access-key"));
assertThat(model.awsSecretSettings().get().secretKey().toString(), equalTo("test-secret-key"));
assertThat(model.region(), equalTo("us-east-1"));
assertThat(model.api(), equalTo("test-api"));
assertThat(model.endpointName(), equalTo("test-endpoint"));
assertTrue(model.customAttributes().isEmpty());
assertTrue(model.enableExplanations().isEmpty());
assertTrue(model.inferenceComponentName().isEmpty());
assertTrue(model.inferenceIdForDataCapture().isEmpty());
assertTrue(model.sessionId().isEmpty());
assertTrue(model.targetContainerHostname().isEmpty());
assertTrue(model.targetModel().isEmpty());
assertTrue(model.targetVariant().isEmpty());
assertTrue(model.batchSize().isEmpty());
}
public void testFromRequestWithOptionalFields() {
var model = fromRequest("""
{
"service_settings": {
"access_key": "test-access-key",
"secret_key": "test-secret-key",
"region": "us-east-1",
"api": "test-api",
"endpoint_name": "test-endpoint",
"target_model": "test-target",
"target_container_hostname": "test-target-container",
"inference_component_name": "test-inference-component",
"batch_size": 1234
},
"task_settings": {
"custom_attributes": "test-custom-attributes",
"enable_explanations": "test-enable-explanations",
"inference_id": "test-inference-id",
"session_id": "test-session-id",
"target_variant": "test-target-variant"
}
}
""");
assertNotNull(model);
assertTrue(model.awsSecretSettings().isPresent());
assertThat(model.awsSecretSettings().get().accessKey().toString(), equalTo("test-access-key"));
assertThat(model.awsSecretSettings().get().secretKey().toString(), equalTo("test-secret-key"));
assertThat(model.region(), equalTo("us-east-1"));
assertThat(model.api(), equalTo("test-api"));
assertThat(model.endpointName(), equalTo("test-endpoint"));
assertPresent(model.customAttributes(), "test-custom-attributes");
assertPresent(model.enableExplanations(), "test-enable-explanations");
assertPresent(model.inferenceComponentName(), "test-inference-component");
assertPresent(model.inferenceIdForDataCapture(), "test-inference-id");
assertPresent(model.sessionId(), "test-session-id");
assertPresent(model.targetContainerHostname(), "test-target-container");
assertPresent(model.targetModel(), "test-target");
assertPresent(model.targetVariant(), "test-target-variant");
assertPresent(model.batchSize(), 1234);
}
public void testFromRequestWithoutAccessKey() {
testExceptionFromRequest("""
{
"service_settings": {
"secret_key": "test-secret-key",
"region": "us-east-1",
"api": "test-api",
"endpoint_name": "test-endpoint"
}
}
""", ValidationException.class, "Validation Failed: 1: [secret_settings] does not contain the required setting [access_key];");
}
public void testFromRequestWithoutSecretKey() {
testExceptionFromRequest("""
{
"service_settings": {
"access_key": "test-access-key",
"region": "us-east-1",
"api": "test-api",
"endpoint_name": "test-endpoint"
}
}
""", ValidationException.class, "Validation Failed: 1: [secret_settings] does not contain the required setting [secret_key];");
}
public void testFromRequestWithoutRegion() {
testExceptionFromRequest("""
{
"service_settings": {
"access_key": "test-access-key",
"secret_key": "test-secret-key",
"api": "test-api",
"endpoint_name": "test-endpoint"
}
}
""", ValidationException.class, "Validation Failed: 1: [service_settings] does not contain the required setting [region];");
}
public void testFromRequestWithoutApi() {
testExceptionFromRequest("""
{
"service_settings": {
"access_key": "test-access-key",
"secret_key": "test-secret-key",
"region": "us-east-1",
"endpoint_name": "test-endpoint"
}
}
""", ValidationException.class, "Validation Failed: 1: [service_settings] does not contain the required setting [api];");
}
public void testFromRequestWithoutEndpointName() {
testExceptionFromRequest(
"""
{
"service_settings": {
"access_key": "test-access-key",
"secret_key": "test-secret-key",
"region": "us-east-1",
"api": "test-api"
}
}
""",
ValidationException.class,
"Validation Failed: 1: [service_settings] does not contain the required setting [endpoint_name];"
);
}
public void testFromRequestWithExtraServiceKeys() {
testExceptionFromRequest("""
{
"service_settings": {
"access_key": "test-access-key",
"secret_key": "test-secret-key",
"region": "us-east-1",
"api": "test-api",
"endpoint_name": "test-endpoint",
"hello": "there"
}
}
""", ElasticsearchStatusException.class, "Configuration contains settings [{hello=there}] unknown to the [service] service");
}
public void testFromRequestWithExtraTaskKeys() {
testExceptionFromRequest("""
{
"service_settings": {
"access_key": "test-access-key",
"secret_key": "test-secret-key",
"region": "us-east-1",
"api": "test-api",
"endpoint_name": "test-endpoint"
},
"task_settings": {
"hello": "there"
}
}
""", ElasticsearchStatusException.class, "Configuration contains settings [{hello=there}] unknown to the [service] service");
}
public void testRoundTrip() throws IOException {
var expectedModel = fromRequest("""
{
"service_settings": {
"access_key": "test-access-key",
"secret_key": "test-secret-key",
"region": "us-east-1",
"api": "test-api",
"endpoint_name": "test-endpoint",
"target_model": "test-target",
"target_container_hostname": "test-target-container",
"inference_component_name": "test-inference-component",
"batch_size": 1234
},
"task_settings": {
"custom_attributes": "test-custom-attributes",
"enable_explanations": "test-enable-explanations",
"inference_id": "test-inference-id",
"session_id": "test-session-id",
"target_variant": "test-target-variant"
}
}
""");
var unparsedModelWithSecrets = unparsedModel(expectedModel.getConfigurations(), expectedModel.getSecrets());
var modelWithSecrets = builder.fromStorage(
unparsedModelWithSecrets.inferenceEntityId(),
unparsedModelWithSecrets.taskType(),
unparsedModelWithSecrets.service(),
unparsedModelWithSecrets.settings(),
unparsedModelWithSecrets.secrets()
);
assertThat(modelWithSecrets, equalTo(expectedModel));
assertNotNull(modelWithSecrets.getSecrets().getSecretSettings());
var unparsedModelWithoutSecrets = unparsedModel(expectedModel.getConfigurations(), null);
var modelWithoutSecrets = builder.fromStorage(
unparsedModelWithoutSecrets.inferenceEntityId(),
unparsedModelWithoutSecrets.taskType(),
unparsedModelWithoutSecrets.service(),
unparsedModelWithoutSecrets.settings(),
unparsedModelWithoutSecrets.secrets()
);
assertThat(modelWithoutSecrets.getConfigurations(), equalTo(expectedModel.getConfigurations()));
assertNull(modelWithoutSecrets.getSecrets().getSecretSettings());
}
private SageMakerModel fromRequest(String json) {
return builder.fromRequest(inferenceId, taskType, service, map(json));
}
private void testExceptionFromRequest(String json, Class<? extends Exception> exceptionClass, String message) {
var exception = assertThrows(exceptionClass, () -> fromRequest(json));
assertThat(exception.getMessage(), equalTo(message));
}
private static <T> void assertPresent(Optional<T> optional, T expectedValue) {
assertTrue(optional.isPresent());
assertThat(optional.get(), equalTo(expectedValue));
}
private static Map<String, Object> map(String json) {
try (
var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json.getBytes(StandardCharsets.UTF_8))
) {
return parser.map();
} catch (IOException e) {
throw new AssertionError(e);
}
}
private static UnparsedModel unparsedModel(ModelConfigurations modelConfigurations, ModelSecrets modelSecrets) throws IOException {
var modelConfigMap = new ModelRegistry.ModelConfigMap(
toJsonMap(modelConfigurations),
modelSecrets != null ? toJsonMap(modelSecrets) : null
);
return ModelRegistry.unparsedModelFromMap(modelConfigMap);
}
private static Map<String, Object> toJsonMap(ToXContent toXContent) throws IOException {
try (var builder = JsonXContent.contentBuilder()) {
toXContent.toXContent(builder, new ToXContent.MapParams(Map.of(USE_ID_FOR_INDEX, "true")));
return map(Strings.toString(builder));
}
}
}
| SageMakerModelBuilderTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NullNeedsCastForVarargsTest.java | {
"start": 10258,
"end": 11509
} | class ____ {
void test(Invokable<String, Boolean> invokable) throws Exception {
// BUG: Diagnostic contains:
List<String> arrayList = Lists.newArrayList((String[]) null);
// BUG: Diagnostic contains:
Set<String> hashSet = Sets.newHashSet((String[]) null);
// BUG: Diagnostic contains:
Iterable<String> cycle = Iterables.cycle((String[]) null);
// BUG: Diagnostic contains:
Iterator<String> forArray = Iterators.forArray(null);
// BUG: Diagnostic contains:
invokable.invoke("", null);
}
}
""")
.doTest();
}
@Test
public void negativeCases() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;
| Test |
java | qos-ch__slf4j | jcl-over-slf4j/src/main/java/org/apache/commons/logging/impl/SimpleLog.java | {
"start": 3412,
"end": 6329
} | class ____ implements Log, Serializable {
private static final long serialVersionUID = 136942970684951178L;
// ------------------------------------------------------- Class Attributes
/** All system properties used by <code>SimpleLog</code> start with this */
static protected final String systemPrefix = "org.apache.commons.logging.simplelog.";
/** Properties loaded from simplelog.properties */
static protected final Properties simpleLogProps = new Properties();
/** The default format to use when formatting dates */
static protected final String DEFAULT_DATE_TIME_FORMAT = "yyyy/MM/dd HH:mm:ss:SSS zzz";
/** Include the instance name in the log message? */
static protected boolean showLogName = false;
/**
* Include the short name ( last component ) of the logger in the log message.
* Defaults to true - otherwise we'll be lost in a flood of messages without
* knowing who sends them.
*/
static protected boolean showShortName = true;
/** Include the current time in the log message */
static protected boolean showDateTime = false;
/** The date and time format to use in the log message */
static protected String dateTimeFormat = DEFAULT_DATE_TIME_FORMAT;
/** Used to format times */
static protected DateFormat dateFormatter = null;
// ---------------------------------------------------- Log Level Constants
/** "Trace" level logging. */
public static final int LOG_LEVEL_TRACE = 1;
/** "Debug" level logging. */
public static final int LOG_LEVEL_DEBUG = 2;
/** "Info" level logging. */
public static final int LOG_LEVEL_INFO = 3;
/** "Warn" level logging. */
public static final int LOG_LEVEL_WARN = 4;
/** "Error" level logging. */
public static final int LOG_LEVEL_ERROR = 5;
/** "Fatal" level logging. */
public static final int LOG_LEVEL_FATAL = 6;
/** Enable all logging levels */
public static final int LOG_LEVEL_ALL = (LOG_LEVEL_TRACE - 1);
/** Enable no logging levels */
public static final int LOG_LEVEL_OFF = (LOG_LEVEL_FATAL + 1);
// ------------------------------------------------------------ Initializer
private static String getStringProperty(String name) {
String prop = null;
try {
prop = System.getProperty(name);
} catch (SecurityException e) {
; // Ignore
}
return (prop == null) ? simpleLogProps.getProperty(name) : prop;
}
private static String getStringProperty(String name, String dephault) {
String prop = getStringProperty(name);
return (prop == null) ? dephault : prop;
}
private static boolean getBooleanProperty(String name, boolean dephault) {
String prop = getStringProperty(name);
return (prop == null) ? dephault : "true".equalsIgnoreCase(prop);
}
// Initialize | SimpleLog |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/fielddata/SourceValueFetcherSortedBooleanIndexFieldData.java | {
"start": 1185,
"end": 2739
} | class ____ extends SourceValueFetcherIndexFieldData.Builder<SortedNumericLongValues> {
public Builder(
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericLongValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
public SourceValueFetcherSortedBooleanIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new SourceValueFetcherSortedBooleanIndexFieldData(
fieldName,
valuesSourceType,
valueFetcher,
sourceProvider,
toScriptFieldFactory
);
}
}
protected SourceValueFetcherSortedBooleanIndexFieldData(
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericLongValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
public SourceValueFetcherLeafFieldData<SortedNumericLongValues> loadDirect(LeafReaderContext context) {
return new SourceValueFetcherSortedBooleanLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceProvider);
}
private static | Builder |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/annotations/Options.java | {
"start": 1158,
"end": 1502
} | interface ____ {
* @Options(useGeneratedKeys = true, keyProperty = "id")
* @Insert("INSERT INTO users (name) VALUES(#{name})")
* boolean insert(User user);
* }
* </pre>
*
* @author Clinton Begin
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Repeatable(Options.List.class)
public @ | UserMapper |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/util/QuarkusTestProfileAwareClassOrderer.java | {
"start": 2525,
"end": 3001
} | class ____.
* <p/>
* {@link #getCustomOrderKey(ClassDescriptor, ClassOrdererContext)} can be overridden to provide a custom order number for a
* given test class, e.g. based on {@link org.junit.jupiter.api.Tag} or something else.
* <p/>
* Limitations:
* <ul>
* <li>Only JUnit5 test classes are subject to ordering, e.g. ArchUnit test classes are not passed to this orderer.</li>
* <li>This orderer does not handle {@link Nested} test classes.</li>
* </ul>
*/
public | name |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/config/provider/ConfigProvider.java | {
"start": 1608,
"end": 3461
} | interface ____ extends Configurable, Closeable {
/**
* Retrieves the data at the given path.
*
* @param path the path where the data resides
* @return the configuration data
*/
ConfigData get(String path);
/**
* Retrieves the data with the given keys at the given path.
*
* @param path the path where the data resides
* @param keys the keys whose values will be retrieved
* @return the configuration data
*/
ConfigData get(String path, Set<String> keys);
/**
* Subscribes to changes for the given keys at the given path (optional operation).
*
* @param path the path where the data resides
* @param keys the keys whose values will be retrieved
* @param callback the callback to invoke upon change
* @throws UnsupportedOperationException if the subscribe operation is not supported
*/
default void subscribe(String path, Set<String> keys, ConfigChangeCallback callback) {
throw new UnsupportedOperationException();
}
/**
* Unsubscribes to changes for the given keys at the given path (optional operation).
*
* @param path the path where the data resides
* @param keys the keys whose values will be retrieved
* @param callback the callback to be unsubscribed from changes
* @throws UnsupportedOperationException if the unsubscribe operation is not supported
*/
default void unsubscribe(String path, Set<String> keys, ConfigChangeCallback callback) {
throw new UnsupportedOperationException();
}
/**
* Clears all subscribers (optional operation).
*
* @throws UnsupportedOperationException if the unsubscribeAll operation is not supported
*/
default void unsubscribeAll() {
throw new UnsupportedOperationException();
}
}
| ConfigProvider |
java | bumptech__glide | integration/concurrent/src/main/java/com/bumptech/glide/integration/concurrent/GlideFutures.java | {
"start": 7179,
"end": 8070
} | class ____<T> implements RequestListener<T> {
private final Completer<TargetAndResult<T>> completer;
GlideLoadingListener(Completer<TargetAndResult<T>> completer) {
this.completer = completer;
}
@Override
public boolean onLoadFailed(
@Nullable GlideException e, Object model, @NonNull Target<T> target, boolean isFirst) {
completer.setException(e != null ? e : new RuntimeException("Unknown error"));
return true;
}
@Override
public boolean onResourceReady(
@NonNull T resource,
@NonNull Object model,
Target<T> target,
@NonNull DataSource dataSource,
boolean isFirst) {
try {
completer.set(new TargetAndResult<>(target, resource));
} catch (Throwable t) {
completer.setException(t);
}
return true;
}
}
private static final | GlideLoadingListener |
java | playframework__playframework | core/play-guice/src/main/java/play/inject/guice/GuiceApplicationLoader.java | {
"start": 490,
"end": 2037
} | class ____ implements ApplicationLoader {
/** The initial builder to start construction from. */
protected final GuiceApplicationBuilder initialBuilder;
public GuiceApplicationLoader() {
this(new GuiceApplicationBuilder());
}
public GuiceApplicationLoader(GuiceApplicationBuilder initialBuilder) {
this.initialBuilder = initialBuilder;
}
@Override
public final Application load(ApplicationLoader.Context context) {
return builder(context).build();
}
/**
* Construct a builder to use for loading the given context.
*
* @param context the context the returned builder will load
* @return the builder
*/
public GuiceApplicationBuilder builder(ApplicationLoader.Context context) {
return initialBuilder
.in(context.environment())
.loadConfig(context.initialConfig())
.overrides(overrides(context));
}
/**
* Identify some bindings that should be used as overrides when loading an application using this
* context. The default implementation of this method provides bindings that most applications
* should include.
*
* @param context the context that should be searched for overrides
* @return the bindings that should be used to override
*/
protected GuiceableModule[] overrides(ApplicationLoader.Context context) {
scala.collection.Seq<GuiceableModule> seq =
play.api.inject.guice.GuiceApplicationLoader$.MODULE$.defaultOverrides(context.asScala());
return Scala.asArray(GuiceableModule.class, seq);
}
}
| GuiceApplicationLoader |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/util/internal/UnreflectHandleSupplier.java | {
"start": 460,
"end": 1631
} | class ____ implements Supplier<MethodHandle> {
private final MethodType asType;
private boolean initialized;
private Supplier<MethodHandle> delegate = this::initialize;
public UnreflectHandleSupplier(MethodType asType) {
this.asType = asType;
}
@Override
public MethodHandle get() {
return delegate.get();
}
synchronized MethodHandle initialize() {
if (!initialized) {
MethodHandle mh;
try {
mh = postprocess(unreflect());
} catch (IllegalAccessException e) {
throw ClassUtil.sneakyThrow(e);
}
delegate = () -> mh;
initialized = true;
}
return delegate.get();
}
protected MethodHandle postprocess(MethodHandle mh) {
if (mh == null) {
return mh;
}
if (asType == null) {
return mh.asFixedArity();
}
return mh.asType(asType);
}
protected abstract MethodHandle unreflect() throws IllegalAccessException;
@Override
public String toString() {
return get().toString();
}
}
| UnreflectHandleSupplier |
java | spring-projects__spring-security | access/src/main/java/org/springframework/security/access/prepost/PreInvocationAuthorizationAdvice.java | {
"start": 1139,
"end": 1800
} | interface ____ extends AopInfrastructureBean {
/**
* The "before" advice which should be executed to perform any filtering necessary and
* to decide whether the method call is authorised.
* @param authentication the information on the principal on whose account the
* decision should be made
* @param mi the method invocation being attempted
* @param preInvocationAttribute the attribute built from the @PreFilter
* and @PostFilter annotations.
* @return true if authorised, false otherwise
*/
boolean before(Authentication authentication, MethodInvocation mi, PreInvocationAttribute preInvocationAttribute);
}
| PreInvocationAuthorizationAdvice |
java | apache__kafka | generator/src/main/java/org/apache/kafka/message/MetadataJsonConvertersGenerator.java | {
"start": 950,
"end": 2130
} | class ____ implements TypeClassGenerator {
private final HeaderGenerator headerGenerator;
private final CodeBuffer buffer;
private final TreeMap<Short, MessageSpec> apis;
public MetadataJsonConvertersGenerator(String packageName) {
this.headerGenerator = new HeaderGenerator(packageName);
this.apis = new TreeMap<>();
this.buffer = new CodeBuffer();
}
@Override
public String outputName() {
return MessageGenerator.METADATA_JSON_CONVERTERS_JAVA;
}
@Override
public void registerMessageType(MessageSpec spec) {
if (spec.type() == MessageSpecType.METADATA) {
short id = spec.apiKey().get();
MessageSpec prevSpec = apis.put(id, spec);
if (prevSpec != null) {
throw new RuntimeException("Duplicate metadata record entry for type " +
id + ". Original claimant: " + prevSpec.name() + ". New " +
"claimant: " + spec.name());
}
}
}
@Override
public void generateAndWrite(BufferedWriter writer) throws IOException {
buffer.printf("public | MetadataJsonConvertersGenerator |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/FindIdentifiersTest.java | {
"start": 1875,
"end": 2150
} | class ____ {
/** A {@link BugChecker} that prints all identifiers in scope at a call to String.format(). */
@BugPattern(
severity = SeverityLevel.ERROR,
summary = "Prints all identifiers in scope at a call to String.format()")
public static | FindIdentifiersTest |
java | google__dagger | javatests/artifacts/hilt-android/simple/lib/src/main/java/dagger/hilt/android/simple/lib/ThingImpl.java | {
"start": 847,
"end": 961
} | class ____ implements Thing {
@Inject
ThingImpl() {}
@Module
@InstallIn(SingletonComponent.class)
| ThingImpl |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java | {
"start": 1196,
"end": 4275
} | class ____ extends TransportTasksAction<
TransportStartDatafeedAction.DatafeedTask,
IsolateDatafeedAction.Request,
IsolateDatafeedAction.Response,
IsolateDatafeedAction.Response> {
@Inject
public TransportIsolateDatafeedAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) {
super(
IsolateDatafeedAction.NAME,
clusterService,
transportService,
actionFilters,
IsolateDatafeedAction.Request::new,
IsolateDatafeedAction.Response::new,
transportService.getThreadPool().executor(MachineLearning.UTILITY_THREAD_POOL_NAME)
);
}
@Override
protected void doExecute(Task task, IsolateDatafeedAction.Request request, ActionListener<IsolateDatafeedAction.Response> listener) {
final ClusterState state = clusterService.state();
PersistentTasksCustomMetadata tasks = state.getMetadata().getProject().custom(PersistentTasksCustomMetadata.TYPE);
PersistentTasksCustomMetadata.PersistentTask<?> datafeedTask = MlTasks.getDatafeedTask(request.getDatafeedId(), tasks);
if (datafeedTask == null || datafeedTask.getExecutorNode() == null) {
// No running datafeed task to isolate
listener.onResponse(new IsolateDatafeedAction.Response(false));
return;
}
request.setNodes(datafeedTask.getExecutorNode());
super.doExecute(task, request, listener);
}
@Override
protected IsolateDatafeedAction.Response newResponse(
IsolateDatafeedAction.Request request,
List<IsolateDatafeedAction.Response> tasks,
List<TaskOperationFailure> taskOperationFailures,
List<FailedNodeException> failedNodeExceptions
) {
// We only let people isolate one datafeed at a time, so each list will be empty or contain one item
assert tasks.size() <= 1 : "more than 1 item in tasks: " + tasks.size();
assert taskOperationFailures.size() <= 1 : "more than 1 item in taskOperationFailures: " + taskOperationFailures.size();
assert failedNodeExceptions.size() <= 1 : "more than 1 item in failedNodeExceptions: " + failedNodeExceptions.size();
if (taskOperationFailures.isEmpty() == false) {
throw ExceptionsHelper.taskOperationFailureToStatusException(taskOperationFailures.get(0));
} else if (failedNodeExceptions.isEmpty() == false) {
throw failedNodeExceptions.get(0);
} else if (tasks.isEmpty() == false) {
return tasks.get(0);
}
return new IsolateDatafeedAction.Response(false);
}
@Override
protected void taskOperation(
CancellableTask actionTask,
IsolateDatafeedAction.Request request,
TransportStartDatafeedAction.DatafeedTask datafeedTask,
ActionListener<IsolateDatafeedAction.Response> listener
) {
datafeedTask.isolate();
listener.onResponse(new IsolateDatafeedAction.Response(true));
}
}
| TransportIsolateDatafeedAction |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/manytomany/Woman.java | {
"start": 547,
"end": 2083
} | class ____ implements Serializable {
private WomanPk id;
private String carName;
private Set<Man> mens;
private Set<Cat> cats;
@ManyToMany(mappedBy = "humanContacts")
public Set<Cat> getCats() {
return cats;
}
public void setCats(Set<Cat> cats) {
this.cats = cats;
}
@ManyToMany(cascade = {CascadeType.ALL})
@JoinTable(
name = "Man_Woman",
joinColumns = {
@JoinColumn(name = "womanLastName", referencedColumnName = "lastName"),
@JoinColumn(name = "womanFirstName", referencedColumnName = "firstName")
},
inverseJoinColumns = {
@JoinColumn(name = "manIsElder", referencedColumnName = "elder"),
@JoinColumn(name = "manLastName", referencedColumnName = "lastName"),
@JoinColumn(name = "manFirstName", referencedColumnName = "firstName")
},
foreignKey = @ForeignKey(name = "WM_W_FK")
)
public Set<Man> getMens() {
return mens;
}
public void setMens(Set<Man> mens) {
this.mens = mens;
}
@Id
public WomanPk getId() {
return id;
}
public void setId(WomanPk id) {
this.id = id;
}
public String getCarName() {
return carName;
}
public void setCarName(String carName) {
this.carName = carName;
}
public int hashCode() {
//a NPE can occurs, but I don't expect hashcode to be used before pk is set
return getId().hashCode();
}
public boolean equals(Object obj) {
//a NPE can occurs, but I don't expect equals to be used before pk is set
if ( obj instanceof Woman w ) {
return getId().equals( w.getId() );
}
else {
return false;
}
}
}
| Woman |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/main/java/org/apache/camel/test/main/junit5/CamelMainContext.java | {
"start": 9533,
"end": 11766
} | class
____ instance = getOuterClassInstance();
if (instance instanceof DebuggerCallback callback) {
context.setDebugging(true);
context.setDebugger(new DefaultDebugger());
context.getDebugger().addBreakpoint(new BreakpointSupport() {
@Override
public void beforeProcess(Exchange exchange, Processor processor, NamedNode definition) {
callback.debugBefore(exchange, processor, (ProcessorDefinition<?>) definition, definition.getId(),
definition.getLabel());
}
@Override
public void afterProcess(Exchange exchange, Processor processor, NamedNode definition, long timeTaken) {
callback.debugAfter(exchange, processor, (ProcessorDefinition<?>) definition, definition.getId(),
definition.getLabel(), timeTaken);
}
});
}
}
/**
* Initialize the given Camel context like a Camel Main application based on what could be extracted from the
* annotation {@link CamelMainTest}.
*/
private void initCamelContext(ModelCamelContext context) throws Exception {
createMainForTest().init(context);
}
/**
* Create a new Camel Main application for test based on what could be extracted from the annotation
* {@link CamelMainTest}.
*/
private MainForTest createMainForTest() {
final MainForTest main = new MainForTest();
main.configure().setJmxEnabled(useJmx);
for (CamelMainTest annotation : annotations) {
configureMainClass(main, annotation);
addConfigurationClasses(main, annotation);
configureOverrideProperties(main, annotation);
configurePropertyPlaceholderLocations(main, annotation);
}
for (Object instance : instances) {
invokeConfigureMethods(main, instance);
}
return main;
}
/**
* Configure the main | Object |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/StateSnapshotTransformerTest.java | {
"start": 3672,
"end": 3979
} | class ____ {
final Random rnd;
private TestState() {
this.rnd = new Random();
}
abstract void setToRandomValue() throws Exception;
String getRandomString() {
return StringUtils.getRandomString(rnd, 5, 10);
}
}
private | TestState |
java | elastic__elasticsearch | x-pack/plugin/security/qa/saml-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlIdpMetadataBuilder.java | {
"start": 3326,
"end": 4961
} | enum ____ that may not generate the correct value in other locales
Locale.setDefault(Locale.ROOT);
return SamlUtils.getXmlContent(build(), false);
} finally {
Locale.setDefault(oldLocale);
}
}
public EntityDescriptor build() throws CertificateEncodingException {
var roleDescriptor = new IDPSSODescriptorBuilder().buildObject();
roleDescriptor.removeAllSupportedProtocols();
roleDescriptor.addSupportedProtocol(SAMLConstants.SAML20P_NS);
roleDescriptor.setWantAuthnRequestsSigned(wantSignedAuthnRequests);
if (idpUrl != null) {
final SingleSignOnService sso = SamlUtils.buildObject(SingleSignOnService.class, SingleSignOnService.DEFAULT_ELEMENT_NAME);
sso.setLocation(idpUrl);
sso.setBinding(SAMLConstants.SAML2_REDIRECT_BINDING_URI);
roleDescriptor.getSingleSignOnServices().add(sso);
}
for (X509Certificate k : this.signingCertificates) {
final KeyDescriptor keyDescriptor = new KeyDescriptorBuilder().buildObject();
keyDescriptor.setUse(UsageType.SIGNING);
final KeyInfo keyInfo = new KeyInfoBuilder().buildObject();
KeyInfoSupport.addCertificate(keyInfo, k);
keyDescriptor.setKeyInfo(keyInfo);
roleDescriptor.getKeyDescriptors().add(keyDescriptor);
}
final EntityDescriptor descriptor = new EntityDescriptorBuilder().buildObject();
descriptor.setEntityID(this.entityId);
descriptor.getRoleDescriptors().add(roleDescriptor);
return descriptor;
}
}
| and |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java | {
"start": 44754,
"end": 45310
} | class ____ extends FactoryHelper<CatalogStoreFactory> {
public CatalogStoreFactoryHelper(
CatalogStoreFactory catalogStoreFactory, CatalogStoreFactory.Context context) {
super(catalogStoreFactory, context.getOptions(), PROPERTY_VERSION);
}
}
/**
* Helper utility for validating all options for a {@link ModelProviderFactory}.
*
* @see #createModelProviderFactoryHelper(ModelProviderFactory, ModelProviderFactory.Context)
*/
@PublicEvolving
public static | CatalogStoreFactoryHelper |
java | hibernate__hibernate-orm | local-build-plugins/src/main/java/org/hibernate/orm/post/LoggingReportTask.java | {
"start": 1174,
"end": 7595
} | class ____ extends AbstractJandexAwareTask {
public static final DotName SUB_SYS_ANN_NAME = createSimple( "org.hibernate.internal.log.SubSystemLogging" );
public static final DotName MSG_LOGGER_ANN_NAME = createSimple( "org.jboss.logging.annotations.MessageLogger" );
public static final DotName ID_RANGE_ANN_NAME = createSimple( "org.jboss.logging.annotations.ValidIdRange" );
public static final DotName MSG_ANN_NAME = createSimple( "org.jboss.logging.annotations.Message" );
private final Property<RegularFile> reportFile;
public LoggingReportTask() {
setDescription( "Generates a report of \"system\" logging" );
reportFile = getProject().getObjects().fileProperty();
reportFile.convention( getProject().getLayout().getBuildDirectory().file( "orm/generated/logging/index.adoc" ) );
}
@Override
protected Provider<RegularFile> getTaskReportFileReference() {
return reportFile;
}
@TaskAction
public void generateLoggingReport() {
final TreeMap<String, SubSystem> subSystemByName = new TreeMap<>();
final TreeSet<IdRange> idRanges = new TreeSet<>( Comparator.comparing( IdRange::getMinValue ) );
final Index index = getIndexManager().getIndex();
final List<AnnotationInstance> subSysAnnUsages = index.getAnnotations( SUB_SYS_ANN_NAME );
final List<AnnotationInstance> msgLoggerAnnUsages = index.getAnnotations( MSG_LOGGER_ANN_NAME );
subSysAnnUsages.forEach( (ann) -> {
final SubSystem subSystem = new SubSystem(
ann.value( "name" ).asString(),
ann.value( "description" ).asString(),
ann.target().asClass().simpleName()
);
subSystemByName.put( subSystem.name, subSystem );
} );
msgLoggerAnnUsages.forEach( (msgLoggerAnnUsage) -> {
// find its id-range annotation, if one
final ClassInfo loggerClassInfo = msgLoggerAnnUsage.target().asClass();
final AnnotationInstance subSystemAnnUsage = loggerClassInfo.declaredAnnotation( SUB_SYS_ANN_NAME );
final SubSystem subSystem;
if ( subSystemAnnUsage != null ) {
subSystem = subSystemByName.get( subSystemAnnUsage.value( "name" ).asString() );
}
else {
subSystem = null;
}
final IdRange idRange;
final AnnotationInstance idRangeAnnUsage = loggerClassInfo.declaredAnnotation( ID_RANGE_ANN_NAME );
if ( idRangeAnnUsage == null ) {
idRange = calculateIdRange( msgLoggerAnnUsage, subSystem );
}
else {
idRange = new IdRange(
asIntOrDefault( idRangeAnnUsage, "min" , 1 ),
asIntOrDefault( idRangeAnnUsage, "max" , 999999 ),
true,
loggerClassInfo.simpleName(),
subSystem
);
if ( subSystem != null ) {
subSystem.idRange = idRange;
}
}
if ( idRange != null ) {
idRanges.add( idRange );
}
} );
generateReport( subSystemByName, idRanges );
}
private IdRange calculateIdRange(AnnotationInstance msgLoggerAnnUsage, SubSystem subSystem) {
final ClassInfo loggerClassInfo = msgLoggerAnnUsage.target().asClass();
getProject().getLogger().lifecycle( "MessageLogger (`{}`) missing id-range", loggerClassInfo.simpleName() );
final List<AnnotationInstance> messageAnnUsages = loggerClassInfo.annotations( MSG_ANN_NAME );
if ( messageAnnUsages.isEmpty() ) {
return null;
}
int minId = Integer.MAX_VALUE;
int maxId = Integer.MIN_VALUE;
for ( int i = 0; i < messageAnnUsages.size(); i++ ) {
final AnnotationInstance msgAnnUsage = messageAnnUsages.get( i );
final int msgId = asIntOrDefault( msgAnnUsage, "id", -1 );
if ( msgId < minId ) {
minId = msgId;
}
else if ( msgId > maxId ) {
maxId = msgId;
}
}
return new IdRange( minId, maxId, false, loggerClassInfo.simpleName(), subSystem );
}
private void generateReport(TreeMap<String, SubSystem> subSystemByName, TreeSet<IdRange> idRanges) {
final File reportFile = prepareReportFile();
assert reportFile.exists();
try ( final OutputStreamWriter fileWriter = new OutputStreamWriter( new FileOutputStream( reportFile ) ) ) {
writeReport( subSystemByName, idRanges, fileWriter );
}
catch (FileNotFoundException e) {
throw new RuntimeException( "Should never happen" );
}
catch (IOException e) {
throw new RuntimeException( "Error writing to report file", e );
}
}
private void writeReport(TreeMap<String, SubSystem> subSystemByName, TreeSet<IdRange> idRanges, OutputStreamWriter fileWriter) {
try {
fileWriter.write( "= Hibernate logging\n\n" );
fileWriter.write( "[[subsystems]]\n" );
fileWriter.write( "== Sub-system logging\n\n" );
subSystemByName.forEach( (name, subSystem) -> {
try {
fileWriter.write( "[[" + subSystem.getAnchorName() + "]]\n" );
fileWriter.write( "`" + subSystem.getName() + "`::\n" );
fileWriter.write( " * Logging class-name = `" + subSystem.getLoggingClassName() + "`\n" );
fileWriter.write( " * Description = " + subSystem.getDescription() + "\n" );
if ( subSystem.getIdRange() != null ) {
fileWriter.write( String.format(
" * ValidIdRange = <<%s,%s>>\n",
subSystem.getIdRange().getAnchorName(),
subSystem.getIdRange().getLabel()
) );
}
}
catch (IOException e) {
throw new RuntimeException( "Error writing sub-system entry (" + subSystem.getAnchorName() + ") to report file", e );
}
} );
fileWriter.write( "\n\n" );
fileWriter.write( "[[id-ranges]]\n" );
fileWriter.write( "== Message Id Ranges\n\n" );
idRanges.forEach( (idRange) -> {
try {
fileWriter.write( "[[" + idRange.getAnchorName() + "]]\n" );
fileWriter.write( "`" + idRange.getLabel() + "`::\n" );
fileWriter.write( String.format(
" * ValidIdRange = %s - %s (%s)\n",
idRange.minValueText,
idRange.maxValueText,
idRange.explicit ? "explicit" : "implicit"
) );
fileWriter.write( " * MessageLogger = `" + idRange.getLoggerClassName() + "`\n" );
final SubSystem subSystem = idRange.getSubSystem();
if ( subSystem != null ) {
fileWriter.write( String.format(
" * SubSystem = <<%s,%s>>\n",
subSystem.getAnchorName(),
subSystem.getName()
) );
}
}
catch (IOException e) {
throw new RuntimeException( "Error writing msg-id entry (" + idRange.getAnchorName() + ") to report file", e );
}
} );
}
catch (IOException e) {
throw new RuntimeException( "Error writing to report file", e );
}
}
private static | LoggingReportTask |
java | grpc__grpc-java | gcp-csm-observability/src/test/java/io/grpc/gcp/csm/observability/MetadataExchangerTest.java | {
"start": 1218,
"end": 7082
} | class ____ {
@Test
public void enablePluginForChannel_matches() {
MetadataExchanger exchanger =
new MetadataExchanger(Attributes.builder().build(), (name) -> null);
assertThat(exchanger.enablePluginForChannel("xds:///testing")).isTrue();
assertThat(exchanger.enablePluginForChannel("xds:/testing")).isTrue();
assertThat(exchanger.enablePluginForChannel(
"xds://traffic-director-global.xds.googleapis.com/testing:123")).isTrue();
}
@Test
public void enablePluginForChannel_doesNotMatch() {
MetadataExchanger exchanger =
new MetadataExchanger(Attributes.builder().build(), (name) -> null);
assertThat(exchanger.enablePluginForChannel("dns:///localhost")).isFalse();
assertThat(exchanger.enablePluginForChannel("xds:///[]")).isFalse();
assertThat(exchanger.enablePluginForChannel("xds://my-xds-server/testing")).isFalse();
}
@Test
public void addLabels_receivedWrongType() {
MetadataExchanger exchanger =
new MetadataExchanger(Attributes.builder().build(), (name) -> null);
Metadata metadata = new Metadata();
metadata.put(Metadata.Key.of("x-envoy-peer-metadata", Metadata.ASCII_STRING_MARSHALLER),
BaseEncoding.base64().encode(Struct.newBuilder()
.putFields("type", Value.newBuilder().setNumberValue(1).build())
.build()
.toByteArray()));
AttributesBuilder builder = Attributes.builder();
exchanger.newServerStreamPlugin(metadata).addLabels(builder);
assertThat(builder.build()).isEqualTo(Attributes.builder()
.put(stringKey("csm.mesh_id"), "unknown")
.put(stringKey("csm.workload_canonical_service"), "unknown")
.put(stringKey("csm.remote_workload_type"), "unknown")
.put(stringKey("csm.remote_workload_canonical_service"), "unknown")
.build());
}
@Test
public void addLabelsFromExchange_unknownGcpType() {
MetadataExchanger exchanger =
new MetadataExchanger(Attributes.builder().build(), (name) -> null);
Metadata metadata = new Metadata();
metadata.put(Metadata.Key.of("x-envoy-peer-metadata", Metadata.ASCII_STRING_MARSHALLER),
BaseEncoding.base64().encode(Struct.newBuilder()
.putFields("type", Value.newBuilder().setStringValue("gcp_surprise").build())
.putFields("canonical_service", Value.newBuilder().setStringValue("myservice1").build())
.build()
.toByteArray()));
AttributesBuilder builder = Attributes.builder();
exchanger.newServerStreamPlugin(metadata).addLabels(builder);
assertThat(builder.build()).isEqualTo(Attributes.builder()
.put(stringKey("csm.mesh_id"), "unknown")
.put(stringKey("csm.workload_canonical_service"), "unknown")
.put(stringKey("csm.remote_workload_type"), "gcp_surprise")
.put(stringKey("csm.remote_workload_canonical_service"), "myservice1")
.build());
}
@Test
public void addMetadata_k8s() throws Exception {
MetadataExchanger exchanger = new MetadataExchanger(
Attributes.builder()
.put(stringKey("cloud.platform"), "gcp_kubernetes_engine")
.put(stringKey("k8s.namespace.name"), "mynamespace1")
.put(stringKey("k8s.cluster.name"), "mycluster1")
.put(stringKey("cloud.availability_zone"), "myzone1")
.put(stringKey("cloud.account.id"), "0001")
.build(),
ImmutableMap.of(
"CSM_CANONICAL_SERVICE_NAME", "myservice1",
"CSM_WORKLOAD_NAME", "myworkload1")::get);
Metadata metadata = new Metadata();
exchanger.newClientCallPlugin().addMetadata(metadata);
Struct peer = Struct.parseFrom(BaseEncoding.base64().decode(metadata.get(
Metadata.Key.of("x-envoy-peer-metadata", Metadata.ASCII_STRING_MARSHALLER))));
assertThat(peer).isEqualTo(
Struct.newBuilder()
.putFields("type", Value.newBuilder().setStringValue("gcp_kubernetes_engine").build())
.putFields("canonical_service", Value.newBuilder().setStringValue("myservice1").build())
.putFields("workload_name", Value.newBuilder().setStringValue("myworkload1").build())
.putFields("namespace_name", Value.newBuilder().setStringValue("mynamespace1").build())
.putFields("cluster_name", Value.newBuilder().setStringValue("mycluster1").build())
.putFields("location", Value.newBuilder().setStringValue("myzone1").build())
.putFields("project_id", Value.newBuilder().setStringValue("0001").build())
.build());
}
@Test
public void addMetadata_gce() throws Exception {
MetadataExchanger exchanger = new MetadataExchanger(
Attributes.builder()
.put(stringKey("cloud.platform"), "gcp_compute_engine")
.put(stringKey("cloud.availability_zone"), "myzone1")
.put(stringKey("cloud.account.id"), "0001")
.build(),
ImmutableMap.of(
"CSM_CANONICAL_SERVICE_NAME", "myservice1",
"CSM_WORKLOAD_NAME", "myworkload1")::get);
Metadata metadata = new Metadata();
exchanger.newClientCallPlugin().addMetadata(metadata);
Struct peer = Struct.parseFrom(BaseEncoding.base64().decode(metadata.get(
Metadata.Key.of("x-envoy-peer-metadata", Metadata.ASCII_STRING_MARSHALLER))));
assertThat(peer).isEqualTo(
Struct.newBuilder()
.putFields("type", Value.newBuilder().setStringValue("gcp_compute_engine").build())
.putFields("canonical_service", Value.newBuilder().setStringValue("myservice1").build())
.putFields("workload_name", Value.newBuilder().setStringValue("myworkload1").build())
.putFields("location", Value.newBuilder().setStringValue("myzone1").build())
.putFields("project_id", Value.newBuilder().setStringValue("0001").build())
.build());
}
}
| MetadataExchangerTest |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/connector/source/lookup/FullCachingLookupProvider.java | {
"start": 1504,
"end": 2761
} | interface ____ extends LookupFunctionProvider {
/**
* Build a {@link FullCachingLookupProvider} from the specified {@link
* ScanTableSource.ScanRuntimeProvider} and {@link CacheReloadTrigger}.
*/
static FullCachingLookupProvider of(
ScanTableSource.ScanRuntimeProvider scanRuntimeProvider,
CacheReloadTrigger cacheReloadTrigger) {
return new FullCachingLookupProvider() {
@Override
public ScanTableSource.ScanRuntimeProvider getScanRuntimeProvider() {
return scanRuntimeProvider;
}
@Override
public CacheReloadTrigger getCacheReloadTrigger() {
return cacheReloadTrigger;
}
@Override
public LookupFunction createLookupFunction() {
return null;
}
};
}
/**
* Get a {@link ScanTableSource.ScanRuntimeProvider} for scanning all entries from the external
* lookup table and load into the cache.
*/
ScanTableSource.ScanRuntimeProvider getScanRuntimeProvider();
/** Get a {@link CacheReloadTrigger} for triggering the reload operation. */
CacheReloadTrigger getCacheReloadTrigger();
}
| FullCachingLookupProvider |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/UnionResultSubpartitionView.java | {
"start": 7816,
"end": 8859
} | class ____.
throw new UnsupportedOperationException("Method should never be called.");
}
@Override
public void releaseAllResources() throws IOException {
synchronized (lock) {
for (ResultSubpartitionView view : allViews.values()) {
view.releaseAllResources();
}
allViews.clear();
for (ResultSubpartitionView view : unregisteredAvailableViews) {
view.releaseAllResources();
}
unregisteredAvailableViews.clear();
for (Tuple2<ResultSubpartition.BufferAndBacklog, Integer> tuple2 : cachedBuffers) {
tuple2.f0.buffer().recycleBuffer();
}
cachedBuffers.clear();
isReleased = true;
}
}
@Override
public boolean isReleased() {
synchronized (lock) {
return isReleased;
}
}
@Override
public void resumeConsumption() {
// Only used by pipelined shuffle, which is not supported by this | yet |
java | quarkusio__quarkus | extensions/oidc/deployment/src/test/java/io/quarkus/oidc/test/OidcRequestAndResponseFilterTest.java | {
"start": 1404,
"end": 8768
} | class ____ {
@RegisterExtension
static final QuarkusDevModeTest test = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar
.addClasses(BearerAuthAndCodeFlowResource.class, BearerTokenRequestFilter.class,
BearerTokenResponseFilter.class, AuthorizationCodeFlowResponseFilter.class,
AuthorizationCodeFlowRequestFilter.class, FilterCalled.class, CallableFilterParent.class,
TenantFeatureBearerTokenResponseFilter.class, TenantFeatureBearerTokenRequestFilter.class,
TenantFeatureAuthorizationCodeFlowResponseFilter.class,
TenantFeatureAuthorizationCodeFlowRequestFilter.class,
RequestTenantFeatureAuthorizationCodeFlowRequestFilter.class,
ResponseTenantFeatureAuthorizationCodeFlowResponseFilter.class,
RequestTenantFeatureBearerTokenRequestFilter.class, RequestAndResponseFilter.class,
ResponseTenantFeatureBearerTokenResponseFilter.class)
.addAsResource(
new StringAsset(
"""
quarkus.oidc.tenant-two.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.tenant-two.client-id=quarkus-service-app
quarkus.oidc.tenant-two.credentials.secret=secret
quarkus.oidc.tenant-two.authentication.user-info-required=true
quarkus.oidc.tenant-two.tenant-paths=/bearer-and-code-flow/tenant-two*
quarkus.oidc.tenant-one.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.tenant-one.client-id=quarkus-service-app
quarkus.oidc.tenant-one.credentials.secret=secret
quarkus.oidc.tenant-one.authentication.user-info-required=true
quarkus.oidc.tenant-one.tenant-paths=/bearer-and-code-flow/tenant-one*
quarkus.oidc.code-tenant.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.code-tenant.client-id=quarkus-web-app
quarkus.oidc.code-tenant.credentials.secret=secret
quarkus.oidc.code-tenant.application-type=web-app
quarkus.oidc.code-tenant.tenant-paths=/bearer-and-code-flow/code-flow*
quarkus.oidc.bearer-tenant.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.bearer-tenant.client-id=quarkus-service-app
quarkus.oidc.bearer-tenant.credentials.secret=secret
quarkus.oidc.bearer-tenant.authentication.user-info-required=true
quarkus.oidc.bearer-tenant.tenant-paths=/bearer-and-code-flow/bearer*
quarkus.oidc.my-code-tenant.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.my-code-tenant.client-id=quarkus-web-app
quarkus.oidc.my-code-tenant.credentials.secret=secret
quarkus.oidc.my-code-tenant.application-type=web-app
quarkus.oidc.my-code-tenant.tenant-paths=/bearer-and-code-flow/tenant-feature-code-flow*
quarkus.oidc.my-bearer-tenant.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.my-bearer-tenant.client-id=quarkus-service-app
quarkus.oidc.my-bearer-tenant.credentials.secret=secret
quarkus.oidc.my-bearer-tenant.authentication.user-info-required=true
quarkus.oidc.my-bearer-tenant.tenant-paths=/bearer-and-code-flow/tenant-feature-bearer*
quarkus.oidc.request-my-code-tenant.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.request-my-code-tenant.client-id=quarkus-web-app
quarkus.oidc.request-my-code-tenant.credentials.secret=secret
quarkus.oidc.request-my-code-tenant.application-type=web-app
quarkus.oidc.request-my-code-tenant.tenant-paths=/bearer-and-code-flow/request-tenant-feature-code-flow*
quarkus.oidc.request-my-bearer-tenant.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.request-my-bearer-tenant.client-id=quarkus-service-app
quarkus.oidc.request-my-bearer-tenant.credentials.secret=secret
quarkus.oidc.request-my-bearer-tenant.authentication.user-info-required=true
quarkus.oidc.request-my-bearer-tenant.tenant-paths=/bearer-and-code-flow/request-tenant-feature-bearer*
quarkus.oidc.response-my-code-tenant.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.response-my-code-tenant.client-id=quarkus-web-app
quarkus.oidc.response-my-code-tenant.credentials.secret=secret
quarkus.oidc.response-my-code-tenant.application-type=web-app
quarkus.oidc.response-my-code-tenant.tenant-paths=/bearer-and-code-flow/response-tenant-feature-code-flow*
quarkus.oidc.response-my-bearer-tenant.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.response-my-bearer-tenant.client-id=quarkus-service-app
quarkus.oidc.response-my-bearer-tenant.credentials.secret=secret
quarkus.oidc.response-my-bearer-tenant.authentication.user-info-required=true
quarkus.oidc.response-my-bearer-tenant.tenant-paths=/bearer-and-code-flow/response-tenant-feature-bearer*
quarkus.log.category."org.htmlunit.javascript.host.css.CSSStyleSheet".level=FATAL
quarkus.log.category."org.htmlunit.css".level=FATAL
"""),
"application.properties"));
@Test
public void testTwoTenantsAndJointRequestAndResponseFilter() {
// | OidcRequestAndResponseFilterTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/OperatorPrecedenceTest.java | {
"start": 2655,
"end": 3257
} | class ____ {
boolean f(boolean a, boolean b, boolean c, boolean d, boolean e) {
boolean r = a || (b && c) && (d && e);
return r;
}
int f2(int a, int b, int c, int d) {
int e = a << (b + c) + d;
return e;
}
boolean f3(boolean a, boolean b, boolean c, boolean d, boolean e) {
boolean r = a || b && c;
return r;
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | apache__kafka | connect/json/src/main/java/org/apache/kafka/connect/json/JsonDeserializer.java | {
"start": 1569,
"end": 2965
} | class ____ implements Deserializer<JsonNode> {
private final ObjectMapper objectMapper = new ObjectMapper();
/**
* Default constructor needed by Kafka
*/
public JsonDeserializer() {
this(Set.of(), new JsonNodeFactory(true), true);
}
/**
* A constructor that additionally specifies some {@link DeserializationFeature}s
* for the deserializer
*
* @param deserializationFeatures the specified deserialization features
* @param jsonNodeFactory the json node factory to use.
*/
JsonDeserializer(
final Set<DeserializationFeature> deserializationFeatures,
final JsonNodeFactory jsonNodeFactory,
final boolean enableBlackbird
) {
objectMapper.enable(JsonReadFeature.ALLOW_LEADING_ZEROS_FOR_NUMBERS.mappedFeature());
deserializationFeatures.forEach(objectMapper::enable);
objectMapper.setNodeFactory(jsonNodeFactory);
if (enableBlackbird) {
objectMapper.registerModule(new BlackbirdModule());
}
}
@Override
public JsonNode deserialize(String topic, byte[] bytes) {
if (bytes == null)
return null;
JsonNode data;
try {
data = objectMapper.readTree(bytes);
} catch (Exception e) {
throw new SerializationException(e);
}
return data;
}
}
| JsonDeserializer |
java | spring-projects__spring-boot | module/spring-boot-data-elasticsearch-test/src/main/java/org/springframework/boot/data/elasticsearch/test/autoconfigure/DataElasticsearchTestContextBootstrapper.java | {
"start": 990,
"end": 1106
} | class ____ extends TestSliceTestContextBootstrapper<DataElasticsearchTest> {
}
| DataElasticsearchTestContextBootstrapper |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/PanacheEntityBase.java | {
"start": 1565,
"end": 30790
} | class ____ extra operations (eg. CriteriaQueries)
*
* @return the {@link Session} for this entity class
*/
@GenerateBridge
public static Session getSession() {
throw implementationInjectionMissing();
}
/**
* Persist this entity in the database, if not already persisted. This will set your ID field if it is not already set.
*
* @see #isPersistent()
* @see #persist(Iterable)
* @see #persist(Stream)
* @see #persist(Object, Object...)
*/
public void persist() {
JpaOperations.INSTANCE.persist(this);
}
/**
* Persist this entity in the database, if not already persisted. This will set your ID field if it is not already set.
* Then flushes all pending changes to the database.
*
* @see #isPersistent()
* @see #persist(Iterable)
* @see #persist(Stream)
* @see #persist(Object, Object...)
*/
public void persistAndFlush() {
JpaOperations.INSTANCE.persist(this);
JpaOperations.INSTANCE.flush(this);
}
/**
* Delete this entity from the database, if it is already persisted.
*
* @see #isPersistent()
* @see #delete(String, Object...)
* @see #delete(String, Map)
* @see #delete(String, Parameters)
* @see #deleteAll()
*/
public void delete() {
JpaOperations.INSTANCE.delete(this);
}
/**
* Returns true if this entity is persistent in the database. If yes, all modifications to
* its persistent fields will be automatically committed to the database at transaction
* commit time.
*
* @return true if this entity is persistent in the database.
*/
@JsonbTransient
// @JsonIgnore is here to avoid serialization of this property with jackson
@JsonIgnore
public boolean isPersistent() {
return JpaOperations.INSTANCE.isPersistent(this);
}
/**
* Flushes all pending changes to the database.
*/
@GenerateBridge
public static void flush() {
throw implementationInjectionMissing();
}
// Queries
/**
* Find an entity of this type by ID.
*
* @param id the ID of the entity to find.
* @return the entity found, or <code>null</code> if not found.
*/
@GenerateBridge(targetReturnTypeErased = true)
public static <T extends PanacheEntityBase> T findById(Object id) {
throw implementationInjectionMissing();
}
/**
* Find an entity of this type by ID and lock it.
*
* @param id the ID of the entity to find.
* @param lockModeType the locking strategy to be used when retrieving the entity.
* @return the entity found, or <code>null</code> if not found.
*/
@GenerateBridge(targetReturnTypeErased = true)
public static <T extends PanacheEntityBase> T findById(Object id, LockModeType lockModeType) {
throw implementationInjectionMissing();
}
/**
* Find an entity of this type by ID.
*
* @param id the ID of the entity to find.
* @return if found, an optional containing the entity, else <code>Optional.empty()</code>.
*/
@GenerateBridge
public static <T extends PanacheEntityBase> Optional<T> findByIdOptional(Object id) {
throw implementationInjectionMissing();
}
/**
* Find an entity of this type by ID.
*
* @param id the ID of the entity to find.
* @param lockModeType the locking strategy to be used when retrieving the entity.
* @return if found, an optional containing the entity, else <code>Optional.empty()</code>.
*/
@GenerateBridge
public static <T extends PanacheEntityBase> Optional<T> findByIdOptional(Object id, LockModeType lockModeType) {
throw implementationInjectionMissing();
}
/**
* Find entities of this type by their IDs.
*
* @param ids the IDs of the entities to find.
* @return a list containing the entities found, with null elements representing missing entities, with the list ordered by
* the positions of their ids in the given list of identifiers.
*/
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> findByIds(List<?> ids) {
throw implementationInjectionMissing();
}
/**
 * Find entities using a query, with optional indexed parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params optional sequence of indexed parameters
 * @return a new {@link PanacheQuery} instance for the given query
 * @see #find(String, Sort, Object...)
 * @see #find(String, Map)
 * @see #find(String, Parameters)
 * @see #list(String, Object...)
 * @see #stream(String, Object...)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> PanacheQuery<T> find(String query, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities using a query and the given sort options, with optional indexed parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params optional sequence of indexed parameters
 * @return a new {@link PanacheQuery} instance for the given query
 * @see #find(String, Object...)
 * @see #find(String, Sort, Map)
 * @see #find(String, Sort, Parameters)
 * @see #list(String, Sort, Object...)
 * @see #stream(String, Sort, Object...)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> PanacheQuery<T> find(String query, Sort sort, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities using a query, with named parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Map} of named parameters
 * @return a new {@link PanacheQuery} instance for the given query
 * @see #find(String, Sort, Map)
 * @see #find(String, Object...)
 * @see #find(String, Parameters)
 * @see #list(String, Map)
 * @see #stream(String, Map)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> PanacheQuery<T> find(String query, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities using a query and the given sort options, with named parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params {@link Map} of named parameters
 * @return a new {@link PanacheQuery} instance for the given query
 * @see #find(String, Map)
 * @see #find(String, Sort, Object...)
 * @see #find(String, Sort, Parameters)
 * @see #list(String, Sort, Map)
 * @see #stream(String, Sort, Map)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> PanacheQuery<T> find(String query, Sort sort, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities using a query, with named parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Parameters} of named parameters
 * @return a new {@link PanacheQuery} instance for the given query
 * @see #find(String, Sort, Parameters)
 * @see #find(String, Map)
 * @see #find(String, Object...)
 * @see #list(String, Parameters)
 * @see #stream(String, Parameters)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> PanacheQuery<T> find(String query, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities using a query and the given sort options, with named parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params {@link Parameters} of named parameters
 * @return a new {@link PanacheQuery} instance for the given query
 * @see #find(String, Parameters)
 * @see #find(String, Sort, Map)
 * @see #find(String, Sort, Object...)
 * @see #list(String, Sort, Parameters)
 * @see #stream(String, Sort, Parameters)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> PanacheQuery<T> find(String query, Sort sort, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find all entities of this type.
 *
 * @return a new {@link PanacheQuery} instance to find all entities of this type.
 * @see #findAll(Sort)
 * @see #listAll()
 * @see #streamAll()
 */
@GenerateBridge
public static <T extends PanacheEntityBase> PanacheQuery<T> findAll() {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find all entities of this type, in the given order.
 *
 * @param sort the sort order to use
 * @return a new {@link PanacheQuery} instance to find all entities of this type.
 * @see #findAll()
 * @see #listAll(Sort)
 * @see #streamAll(Sort)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> PanacheQuery<T> findAll(Sort sort) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query, with optional indexed parameters.
 * This method is a shortcut for <code>find(query, params).list()</code>.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params optional sequence of indexed parameters
 * @return a {@link List} containing all results, without paging
 * @see #list(String, Sort, Object...)
 * @see #list(String, Map)
 * @see #list(String, Parameters)
 * @see #find(String, Object...)
 * @see #stream(String, Object...)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> list(String query, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query and the given sort options, with optional indexed parameters.
 * This method is a shortcut for <code>find(query, sort, params).list()</code>.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params optional sequence of indexed parameters
 * @return a {@link List} containing all results, without paging
 * @see #list(String, Object...)
 * @see #list(String, Sort, Map)
 * @see #list(String, Sort, Parameters)
 * @see #find(String, Sort, Object...)
 * @see #stream(String, Sort, Object...)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> list(String query, Sort sort, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query, with named parameters.
 * This method is a shortcut for <code>find(query, params).list()</code>.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Map} of named parameters
 * @return a {@link List} containing all results, without paging
 * @see #list(String, Sort, Map)
 * @see #list(String, Object...)
 * @see #list(String, Parameters)
 * @see #find(String, Map)
 * @see #stream(String, Map)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> list(String query, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query and the given sort options, with named parameters.
 * This method is a shortcut for <code>find(query, sort, params).list()</code>.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params {@link Map} of named parameters
 * @return a {@link List} containing all results, without paging
 * @see #list(String, Map)
 * @see #list(String, Sort, Object...)
 * @see #list(String, Sort, Parameters)
 * @see #find(String, Sort, Map)
 * @see #stream(String, Sort, Map)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> list(String query, Sort sort, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query, with named parameters.
 * This method is a shortcut for <code>find(query, params).list()</code>.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Parameters} of named parameters
 * @return a {@link List} containing all results, without paging
 * @see #list(String, Sort, Parameters)
 * @see #list(String, Object...)
 * @see #list(String, Map)
 * @see #find(String, Parameters)
 * @see #stream(String, Parameters)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> list(String query, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query and the given sort options, with named parameters.
 * This method is a shortcut for <code>find(query, sort, params).list()</code>.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params {@link Parameters} of named parameters
 * @return a {@link List} containing all results, without paging
 * @see #list(String, Parameters)
 * @see #list(String, Sort, Object...)
 * @see #list(String, Sort, Map)
 * @see #find(String, Sort, Parameters)
 * @see #stream(String, Sort, Parameters)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> list(String query, Sort sort, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find all entities of this type.
 * This method is a shortcut for <code>findAll().list()</code>.
 *
 * @return a {@link List} containing all results, without paging
 * @see #listAll(Sort)
 * @see #findAll()
 * @see #streamAll()
 */
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> listAll() {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find all entities of this type, in the given order.
 * This method is a shortcut for <code>findAll(sort).list()</code>.
 *
 * @param sort the sort order to use
 * @return a {@link List} containing all results, without paging
 * @see #listAll()
 * @see #findAll(Sort)
 * @see #streamAll(Sort)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> List<T> listAll(Sort sort) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query, with optional indexed parameters.
 * This method is a shortcut for <code>find(query, params).stream()</code>.
 * It requires a transaction to work.
 * Without a transaction, the underlying cursor can be closed before the end of the stream.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params optional sequence of indexed parameters
 * @return a {@link Stream} containing all results, without paging
 * @see #stream(String, Sort, Object...)
 * @see #stream(String, Map)
 * @see #stream(String, Parameters)
 * @see #find(String, Object...)
 * @see #list(String, Object...)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> Stream<T> stream(String query, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query and the given sort options, with optional indexed parameters.
 * This method is a shortcut for <code>find(query, sort, params).stream()</code>.
 * It requires a transaction to work.
 * Without a transaction, the underlying cursor can be closed before the end of the stream.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params optional sequence of indexed parameters
 * @return a {@link Stream} containing all results, without paging
 * @see #stream(String, Object...)
 * @see #stream(String, Sort, Map)
 * @see #stream(String, Sort, Parameters)
 * @see #find(String, Sort, Object...)
 * @see #list(String, Sort, Object...)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> Stream<T> stream(String query, Sort sort, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query, with named parameters.
 * This method is a shortcut for <code>find(query, params).stream()</code>.
 * It requires a transaction to work.
 * Without a transaction, the underlying cursor can be closed before the end of the stream.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Map} of named parameters
 * @return a {@link Stream} containing all results, without paging
 * @see #stream(String, Sort, Map)
 * @see #stream(String, Object...)
 * @see #stream(String, Parameters)
 * @see #find(String, Map)
 * @see #list(String, Map)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> Stream<T> stream(String query, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query and the given sort options, with named parameters.
 * This method is a shortcut for <code>find(query, sort, params).stream()</code>.
 * It requires a transaction to work.
 * Without a transaction, the underlying cursor can be closed before the end of the stream.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params {@link Map} of named parameters
 * @return a {@link Stream} containing all results, without paging
 * @see #stream(String, Map)
 * @see #stream(String, Sort, Object...)
 * @see #stream(String, Sort, Parameters)
 * @see #find(String, Sort, Map)
 * @see #list(String, Sort, Map)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> Stream<T> stream(String query, Sort sort, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query, with named parameters.
 * This method is a shortcut for <code>find(query, params).stream()</code>.
 * It requires a transaction to work.
 * Without a transaction, the underlying cursor can be closed before the end of the stream.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Parameters} of named parameters
 * @return a {@link Stream} containing all results, without paging
 * @see #stream(String, Sort, Parameters)
 * @see #stream(String, Object...)
 * @see #stream(String, Map)
 * @see #find(String, Parameters)
 * @see #list(String, Parameters)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> Stream<T> stream(String query, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find entities matching a query and the given sort options, with named parameters.
 * This method is a shortcut for <code>find(query, sort, params).stream()</code>.
 * It requires a transaction to work.
 * Without a transaction, the underlying cursor can be closed before the end of the stream.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param sort the sort strategy to use
 * @param params {@link Parameters} of named parameters
 * @return a {@link Stream} containing all results, without paging
 * @see #stream(String, Parameters)
 * @see #stream(String, Sort, Object...)
 * @see #stream(String, Sort, Map)
 * @see #find(String, Sort, Parameters)
 * @see #list(String, Sort, Parameters)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> Stream<T> stream(String query, Sort sort, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find all entities of this type.
 * This method is a shortcut for <code>findAll().stream()</code>.
 * It requires a transaction to work.
 * Without a transaction, the underlying cursor can be closed before the end of the stream.
 *
 * @return a {@link Stream} containing all results, without paging
 * @see #streamAll(Sort)
 * @see #findAll()
 * @see #listAll()
 */
@GenerateBridge
public static <T extends PanacheEntityBase> Stream<T> streamAll() {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Find all entities of this type, in the given order.
 * This method is a shortcut for <code>findAll(sort).stream()</code>.
 * It requires a transaction to work.
 * Without a transaction, the underlying cursor can be closed before the end of the stream.
 *
 * @param sort the sort order to use
 * @return a {@link Stream} containing all results, without paging
 * @see #streamAll()
 * @see #findAll(Sort)
 * @see #listAll(Sort)
 */
@GenerateBridge
public static <T extends PanacheEntityBase> Stream<T> streamAll(Sort sort) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Counts the number of this type of entity in the database.
 *
 * @return the number of this type of entity in the database.
 * @see #count(String, Object...)
 * @see #count(String, Map)
 * @see #count(String, Parameters)
 */
@GenerateBridge
public static long count() {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Counts the number of this type of entity matching the given query, with optional indexed parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params optional sequence of indexed parameters
 * @return the number of entities counted.
 * @see #count()
 * @see #count(String, Map)
 * @see #count(String, Parameters)
 */
@GenerateBridge
public static long count(String query, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Counts the number of this type of entity matching the given query, with named parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Map} of named parameters
 * @return the number of entities counted.
 * @see #count()
 * @see #count(String, Object...)
 * @see #count(String, Parameters)
 */
@GenerateBridge
public static long count(String query, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Counts the number of this type of entity matching the given query, with named parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Parameters} of named parameters
 * @return the number of entities counted.
 * @see #count()
 * @see #count(String, Object...)
 * @see #count(String, Map)
 */
@GenerateBridge
public static long count(String query, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Delete all entities of this type from the database.
 *
 * WARNING: the default implementation of this method uses a bulk delete query and ignores
 * cascading rules from the JPA model.
 *
 * @return the number of entities deleted.
 * @see #delete(String, Object...)
 * @see #delete(String, Map)
 * @see #delete(String, Parameters)
 */
@GenerateBridge
public static long deleteAll() {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Delete an entity of this type by ID.
 *
 * @param id the ID of the entity to delete.
 * @return {@code true} if the entity was deleted, {@code false} if it was not deleted (not found).
 */
@GenerateBridge
public static boolean deleteById(Object id) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Delete all entities of this type matching the given query, with optional indexed parameters.
 *
 * WARNING: the default implementation of this method uses a bulk delete query and ignores
 * cascading rules from the JPA model.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params optional sequence of indexed parameters
 * @return the number of entities deleted.
 * @see #deleteAll()
 * @see #delete(String, Map)
 * @see #delete(String, Parameters)
 */
@GenerateBridge
public static long delete(String query, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Delete all entities of this type matching the given query, with named parameters.
 *
 * WARNING: the default implementation of this method uses a bulk delete query and ignores
 * cascading rules from the JPA model.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Map} of named parameters
 * @return the number of entities deleted.
 * @see #deleteAll()
 * @see #delete(String, Object...)
 * @see #delete(String, Parameters)
 */
@GenerateBridge
public static long delete(String query, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Delete all entities of this type matching the given query, with named parameters.
 *
 * WARNING: the default implementation of this method uses a bulk delete query and ignores
 * cascading rules from the JPA model.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Parameters} of named parameters
 * @return the number of entities deleted.
 * @see #deleteAll()
 * @see #delete(String, Object...)
 * @see #delete(String, Map)
 */
@GenerateBridge
public static long delete(String query, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Persist all given entities.
 *
 * @param entities the entities to persist
 * @see #persist()
 * @see #persist(Stream)
 * @see #persist(Object,Object...)
 */
@GenerateBridge(callSuperMethod = true)
public static void persist(Iterable<?> entities) {
    // Real implementation (not a stub): delegates to the shared JPA operations singleton.
    JpaOperations.INSTANCE.persist(entities);
}
/**
 * Persist all given entities.
 *
 * @param entities the entities to persist
 * @see #persist()
 * @see #persist(Iterable)
 * @see #persist(Object,Object...)
 */
@GenerateBridge(callSuperMethod = true)
public static void persist(Stream<?> entities) {
    // Real implementation (not a stub): delegates to the shared JPA operations singleton.
    JpaOperations.INSTANCE.persist(entities);
}
/**
 * Persist all given entities.
 *
 * @param firstEntity the first entity to persist
 * @param entities the other entities to persist
 * @see #persist()
 * @see #persist(Stream)
 * @see #persist(Iterable)
 */
@GenerateBridge(callSuperMethod = true)
public static void persist(Object firstEntity, Object... entities) {
    // Real implementation (not a stub): delegates to the shared JPA operations singleton.
    JpaOperations.INSTANCE.persist(firstEntity, entities);
}
/**
 * Update all entities of this type matching the given query, with optional indexed parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params optional sequence of indexed parameters
 * @return the number of entities updated.
 * @see #update(String, Map)
 * @see #update(String, Parameters)
 */
@GenerateBridge
public static int update(String query, Object... params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Update all entities of this type matching the given query, with named parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Map} of named parameters
 * @return the number of entities updated.
 * @see #update(String, Object...)
 * @see #update(String, Parameters)
 */
@GenerateBridge
public static int update(String query, Map<String, Object> params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
/**
 * Update all entities of this type matching the given query, with named parameters.
 *
 * @param query a {@link io.quarkus.hibernate.orm.panache query string}
 * @param params {@link Parameters} of named parameters
 * @return the number of entities updated.
 * @see #update(String, Object...)
 * @see #update(String, Map)
 */
@GenerateBridge
public static int update(String query, Parameters params) {
    // Stub: the real implementation is generated at build time (see @GenerateBridge).
    throw implementationInjectionMissing();
}
}
| for |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/ServiceLocatorFactoryBean.java | {
"start": 15514,
"end": 16149
} | interface ____");
Class<?>[] paramTypes = method.getParameterTypes();
Method interfaceMethod = serviceLocatorInterface.getMethod(method.getName(), paramTypes);
Class<?> serviceLocatorReturnType = interfaceMethod.getReturnType();
// Check whether the method is a valid service locator.
if (paramTypes.length > 1 || void.class == serviceLocatorReturnType) {
throw new UnsupportedOperationException(
"May only call methods with signature '<type> xxx()' or '<type> xxx(<idtype> id)' " +
"on factory interface, but tried to call: " + interfaceMethod);
}
return serviceLocatorReturnType;
}
}
}
| specified |
java | apache__maven | compat/maven-model/src/main/java/org/apache/maven/model/merge/ModelMerger.java | {
"start": 93732,
"end": 93999
} | class ____ implements KeyComputer<MailingList> {
@Override
public Object key(MailingList mailingList) {
return getMailingListKey(mailingList);
}
}
/**
* KeyComputer for Developer
*/
private | MailingListKeyComputer |
java | apache__spark | core/src/main/java/org/apache/spark/shuffle/sort/SpillInfo.java | {
"start": 1005,
"end": 1293
} | class ____ {
final long[] partitionLengths;
final File file;
final TempShuffleBlockId blockId;
SpillInfo(int numPartitions, File file, TempShuffleBlockId blockId) {
this.partitionLengths = new long[numPartitions];
this.file = file;
this.blockId = blockId;
}
}
| SpillInfo |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/monitor/internal/EmptyEventMonitor.java | {
"start": 775,
"end": 7454
} | class ____ implements EventMonitor {
@Override
public DiagnosticEvent beginSessionOpenEvent() {
return null;
}
@Override
public void completeSessionOpenEvent(DiagnosticEvent sessionOpenEvent, SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginSessionClosedEvent() {
return null;
}
@Override
public void completeSessionClosedEvent(
DiagnosticEvent sessionClosedEvent,
SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginJdbcConnectionAcquisitionEvent() {
return null;
}
@Override
public void completeJdbcConnectionAcquisitionEvent(
DiagnosticEvent jdbcConnectionAcquisitionEvent,
SharedSessionContractImplementor session,
Object tenantId) {
}
@Override
public DiagnosticEvent beginJdbcConnectionReleaseEvent() {
return null;
}
@Override
public void completeJdbcConnectionReleaseEvent(
DiagnosticEvent jdbcConnectionReleaseEvent,
SharedSessionContractImplementor session,
Object tenantId) {
}
@Override
public DiagnosticEvent beginJdbcPreparedStatementCreationEvent() {
return null;
}
@Override
public void completeJdbcPreparedStatementCreationEvent(
DiagnosticEvent jdbcPreparedStatementCreation,
String preparedStatementSql) {
}
@Override
public DiagnosticEvent beginJdbcPreparedStatementExecutionEvent() {
return null;
}
@Override
public void completeJdbcPreparedStatementExecutionEvent(
DiagnosticEvent jdbcPreparedStatementExecutionEvent,
String preparedStatementSql) {
}
@Override
public DiagnosticEvent beginJdbcBatchExecutionEvent() {
return null;
}
@Override
public void completeJdbcBatchExecutionEvent(DiagnosticEvent jdbcBatchExecutionEvent, String statementSql) {
}
@Override
public DiagnosticEvent beginCachePutEvent() {
return null;
}
@Override
public void completeCachePutEvent(
DiagnosticEvent cachePutEvent,
SharedSessionContractImplementor session,
Region region,
boolean cacheContentChanged,
CacheActionDescription description) {
}
@Override
public void completeCachePutEvent(
DiagnosticEvent cachePutEvent,
SharedSessionContractImplementor session,
CachedDomainDataAccess cachedDomainDataAccess,
EntityPersister persister,
boolean cacheContentChanged,
CacheActionDescription description) {
}
@Override
public void completeCachePutEvent(
DiagnosticEvent cachePutEvent,
SharedSessionContractImplementor session,
CachedDomainDataAccess cachedDomainDataAccess,
EntityPersister persister,
boolean cacheContentChanged,
boolean isNatualId,
CacheActionDescription description) {
}
@Override
public void completeCachePutEvent(
DiagnosticEvent cachePutEvent,
SharedSessionContractImplementor session,
CachedDomainDataAccess cachedDomainDataAccess,
CollectionPersister persister,
boolean cacheContentChanged,
CacheActionDescription description) {
}
@Override
public DiagnosticEvent beginCacheGetEvent() {
return null;
}
@Override
public void completeCacheGetEvent(
DiagnosticEvent cacheGetEvent,
SharedSessionContractImplementor session,
Region region,
boolean hit) {
}
@Override
public void completeCacheGetEvent(
DiagnosticEvent cacheGetEvent,
SharedSessionContractImplementor session,
Region region,
EntityPersister persister,
boolean isNaturalKey,
boolean hit) {
}
@Override
public void completeCacheGetEvent(
DiagnosticEvent cacheGetEvent,
SharedSessionContractImplementor session,
Region region,
CollectionPersister persister,
boolean hit) {
}
@Override
public DiagnosticEvent beginFlushEvent() {
return null;
}
@Override
public void completeFlushEvent(DiagnosticEvent flushEvent, FlushEvent event) {
}
@Override
public void completeFlushEvent(DiagnosticEvent flushEvent, FlushEvent event, boolean autoFlush) {
}
@Override
public DiagnosticEvent beginPartialFlushEvent() {
return null;
}
@Override
public void completePartialFlushEvent(DiagnosticEvent flushEvent, AutoFlushEvent event) {
}
@Override
public DiagnosticEvent beginDirtyCalculationEvent() {
return null;
}
@Override
public void completeDirtyCalculationEvent(
DiagnosticEvent dirtyCalculationEvent,
SharedSessionContractImplementor session,
EntityPersister persister,
EntityEntry entry,
int[] dirtyProperties) {
}
@Override
public DiagnosticEvent beginPrePartialFlush() {
return null;
}
@Override
public void completePrePartialFlush(
DiagnosticEvent prePartialFlush,
SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginEntityInsertEvent() {
return null;
}
@Override
public void completeEntityInsertEvent(
DiagnosticEvent event,
Object id, String entityName,
boolean success,
SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginEntityUpdateEvent() {
return null;
}
@Override
public void completeEntityUpdateEvent(
DiagnosticEvent event,
Object id, String entityName,
boolean success,
SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginEntityUpsertEvent() {
return null;
}
@Override
public void completeEntityUpsertEvent(DiagnosticEvent event, Object id, String entityName, boolean success, SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginEntityDeleteEvent() {
return null;
}
@Override
public void completeEntityDeleteEvent(
DiagnosticEvent event,
Object id, String entityName,
boolean success,
SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginEntityLockEvent() {
return null;
}
@Override
public void completeEntityLockEvent(DiagnosticEvent event, Object id, String entityName, LockMode lockMode, boolean success, SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginCollectionRecreateEvent() {
return null;
}
@Override
public void completeCollectionRecreateEvent(DiagnosticEvent event, Object id, String role, boolean success, SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginCollectionUpdateEvent() {
return null;
}
@Override
public void completeCollectionUpdateEvent(DiagnosticEvent event, Object id, String role, boolean success, SharedSessionContractImplementor session) {
}
@Override
public DiagnosticEvent beginCollectionRemoveEvent() {
return null;
}
@Override
public void completeCollectionRemoveEvent(DiagnosticEvent event, Object id, String role, boolean success, SharedSessionContractImplementor session) {
}
}
| EmptyEventMonitor |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/metadata/properties/MetaProperties.java | {
"start": 1018,
"end": 1139
} | class ____ contains the per-log-directory information stored in an individual
* meta.properties file.
*/
public final | which |
java | processing__processing4 | build/shared/tools/MovieMaker/src/processing/app/tools/FFmpegEngine.java | {
"start": 9540,
"end": 10211
} | class ____ implements Runnable {
final private InputStream inputStream;
final private Consumer<String> consumeInputLine;
public StreamGobbler(InputStream inputStream, Consumer<String> consumeInputLine) {
this.inputStream = inputStream;
this.consumeInputLine = consumeInputLine;
}
public void run() {
new BufferedReader(new InputStreamReader(inputStream)).lines().forEach(consumeInputLine);
}
}
static NumberFormat formatter = NumberFormat.getInstance();
static String nfc(int num) {
// if (formatter == null) {
// formatter = NumberFormat.getInstance();
// }
return formatter.format(num);
}
} | StreamGobbler |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/OptaPlannerEndpointBuilderFactory.java | {
"start": 21257,
"end": 22603
} | interface ____
extends
AdvancedOptaPlannerEndpointConsumerBuilder,
AdvancedOptaPlannerEndpointProducerBuilder {
default OptaPlannerEndpointBuilder basic() {
return (OptaPlannerEndpointBuilder) this;
}
/**
* SolverManager.
*
* The option is a:
* <code>org.optaplanner.core.api.solver.SolverManager</code> type.
*
* Group: advanced
*
* @param solverManager the value to set
* @return the dsl builder
*/
default AdvancedOptaPlannerEndpointBuilder solverManager(org.optaplanner.core.api.solver.SolverManager solverManager) {
doSetProperty("solverManager", solverManager);
return this;
}
/**
* SolverManager.
*
* The option will be converted to a
* <code>org.optaplanner.core.api.solver.SolverManager</code> type.
*
* Group: advanced
*
* @param solverManager the value to set
* @return the dsl builder
*/
default AdvancedOptaPlannerEndpointBuilder solverManager(String solverManager) {
doSetProperty("solverManager", solverManager);
return this;
}
}
public | AdvancedOptaPlannerEndpointBuilder |
java | apache__camel | components/camel-mail/src/test/java/org/apache/camel/component/mail/MailMultipleRecipientsTest.java | {
"start": 1293,
"end": 3981
} | class ____ extends CamelTestSupport {
private static final MailboxUser claus = Mailbox.getOrCreateUser("claus", "secret");
private static final MailboxUser willem = Mailbox.getOrCreateUser("willem", "secret");
private static final MailboxUser hadrian = Mailbox.getOrCreateUser("hadrian", "secret");
private static final MailboxUser tracy = Mailbox.getOrCreateUser("tracy", "secret");
@Test
public void testSendWithMultipleRecipientsInHeader() throws Exception {
Mailbox.clearAll();
// START SNIPPET: e1
Map<String, Object> headers = new HashMap<>();
// test with both comma and semi colon as Camel supports both kind of separators
headers.put("to", "claus@localhost, willem@localhost ; hadrian@localhost, \"Snell, Tracy\" <tracy@localhost>");
headers.put("cc", "james@localhost");
assertMailbox("claus");
assertMailbox("willem");
assertMailbox("hadrian");
assertMailbox("tracy");
template.sendBodyAndHeaders(claus.uriPrefix(Protocol.smtp), "Hello World", headers);
// END SNIPPET: e1
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSendWithMultipleRecipientsPreConfigured() throws Exception {
Mailbox.clearAll();
assertMailbox("claus");
assertMailbox("willem");
// START SNIPPET: e2
// here we have pre configured the to receivers to claus and willem. Notice we use comma to separate
// the two recipients. Camel also support using colon as separator char
template.sendBody(claus.uriPrefix(Protocol.smtp) + "&to=claus@localhost,willem@localhost&cc=james@localhost",
"Hello World");
// END SNIPPET: e2
MockEndpoint.assertIsSatisfied(context);
}
private void assertMailbox(String name) {
MockEndpoint mock = getMockEndpoint("mock:" + name);
mock.expectedMessageCount(1);
mock.expectedBodiesReceived("Hello World\r\n");
mock.expectedHeaderReceived("cc", "james@localhost");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(claus.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100").to("mock:claus");
from(willem.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100").to("mock:willem");
from(hadrian.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100").to("mock:hadrian");
from(tracy.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100").to("mock:tracy");
}
};
}
}
| MailMultipleRecipientsTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/sort/BinaryKVExternalMerger.java | {
"start": 1592,
"end": 4187
} | class ____
extends AbstractBinaryExternalMerger<Tuple2<BinaryRowData, BinaryRowData>> {
private final BinaryRowDataSerializer keySerializer;
private final BinaryRowDataSerializer valueSerializer;
private final RecordComparator comparator;
public BinaryKVExternalMerger(
IOManager ioManager,
int pageSize,
int maxFanIn,
SpillChannelManager channelManager,
BinaryRowDataSerializer keySerializer,
BinaryRowDataSerializer valueSerializer,
RecordComparator comparator,
boolean compressionEnabled,
BlockCompressionFactory compressionCodecFactory,
int compressionBlockSize) {
super(
ioManager,
pageSize,
maxFanIn,
channelManager,
compressionEnabled,
compressionCodecFactory,
compressionBlockSize);
this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer;
this.comparator = comparator;
}
@Override
protected List<Tuple2<BinaryRowData, BinaryRowData>> mergeReusedEntries(int size) {
ArrayList<Tuple2<BinaryRowData, BinaryRowData>> reused = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
reused.add(
new Tuple2<>(keySerializer.createInstance(), valueSerializer.createInstance()));
}
return reused;
}
@Override
protected MutableObjectIterator<Tuple2<BinaryRowData, BinaryRowData>>
channelReaderInputViewIterator(AbstractChannelReaderInputView inView) {
return new ChannelReaderKVInputViewIterator<>(
inView, null, keySerializer.duplicate(), valueSerializer.duplicate());
}
@Override
protected Comparator<Tuple2<BinaryRowData, BinaryRowData>> mergeComparator() {
return (o1, o2) -> comparator.compare(o1.f0, o2.f0);
}
@Override
protected void writeMergingOutput(
MutableObjectIterator<Tuple2<BinaryRowData, BinaryRowData>> mergeIterator,
AbstractPagedOutputView output)
throws IOException {
// read the merged stream and write the data back
Tuple2<BinaryRowData, BinaryRowData> kv =
new Tuple2<>(keySerializer.createInstance(), valueSerializer.createInstance());
while ((kv = mergeIterator.next(kv)) != null) {
keySerializer.serialize(kv.f0, output);
valueSerializer.serialize(kv.f1, output);
}
}
}
| BinaryKVExternalMerger |
java | apache__flink | flink-core-api/src/main/java/org/apache/flink/api/java/tuple/Tuple6.java | {
"start": 2340,
"end": 8615
} | class ____<T0, T1, T2, T3, T4, T5> extends Tuple {
private static final long serialVersionUID = 1L;
/** Field 0 of the tuple. */
public T0 f0;
/** Field 1 of the tuple. */
public T1 f1;
/** Field 2 of the tuple. */
public T2 f2;
/** Field 3 of the tuple. */
public T3 f3;
/** Field 4 of the tuple. */
public T4 f4;
/** Field 5 of the tuple. */
public T5 f5;
/** Creates a new tuple where all fields are null. */
public Tuple6() {}
/**
* Creates a new tuple and assigns the given values to the tuple's fields.
*
* @param f0 The value for field 0
* @param f1 The value for field 1
* @param f2 The value for field 2
* @param f3 The value for field 3
* @param f4 The value for field 4
* @param f5 The value for field 5
*/
public Tuple6(T0 f0, T1 f1, T2 f2, T3 f3, T4 f4, T5 f5) {
this.f0 = f0;
this.f1 = f1;
this.f2 = f2;
this.f3 = f3;
this.f4 = f4;
this.f5 = f5;
}
@Override
public int getArity() {
return 6;
}
@Override
@SuppressWarnings("unchecked")
public <T> T getField(int pos) {
switch (pos) {
case 0:
return (T) this.f0;
case 1:
return (T) this.f1;
case 2:
return (T) this.f2;
case 3:
return (T) this.f3;
case 4:
return (T) this.f4;
case 5:
return (T) this.f5;
default:
throw new IndexOutOfBoundsException(String.valueOf(pos));
}
}
@Override
@SuppressWarnings("unchecked")
public <T> void setField(T value, int pos) {
switch (pos) {
case 0:
this.f0 = (T0) value;
break;
case 1:
this.f1 = (T1) value;
break;
case 2:
this.f2 = (T2) value;
break;
case 3:
this.f3 = (T3) value;
break;
case 4:
this.f4 = (T4) value;
break;
case 5:
this.f5 = (T5) value;
break;
default:
throw new IndexOutOfBoundsException(String.valueOf(pos));
}
}
/**
* Sets new values to all fields of the tuple.
*
* @param f0 The value for field 0
* @param f1 The value for field 1
* @param f2 The value for field 2
* @param f3 The value for field 3
* @param f4 The value for field 4
* @param f5 The value for field 5
*/
public void setFields(T0 f0, T1 f1, T2 f2, T3 f3, T4 f4, T5 f5) {
this.f0 = f0;
this.f1 = f1;
this.f2 = f2;
this.f3 = f3;
this.f4 = f4;
this.f5 = f5;
}
// -------------------------------------------------------------------------------------------------
// standard utilities
// -------------------------------------------------------------------------------------------------
/**
* Creates a string representation of the tuple in the form (f0, f1, f2, f3, f4, f5), where the
* individual fields are the value returned by calling {@link Object#toString} on that field.
*
* @return The string representation of the tuple.
*/
@Override
public String toString() {
return "("
+ arrayAwareToString(this.f0)
+ ","
+ arrayAwareToString(this.f1)
+ ","
+ arrayAwareToString(this.f2)
+ ","
+ arrayAwareToString(this.f3)
+ ","
+ arrayAwareToString(this.f4)
+ ","
+ arrayAwareToString(this.f5)
+ ")";
}
/**
* Deep equality for tuples by calling equals() on the tuple members.
*
* @param o the object checked for equality
* @return true if this is equal to o.
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof Tuple6)) {
return false;
}
@SuppressWarnings("rawtypes")
Tuple6 tuple = (Tuple6) o;
if (f0 != null ? !f0.equals(tuple.f0) : tuple.f0 != null) {
return false;
}
if (f1 != null ? !f1.equals(tuple.f1) : tuple.f1 != null) {
return false;
}
if (f2 != null ? !f2.equals(tuple.f2) : tuple.f2 != null) {
return false;
}
if (f3 != null ? !f3.equals(tuple.f3) : tuple.f3 != null) {
return false;
}
if (f4 != null ? !f4.equals(tuple.f4) : tuple.f4 != null) {
return false;
}
if (f5 != null ? !f5.equals(tuple.f5) : tuple.f5 != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = f0 != null ? f0.hashCode() : 0;
result = 31 * result + (f1 != null ? f1.hashCode() : 0);
result = 31 * result + (f2 != null ? f2.hashCode() : 0);
result = 31 * result + (f3 != null ? f3.hashCode() : 0);
result = 31 * result + (f4 != null ? f4.hashCode() : 0);
result = 31 * result + (f5 != null ? f5.hashCode() : 0);
return result;
}
/**
* Shallow tuple copy.
*
* @return A new Tuple with the same fields as this.
*/
@Override
@SuppressWarnings("unchecked")
public Tuple6<T0, T1, T2, T3, T4, T5> copy() {
return new Tuple6<>(this.f0, this.f1, this.f2, this.f3, this.f4, this.f5);
}
/**
* Creates a new tuple and assigns the given values to the tuple's fields. This is more
* convenient than using the constructor, because the compiler can infer the generic type
* arguments implicitly. For example: {@code Tuple3.of(n, x, s)} instead of {@code new
* Tuple3<Integer, Double, String>(n, x, s)}
*/
public static <T0, T1, T2, T3, T4, T5> Tuple6<T0, T1, T2, T3, T4, T5> of(
T0 f0, T1 f1, T2 f2, T3 f3, T4 f4, T5 f5) {
return new Tuple6<>(f0, f1, f2, f3, f4, f5);
}
}
| Tuple6 |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/BuiltInSqlOperator.java | {
"start": 1655,
"end": 1708
} | interface ____ extend from it.
*/
@Internal
public | would |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/AnnotationBeanNameGenerator.java | {
"start": 10876,
"end": 11499
} | class ____ set");
String shortClassName = ClassUtils.getShortName(beanClassName);
return StringUtils.uncapitalizeAsProperty(shortClassName);
}
/**
* Determine if the supplied annotation type declares a {@code value()} attribute
* with an explicit alias configured via {@link AliasFor @AliasFor}.
* @since 6.2.3
*/
private static boolean hasExplicitlyAliasedValueAttribute(Class<? extends Annotation> annotationType) {
Method valueAttribute = ReflectionUtils.findMethod(annotationType, MergedAnnotation.VALUE);
return (valueAttribute != null && valueAttribute.isAnnotationPresent(AliasFor.class));
}
}
| name |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java | {
"start": 4329,
"end": 38099
} | class ____ extends ESTestCase {
private TrainedModelProvider trainedModelProvider;
private ThreadPool threadPool;
private ClusterService clusterService;
private InferenceAuditor auditor;
private TrainedModelStatsService trainedModelStatsService;
private CircuitBreaker circuitBreaker;
@Before
public void setUpComponents() {
threadPool = new TestThreadPool(
"ModelLoadingServiceTests",
new ScalingExecutorBuilder(
UTILITY_THREAD_POOL_NAME,
1,
4,
TimeValue.timeValueMinutes(10),
false,
"xpack.ml.utility_thread_pool"
)
);
trainedModelProvider = mock(TrainedModelProvider.class);
clusterService = mock(ClusterService.class);
auditor = mock(InferenceAuditor.class);
trainedModelStatsService = mock(TrainedModelStatsService.class);
doAnswer(a -> null).when(auditor).error(any(String.class), any(String.class));
doAnswer(a -> null).when(auditor).info(any(String.class), any(String.class));
doAnswer(a -> null).when(auditor).warning(any(String.class), any(String.class));
doAnswer((invocationOnMock) -> null).when(clusterService).addListener(any(ClusterStateListener.class));
when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name")).build());
circuitBreaker = new CustomCircuitBreaker(1000);
}
@After
public void terminateThreadPool() {
terminate(threadPool);
}
public void testGetCachedModels() throws Exception {
String model1 = "test-load-model-1";
String model2 = "test-load-model-2";
String model3 = "test-load-model-3";
withTrainedModel(model1, 1L);
withTrainedModel(model2, 1L);
withTrainedModel(model3, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
modelLoadingService.clusterChanged(ingestChangedEvent(model1, model2, model3));
String[] modelIds = new String[] { model1, model2, model3 };
for (int i = 0; i < 10; i++) {
String model = modelIds[i % 3];
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
assertThat(future.get(), is(not(nullValue())));
}
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model1), eq(false), any());
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model2), eq(false), any());
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model3), eq(false), any());
assertTrue(modelLoadingService.isModelCached(model1));
assertTrue(modelLoadingService.isModelCached(model2));
assertTrue(modelLoadingService.isModelCached(model3));
// Test invalidate cache for model3
modelLoadingService.clusterChanged(ingestChangedEvent(model1, model2));
for (int i = 0; i < 10; i++) {
String model = modelIds[i % 3];
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
assertThat(future.get(), is(not(nullValue())));
}
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model1), eq(false), any());
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model2), eq(false), any());
// It is not referenced, so called eagerly
verify(trainedModelProvider, times(4)).getTrainedModelForInference(eq(model3), eq(false), any());
}
public void testMaxCachedLimitReached() throws Exception {
String model1 = "test-cached-limit-load-model-1";
String model2 = "test-cached-limit-load-model-2";
String model3 = "test-cached-limit-load-model-3";
String[] modelIds = new String[] { model1, model2, model3 };
withTrainedModel(model1, 10L);
withTrainedModel(model2, 6L);
withTrainedModel(model3, 15L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.builder().put(ModelLoadingService.INFERENCE_MODEL_CACHE_SIZE.getKey(), ByteSizeValue.ofBytes(20L)).build(),
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
// We want to be notified when the models are loaded which happens in a background thread
ModelLoadedTracker loadedTracker = new ModelLoadedTracker(Arrays.asList(modelIds));
for (String modelId : modelIds) {
modelLoadingService.addModelLoadedListener(modelId, loadedTracker.actionListener());
}
modelLoadingService.clusterChanged(ingestChangedEvent(model1, model2, model3));
// Should have been loaded from the cluster change event but it is unknown in what order
// the loading occurred or which models are currently in the cache due to evictions.
// Verify that we have at least loaded all three
assertBusy(() -> {
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model1), eq(false), any());
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model2), eq(false), any());
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model3), eq(false), any());
});
// all models loaded put in the cache
assertBusy(() -> assertTrue(loadedTracker.allModelsLoaded()), 2, TimeUnit.SECONDS);
for (int i = 0; i < 10; i++) {
// Only reference models 1 and 2, so that cache is only invalidated once for model3 (after initial load)
String model = modelIds[i % 2];
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
assertThat(future.get(), is(not(nullValue())));
}
// Depending on the order the models were first loaded in the first step
// models 1 & 2 may have been evicted by model 3 in which case they have
// been loaded at most twice
verify(trainedModelProvider, atMost(2)).getTrainedModelForInference(eq(model1), eq(false), any());
verify(trainedModelProvider, atMost(2)).getTrainedModelForInference(eq(model2), eq(false), any());
// Only loaded requested once on the initial load from the change event
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model3), eq(false), any());
// model 3 has been loaded and evicted exactly once
verify(trainedModelStatsService, times(1)).queueStats(argThat(o -> o.getModelId().equals(model3)), anyBoolean());
// Load model 3, should invalidate 1 and 2
for (int i = 0; i < 10; i++) {
PlainActionFuture<LocalModel> future3 = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model3, null, future3);
assertThat(future3.get(), is(not(nullValue())));
}
verify(trainedModelProvider, times(2)).getTrainedModelForInference(eq(model3), eq(false), any());
verify(trainedModelStatsService, atMost(2)).queueStats(argThat(o -> o.getModelId().equals(model1)), anyBoolean());
verify(trainedModelStatsService, atMost(2)).queueStats(argThat(o -> o.getModelId().equals(model2)), anyBoolean());
// Load model 1, should invalidate 3
for (int i = 0; i < 10; i++) {
PlainActionFuture<LocalModel> future1 = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model1, null, future1);
assertThat(future1.get(), is(not(nullValue())));
}
verify(trainedModelProvider, atMost(3)).getTrainedModelForInference(eq(model1), eq(false), any());
verify(trainedModelStatsService, times(2)).queueStats(argThat(o -> o.getModelId().equals(model3)), anyBoolean());
// Load model 2
for (int i = 0; i < 10; i++) {
PlainActionFuture<LocalModel> future2 = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model2, null, future2);
assertThat(future2.get(), is(not(nullValue())));
}
verify(trainedModelProvider, atMost(3)).getTrainedModelForInference(eq(model2), eq(false), any());
// Test invalidate cache for model3
// Now both model 1 and 2 should fit in cache without issues
modelLoadingService.clusterChanged(ingestChangedEvent(model1, model2));
for (int i = 0; i < 10; i++) {
String model = modelIds[i % 3];
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
assertThat(future.get(), is(not(nullValue())));
}
verify(trainedModelProvider, atMost(3)).getTrainedModelForInference(eq(model1), eq(false), any());
verify(trainedModelProvider, atMost(3)).getTrainedModelForInference(eq(model2), eq(false), any());
verify(trainedModelProvider, times(5)).getTrainedModelForInference(eq(model3), eq(false), any());
}
public void testWhenCacheEnabledButNotIngestNode() throws Exception {
String model1 = "test-uncached-not-ingest-model-1";
withTrainedModel(model1, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
modelLoadingService.clusterChanged(ingestChangedEvent(false, model1));
for (int i = 0; i < 10; i++) {
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model1, null, future);
assertThat(future.get(), is(not(nullValue())));
}
assertFalse(modelLoadingService.isModelCached(model1));
verify(trainedModelProvider, times(10)).getTrainedModelForInference(eq(model1), eq(false), any());
verify(trainedModelStatsService, never()).queueStats(any(InferenceStats.class), anyBoolean());
}
public void testGetCachedMissingModel() throws Exception {
String model = "test-load-cached-missing-model";
withMissingModel(model);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
modelLoadingService.clusterChanged(ingestChangedEvent(model));
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
try {
future.get();
fail("Should not have succeeded in loaded model");
} catch (Exception ex) {
assertThat(ex.getCause().getMessage(), equalTo(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, model)));
}
assertFalse(modelLoadingService.isModelCached(model));
verify(trainedModelProvider, atMost(2)).getTrainedModelForInference(eq(model), eq(false), any());
verify(trainedModelStatsService, never()).queueStats(any(InferenceStats.class), anyBoolean());
}
public void testGetMissingModel() {
String model = "test-load-missing-model";
withMissingModel(model);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
try {
future.get();
fail("Should not have succeeded");
} catch (Exception ex) {
assertThat(ex.getCause().getMessage(), containsString(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, model)));
}
assertFalse(modelLoadingService.isModelCached(model));
}
public void testGetModelEagerly() throws Exception {
String model = "test-get-model-eagerly";
withTrainedModel(model, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
for (int i = 0; i < 3; i++) {
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
assertThat(future.get(), is(not(nullValue())));
}
verify(trainedModelProvider, times(3)).getTrainedModelForInference(eq(model), eq(false), any());
assertFalse(modelLoadingService.isModelCached(model));
verify(trainedModelStatsService, never()).queueStats(any(InferenceStats.class), anyBoolean());
}
public void testGetModelForSearch() throws Exception {
String modelId = "test-get-model-for-search";
withTrainedModel(modelId, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
for (int i = 0; i < 3; i++) {
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForAggregation(modelId, future);
assertThat(future.get(), is(not(nullValue())));
}
assertTrue(modelLoadingService.isModelCached(modelId));
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(modelId), eq(false), any());
verify(trainedModelStatsService, never()).queueStats(any(InferenceStats.class), anyBoolean());
}
public void testGetModelForLearningToRank() throws Exception {
String modelId = "test-get-model-for-ltr";
withTrainedModel(modelId, 1L, LearningToRankConfig.EMPTY_PARAMS);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
for (int i = 0; i < 3; i++) {
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForLearningToRank(modelId, future);
assertThat(future.get(), is(not(nullValue())));
}
assertTrue(modelLoadingService.isModelCached(modelId));
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(modelId), eq(false), any());
verify(trainedModelStatsService, never()).queueStats(any(InferenceStats.class), anyBoolean());
}
public void testCircuitBreakerBreak() throws Exception {
String model1 = "test-circuit-break-model-1";
String model2 = "test-circuit-break-model-2";
String model3 = "test-circuit-break-model-3";
withTrainedModel(model1, 5L);
withTrainedModel(model2, 5L);
withTrainedModel(model3, 12L);
CircuitBreaker circuitBreaker = new CustomCircuitBreaker(11);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
modelLoadingService.addModelLoadedListener(
model3,
ActionListener.wrap(
r -> fail("Should not have succeeded to load model as breaker should be reached"),
e -> assertThat(e, instanceOf(CircuitBreakingException.class))
)
);
modelLoadingService.clusterChanged(ingestChangedEvent(model1, model2, model3));
// Should have been loaded from the cluster change event but it is unknown in what order
// the loading occurred or which models are currently in the cache due to evictions.
// Verify that we have at least loaded all three
assertBusy(() -> {
verify(trainedModelProvider, times(1)).getTrainedModel(eq(model1), eq(GetTrainedModelsAction.Includes.empty()), any(), any());
verify(trainedModelProvider, times(1)).getTrainedModel(eq(model2), eq(GetTrainedModelsAction.Includes.empty()), any(), any());
verify(trainedModelProvider, times(1)).getTrainedModel(eq(model3), eq(GetTrainedModelsAction.Includes.empty()), any(), any());
});
assertBusy(() -> {
assertThat(circuitBreaker.getUsed(), equalTo(10L));
assertThat(circuitBreaker.getTrippedCount(), equalTo(1L));
});
modelLoadingService.clusterChanged(ingestChangedEvent(model1));
assertBusy(() -> assertThat(circuitBreaker.getUsed(), equalTo(5L)));
}
public void testReferenceCounting() throws Exception {
String modelId = "test-reference-counting";
withTrainedModel(modelId, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
modelLoadingService.clusterChanged(ingestChangedEvent(modelId));
PlainActionFuture<LocalModel> forPipeline = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(modelId, null, forPipeline);
final LocalModel model = forPipeline.get();
assertBusy(() -> assertEquals(2, model.getReferenceCount()));
PlainActionFuture<LocalModel> forSearch = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(modelId, null, forSearch);
forSearch.get();
assertBusy(() -> assertEquals(3, model.getReferenceCount()));
model.release();
assertBusy(() -> assertEquals(2, model.getReferenceCount()));
PlainActionFuture<LocalModel> forSearch2 = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(modelId, null, forSearch2);
forSearch2.get();
assertBusy(() -> assertEquals(3, model.getReferenceCount()));
}
public void testReferenceCountingForPipeline() throws Exception {
String modelId = "test-reference-counting-for-pipeline";
withTrainedModel(modelId, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
modelLoadingService.clusterChanged(ingestChangedEvent(modelId));
PlainActionFuture<LocalModel> forPipeline = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(modelId, null, forPipeline);
final LocalModel model = forPipeline.get();
assertBusy(() -> assertEquals(2, model.getReferenceCount()));
PlainActionFuture<LocalModel> forPipeline2 = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(modelId, null, forPipeline2);
forPipeline2.get();
assertBusy(() -> assertEquals(3, model.getReferenceCount()));
// will cause the model to be evicted
modelLoadingService.clusterChanged(ingestChangedEvent());
assertBusy(() -> assertEquals(2, model.getReferenceCount()));
}
public void testReferenceCounting_ModelIsNotCached() throws ExecutionException, InterruptedException {
String modelId = "test-reference-counting-not-cached";
withTrainedModel(modelId, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(modelId, null, future);
LocalModel model = future.get();
assertEquals(1, model.getReferenceCount());
}
public void testGetCachedModelViaModelAliases() throws Exception {
String model1 = "test-load-model-1";
String model2 = "test-load-model-2";
withTrainedModel(model1, 1L);
withTrainedModel(model2, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
modelLoadingService.clusterChanged(
aliasChangeEvent(true, new String[] { "loaded_model" }, true, List.of(Tuple.tuple(model1, "loaded_model")))
);
String[] modelIds = new String[] { model1, "loaded_model" };
for (int i = 0; i < 10; i++) {
String model = modelIds[i % 2];
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
assertThat(future.get(), is(not(nullValue())));
}
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model1), eq(false), any());
assertTrue(modelLoadingService.isModelCached(model1));
assertTrue(modelLoadingService.isModelCached("loaded_model"));
// alias change only
modelLoadingService.clusterChanged(
aliasChangeEvent(true, new String[] { "loaded_model" }, false, List.of(Tuple.tuple(model2, "loaded_model")))
);
modelIds = new String[] { model2, "loaded_model" };
for (int i = 0; i < 10; i++) {
String model = modelIds[i % 2];
PlainActionFuture<LocalModel> future = new PlainActionFuture<>();
modelLoadingService.getModelForPipeline(model, null, future);
assertThat(future.get(), is(not(nullValue())));
}
verify(trainedModelProvider, times(1)).getTrainedModelForInference(eq(model2), eq(false), any());
assertTrue(modelLoadingService.isModelCached(model2));
assertTrue(modelLoadingService.isModelCached("loaded_model"));
}
public void testAliasesGetUpdatedEvenWhenNotIngestNode() throws IOException {
String model1 = "test-load-model-1";
withTrainedModel(model1, 1L);
String model2 = "test-load-model-2";
withTrainedModel(model2, 1L);
ModelLoadingService modelLoadingService = new ModelLoadingService(
trainedModelProvider,
auditor,
threadPool,
clusterService,
trainedModelStatsService,
Settings.EMPTY,
"test-node",
circuitBreaker,
mock(XPackLicenseState.class)
);
modelLoadingService.clusterChanged(aliasChangeEvent(false, new String[0], false, List.of(Tuple.tuple(model1, "loaded_model"))));
assertThat(modelLoadingService.getModelId("loaded_model"), equalTo(model1));
modelLoadingService.clusterChanged(
aliasChangeEvent(
false,
new String[0],
false,
Arrays.asList(
Tuple.tuple(model1, "loaded_model_again"),
Tuple.tuple(model1, "loaded_model_foo"),
Tuple.tuple(model2, "loaded_model")
)
)
);
assertThat(modelLoadingService.getModelId("loaded_model"), equalTo(model2));
assertThat(modelLoadingService.getModelId("loaded_model_foo"), equalTo(model1));
assertThat(modelLoadingService.getModelId("loaded_model_again"), equalTo(model1));
}
private void withTrainedModel(String modelId, long size) {
withTrainedModel(modelId, size, ClassificationConfig.EMPTY_PARAMS);
}
@SuppressWarnings("unchecked")
private void withTrainedModel(String modelId, long size, InferenceConfig inferenceConfig) {
InferenceDefinition definition = mock(InferenceDefinition.class);
when(definition.ramBytesUsed()).thenReturn(size);
TrainedModelConfig trainedModelConfig = mock(TrainedModelConfig.class);
when(trainedModelConfig.getModelId()).thenReturn(modelId);
when(trainedModelConfig.getInferenceConfig()).thenReturn(inferenceConfig);
when(trainedModelConfig.getInput()).thenReturn(new TrainedModelInput(Arrays.asList("foo", "bar", "baz")));
when(trainedModelConfig.getModelSize()).thenReturn(size);
doAnswer(invocationOnMock -> {
@SuppressWarnings("rawtypes")
ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2];
listener.onResponse(definition);
return null;
}).when(trainedModelProvider).getTrainedModelForInference(eq(modelId), eq(false), any());
doAnswer(invocationOnMock -> {
@SuppressWarnings("rawtypes")
ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3];
listener.onResponse(trainedModelConfig);
return null;
}).when(trainedModelProvider).getTrainedModel(eq(modelId), eq(GetTrainedModelsAction.Includes.empty()), any(), any());
}
    // Stubs trainedModelProvider so that any lookup of modelId fails as if the model does not
    // exist. Randomly exercises one of two failure shapes:
    //  - getTrainedModel itself fails with INFERENCE_NOT_FOUND, or
    //  - getTrainedModel succeeds (with a zero-size mock config) but fetching the inference
    //    definition fails.
    @SuppressWarnings("unchecked")
    private void withMissingModel(String modelId) {
        if (randomBoolean()) {
            doAnswer(invocationOnMock -> {
                @SuppressWarnings("rawtypes")
                ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3];
                listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, modelId)));
                return null;
            }).when(trainedModelProvider).getTrainedModel(eq(modelId), eq(GetTrainedModelsAction.Includes.empty()), any(), any());
        } else {
            TrainedModelConfig trainedModelConfig = mock(TrainedModelConfig.class);
            when(trainedModelConfig.getModelSize()).thenReturn(0L);
            doAnswer(invocationOnMock -> {
                @SuppressWarnings("rawtypes")
                ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3];
                listener.onResponse(trainedModelConfig);
                return null;
            }).when(trainedModelProvider).getTrainedModel(eq(modelId), eq(GetTrainedModelsAction.Includes.empty()), any(), any());
            doAnswer(invocationOnMock -> {
                @SuppressWarnings("rawtypes")
                ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2];
                listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)));
                return null;
            }).when(trainedModelProvider).getTrainedModelForInference(eq(modelId), eq(false), any());
        }
        // NOTE(review): this unconditional stub uses the same argument matchers as the
        // getTrainedModelForInference stub in the else branch above, so under Mockito's
        // last-stubbing-wins rule it replaces it and the MODEL_DEFINITION_NOT_FOUND answer is
        // never returned. Confirm whether this final stub was meant to apply only in the
        // if branch.
        doAnswer(invocationOnMock -> {
            @SuppressWarnings("rawtypes")
            ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2];
            listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, modelId)));
            return null;
        }).when(trainedModelProvider).getTrainedModelForInference(eq(modelId), eq(false), any());
    }
    /**
     * Convenience overload of {@link #ingestChangedEvent(boolean, String...)} that builds the
     * event for an ingest-capable node.
     */
    private static ClusterChangedEvent ingestChangedEvent(String... modelId) throws IOException {
        return ingestChangedEvent(true, modelId);
    }
private static ClusterChangedEvent aliasChangeEvent(
boolean isIngestNode,
String[] modelId,
boolean ingestToo,
List<Tuple<String, String>> modelIdAndAliases
) throws IOException {
ClusterChangedEvent event = mock(ClusterChangedEvent.class);
Set<String> set = new HashSet<>();
set.add(ModelAliasMetadata.NAME);
if (ingestToo) {
set.add(IngestMetadata.TYPE);
}
when(event.changedCustomProjectMetadataSet()).thenReturn(set);
when(event.state()).thenReturn(withModelReferencesAndAliasChange(isIngestNode, modelId, modelIdAndAliases));
return event;
}
private static ClusterChangedEvent ingestChangedEvent(boolean isIngestNode, String... modelId) throws IOException {
ClusterChangedEvent event = mock(ClusterChangedEvent.class);
when(event.changedCustomProjectMetadataSet()).thenReturn(Collections.singleton(IngestMetadata.TYPE));
when(event.state()).thenReturn(buildClusterStateWithModelReferences(isIngestNode, modelId));
return event;
}
private static ClusterState buildClusterStateWithModelReferences(boolean isIngestNode, String... modelId) throws IOException {
return builder(isIngestNode).metadata(addIngest(Metadata.builder(), modelId)).build();
}
private static ClusterState withModelReferencesAndAliasChange(
boolean isIngestNode,
String[] modelId,
List<Tuple<String, String>> modelIdAndAliases
) throws IOException {
return builder(isIngestNode).metadata(addAliases(addIngest(Metadata.builder(), modelId), modelIdAndAliases)).build();
}
private static ClusterState.Builder builder(boolean isIngestNode) {
return ClusterState.builder(new ClusterName("_name"))
.nodes(
DiscoveryNodes.builder()
.add(
DiscoveryNodeUtils.create(
"node_name",
"node_id",
new TransportAddress(InetAddress.getLoopbackAddress(), 9300),
Collections.emptyMap(),
isIngestNode ? Collections.singleton(DiscoveryNodeRole.INGEST_ROLE) : Collections.emptySet()
)
)
.localNodeId("node_id")
.build()
);
}
private static Metadata.Builder addIngest(Metadata.Builder builder, String... modelId) throws IOException {
Map<String, PipelineConfiguration> configurations = Maps.newMapWithExpectedSize(modelId.length);
for (String id : modelId) {
configurations.put("pipeline_with_model_" + id, newConfigurationWithInferenceProcessor(id));
}
IngestMetadata ingestMetadata = new IngestMetadata(configurations);
return builder.putCustom(IngestMetadata.TYPE, ingestMetadata);
}
private static Metadata.Builder addAliases(Metadata.Builder builder, List<Tuple<String, String>> modelIdAndAliases) {
ModelAliasMetadata modelAliasMetadata = new ModelAliasMetadata(
modelIdAndAliases.stream().collect(Collectors.toMap(Tuple::v2, t -> new ModelAliasMetadata.ModelAliasEntry(t.v1())))
);
return builder.putCustom(ModelAliasMetadata.NAME, modelAliasMetadata);
}
private static PipelineConfiguration newConfigurationWithInferenceProcessor(String modelId) throws IOException {
try (
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
.map(
Collections.singletonMap(
"processors",
Collections.singletonList(
Collections.singletonMap(
InferenceProcessor.TYPE,
Collections.singletonMap(InferenceResults.MODEL_ID_RESULTS_FIELD, modelId)
)
)
)
)
) {
return new PipelineConfiguration("pipeline_with_model_" + modelId, BytesReference.bytes(xContentBuilder), XContentType.JSON);
}
}
private static | ModelLoadingServiceTests |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeFinite.java | {
"start": 657,
"end": 940
} | class ____ extends BasicErrorMessageFactory {
public static ErrorMessageFactory shouldNotBeFinite(Number actual) {
return new ShouldNotBeFinite(actual);
}
private ShouldNotBeFinite(Number actual) {
super("%nExpecting %s not to be finite", actual);
}
}
| ShouldNotBeFinite |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/resource/gif/GifBitmapProvider.java | {
"start": 519,
"end": 2139
} | class ____ implements GifDecoder.BitmapProvider {
private final BitmapPool bitmapPool;
@Nullable private final ArrayPool arrayPool;
/**
* Constructs an instance without a shared byte array pool. Byte arrays will be always constructed
* when requested.
*/
public GifBitmapProvider(BitmapPool bitmapPool) {
this(bitmapPool, /* arrayPool= */ null);
}
/** Constructs an instance with a shared array pool. Arrays will be reused where possible. */
// Public API.
@SuppressWarnings("WeakerAccess")
public GifBitmapProvider(BitmapPool bitmapPool, @Nullable ArrayPool arrayPool) {
this.bitmapPool = bitmapPool;
this.arrayPool = arrayPool;
}
@NonNull
@Override
public Bitmap obtain(int width, int height, @NonNull Bitmap.Config config) {
return bitmapPool.getDirty(width, height, config);
}
@Override
public void release(@NonNull Bitmap bitmap) {
bitmapPool.put(bitmap);
}
@NonNull
@Override
public byte[] obtainByteArray(int size) {
if (arrayPool == null) {
return new byte[size];
}
return arrayPool.get(size, byte[].class);
}
@Override
public void release(@NonNull byte[] bytes) {
if (arrayPool == null) {
return;
}
arrayPool.put(bytes);
}
@NonNull
@Override
public int[] obtainIntArray(int size) {
if (arrayPool == null) {
return new int[size];
}
return arrayPool.get(size, int[].class);
}
@SuppressWarnings("PMD.UseVarargs")
@Override
public void release(@NonNull int[] array) {
if (arrayPool == null) {
return;
}
arrayPool.put(array);
}
}
| GifBitmapProvider |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/AS2EndpointBuilderFactory.java | {
"start": 1588,
"end": 35896
} | interface ____
extends
EndpointConsumerBuilder {
default AdvancedAS2EndpointConsumerBuilder advanced() {
return (AdvancedAS2EndpointConsumerBuilder) this;
}
/**
* The value of the AS2From header of AS2 message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param as2From the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder as2From(String as2From) {
doSetProperty("as2From", as2From);
return this;
}
/**
* The structure of AS2 Message. One of: PLAIN - No encryption, no
* signature, SIGNED - No encryption, signature, ENCRYPTED - Encryption,
* no signature, ENCRYPTED_SIGNED - Encryption, signature.
*
* The option is a:
* <code>org.apache.camel.component.as2.api.AS2MessageStructure</code>
* type.
*
* Group: common
*
* @param as2MessageStructure the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder as2MessageStructure(org.apache.camel.component.as2.api.AS2MessageStructure as2MessageStructure) {
doSetProperty("as2MessageStructure", as2MessageStructure);
return this;
}
/**
* The structure of AS2 Message. One of: PLAIN - No encryption, no
* signature, SIGNED - No encryption, signature, ENCRYPTED - Encryption,
* no signature, ENCRYPTED_SIGNED - Encryption, signature.
*
* The option will be converted to a
* <code>org.apache.camel.component.as2.api.AS2MessageStructure</code>
* type.
*
* Group: common
*
* @param as2MessageStructure the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder as2MessageStructure(String as2MessageStructure) {
doSetProperty("as2MessageStructure", as2MessageStructure);
return this;
}
/**
* The value of the AS2To header of AS2 message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param as2To the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder as2To(String as2To) {
doSetProperty("as2To", as2To);
return this;
}
/**
* The version of the AS2 protocol.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: 1.1
* Group: common
*
* @param as2Version the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder as2Version(String as2Version) {
doSetProperty("as2Version", as2Version);
return this;
}
/**
* The port number of asynchronous MDN server.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: common
*
* @param asyncMdnPortNumber the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder asyncMdnPortNumber(Integer asyncMdnPortNumber) {
doSetProperty("asyncMdnPortNumber", asyncMdnPortNumber);
return this;
}
/**
* The port number of asynchronous MDN server.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: common
*
* @param asyncMdnPortNumber the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder asyncMdnPortNumber(String asyncMdnPortNumber) {
doSetProperty("asyncMdnPortNumber", asyncMdnPortNumber);
return this;
}
/**
* The name of the attached file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param attachedFileName the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder attachedFileName(String attachedFileName) {
doSetProperty("attachedFileName", attachedFileName);
return this;
}
/**
* The Client Fully Qualified Domain Name (FQDN). Used in message ids
* sent by endpoint.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: camel.apache.org
* Group: common
*
* @param clientFqdn the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder clientFqdn(String clientFqdn) {
doSetProperty("clientFqdn", clientFqdn);
return this;
}
/**
* The algorithm used to compress EDI message.
*
* The option is a:
* <code>org.apache.camel.component.as2.api.AS2CompressionAlgorithm</code> type.
*
* Group: common
*
* @param compressionAlgorithm the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder compressionAlgorithm(org.apache.camel.component.as2.api.AS2CompressionAlgorithm compressionAlgorithm) {
doSetProperty("compressionAlgorithm", compressionAlgorithm);
return this;
}
/**
* The algorithm used to compress EDI message.
*
* The option will be converted to a
* <code>org.apache.camel.component.as2.api.AS2CompressionAlgorithm</code> type.
*
* Group: common
*
* @param compressionAlgorithm the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder compressionAlgorithm(String compressionAlgorithm) {
doSetProperty("compressionAlgorithm", compressionAlgorithm);
return this;
}
/**
* The value of the Disposition-Notification-To header. Assigning a
* value to this parameter requests a message disposition notification
* (MDN) for the AS2 message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param dispositionNotificationTo the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder dispositionNotificationTo(String dispositionNotificationTo) {
doSetProperty("dispositionNotificationTo", dispositionNotificationTo);
return this;
}
/**
* The charset of the content type of EDI message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: us-ascii
* Group: common
*
* @param ediMessageCharset the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder ediMessageCharset(String ediMessageCharset) {
doSetProperty("ediMessageCharset", ediMessageCharset);
return this;
}
/**
* The transfer encoding of EDI message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param ediMessageTransferEncoding the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder ediMessageTransferEncoding(String ediMessageTransferEncoding) {
doSetProperty("ediMessageTransferEncoding", ediMessageTransferEncoding);
return this;
}
/**
* The content type of EDI message. One of application/edifact,
* application/edi-x12, application/edi-consent, application/xml.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param ediMessageType the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder ediMessageType(String ediMessageType) {
doSetProperty("ediMessageType", ediMessageType);
return this;
}
/**
* The value of the From header of AS2 message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param from the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder from(String from) {
doSetProperty("from", from);
return this;
}
/**
* The maximum size of the connection pool for http connections (client
* only).
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 5
* Group: common
*
* @param httpConnectionPoolSize the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder httpConnectionPoolSize(Integer httpConnectionPoolSize) {
doSetProperty("httpConnectionPoolSize", httpConnectionPoolSize);
return this;
}
/**
* The maximum size of the connection pool for http connections (client
* only).
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Default: 5
* Group: common
*
* @param httpConnectionPoolSize the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder httpConnectionPoolSize(String httpConnectionPoolSize) {
doSetProperty("httpConnectionPoolSize", httpConnectionPoolSize);
return this;
}
/**
* The time to live for connections in the connection pool (client
* only).
*
* The option is a: <code>java.time.Duration</code> type.
*
* Default: 15m
* Group: common
*
* @param httpConnectionPoolTtl the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder httpConnectionPoolTtl(java.time.Duration httpConnectionPoolTtl) {
doSetProperty("httpConnectionPoolTtl", httpConnectionPoolTtl);
return this;
}
/**
* The time to live for connections in the connection pool (client
* only).
*
* The option will be converted to a <code>java.time.Duration</code>
* type.
*
* Default: 15m
* Group: common
*
* @param httpConnectionPoolTtl the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder httpConnectionPoolTtl(String httpConnectionPoolTtl) {
doSetProperty("httpConnectionPoolTtl", httpConnectionPoolTtl);
return this;
}
/**
* The timeout of the http connection (client only).
*
* The option is a: <code>java.time.Duration</code> type.
*
* Default: 5s
* Group: common
*
* @param httpConnectionTimeout the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder httpConnectionTimeout(java.time.Duration httpConnectionTimeout) {
doSetProperty("httpConnectionTimeout", httpConnectionTimeout);
return this;
}
/**
* The timeout of the http connection (client only).
*
* The option will be converted to a <code>java.time.Duration</code>
* type.
*
* Default: 5s
* Group: common
*
* @param httpConnectionTimeout the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder httpConnectionTimeout(String httpConnectionTimeout) {
doSetProperty("httpConnectionTimeout", httpConnectionTimeout);
return this;
}
/**
* The timeout of the underlying http socket (client only).
*
* The option is a: <code>java.time.Duration</code> type.
*
* Default: 5s
* Group: common
*
* @param httpSocketTimeout the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder httpSocketTimeout(java.time.Duration httpSocketTimeout) {
doSetProperty("httpSocketTimeout", httpSocketTimeout);
return this;
}
/**
* The timeout of the underlying http socket (client only).
*
* The option will be converted to a <code>java.time.Duration</code>
* type.
*
* Default: 5s
* Group: common
*
* @param httpSocketTimeout the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder httpSocketTimeout(String httpSocketTimeout) {
doSetProperty("httpSocketTimeout", httpSocketTimeout);
return this;
}
/**
* Sets the name of a parameter to be passed in the exchange In Body.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param inBody the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder inBody(String inBody) {
doSetProperty("inBody", inBody);
return this;
}
/**
* The template used to format MDN message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param mdnMessageTemplate the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder mdnMessageTemplate(String mdnMessageTemplate) {
doSetProperty("mdnMessageTemplate", mdnMessageTemplate);
return this;
}
/**
* The return URL that the message receiver should send an asynchronous
* MDN to. If not present the receipt is synchronous. (Client only).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param receiptDeliveryOption the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder receiptDeliveryOption(String receiptDeliveryOption) {
doSetProperty("receiptDeliveryOption", receiptDeliveryOption);
return this;
}
/**
* The request URI of EDI message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: /
* Group: common
*
* @param requestUri the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder requestUri(String requestUri) {
doSetProperty("requestUri", requestUri);
return this;
}
/**
* The value included in the Server message header identifying the AS2
* Server.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: Camel AS2 Server Endpoint
* Group: common
*
* @param server the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder server(String server) {
doSetProperty("server", server);
return this;
}
/**
* The Server Fully Qualified Domain Name (FQDN). Used in message ids
* sent by endpoint.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: camel.apache.org
* Group: common
*
* @param serverFqdn the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder serverFqdn(String serverFqdn) {
doSetProperty("serverFqdn", serverFqdn);
return this;
}
/**
* The port number of server.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: common
*
* @param serverPortNumber the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder serverPortNumber(Integer serverPortNumber) {
doSetProperty("serverPortNumber", serverPortNumber);
return this;
}
/**
* The port number of server.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: common
*
* @param serverPortNumber the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder serverPortNumber(String serverPortNumber) {
doSetProperty("serverPortNumber", serverPortNumber);
return this;
}
/**
* The value of Subject header of AS2 message.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param subject the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder subject(String subject) {
doSetProperty("subject", subject);
return this;
}
/**
* The host name (IP or DNS name) of target host.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param targetHostname the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder targetHostname(String targetHostname) {
doSetProperty("targetHostname", targetHostname);
return this;
}
/**
* The port number of target host. -1 indicates the scheme default port.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 80
* Group: common
*
* @param targetPortNumber the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder targetPortNumber(Integer targetPortNumber) {
doSetProperty("targetPortNumber", targetPortNumber);
return this;
}
/**
* The port number of target host. -1 indicates the scheme default port.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Default: 80
* Group: common
*
* @param targetPortNumber the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder targetPortNumber(String targetPortNumber) {
doSetProperty("targetPortNumber", targetPortNumber);
return this;
}
/**
* The value included in the User-Agent message header identifying the
* AS2 user agent.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: Camel AS2 Client Endpoint
* Group: common
*
* @param userAgent the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder userAgent(String userAgent) {
doSetProperty("userAgent", userAgent);
return this;
}
/**
* The access token that is used by the client for bearer
* authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param accessToken the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder accessToken(String accessToken) {
doSetProperty("accessToken", accessToken);
return this;
}
/**
* The key used to encrypt the EDI message.
*
* The option is a: <code>java.security.PrivateKey</code> type.
*
* Group: security
*
* @param decryptingPrivateKey the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder decryptingPrivateKey(java.security.PrivateKey decryptingPrivateKey) {
doSetProperty("decryptingPrivateKey", decryptingPrivateKey);
return this;
}
/**
* The key used to encrypt the EDI message.
*
* The option will be converted to a
* <code>java.security.PrivateKey</code> type.
*
* Group: security
*
* @param decryptingPrivateKey the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder decryptingPrivateKey(String decryptingPrivateKey) {
doSetProperty("decryptingPrivateKey", decryptingPrivateKey);
return this;
}
/**
* The algorithm used to encrypt EDI message.
*
* The option is a:
* <code>org.apache.camel.component.as2.api.AS2EncryptionAlgorithm</code> type.
*
* Group: security
*
* @param encryptingAlgorithm the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder encryptingAlgorithm(org.apache.camel.component.as2.api.AS2EncryptionAlgorithm encryptingAlgorithm) {
doSetProperty("encryptingAlgorithm", encryptingAlgorithm);
return this;
}
/**
* The algorithm used to encrypt EDI message.
*
* The option will be converted to a
* <code>org.apache.camel.component.as2.api.AS2EncryptionAlgorithm</code> type.
*
* Group: security
*
* @param encryptingAlgorithm the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder encryptingAlgorithm(String encryptingAlgorithm) {
doSetProperty("encryptingAlgorithm", encryptingAlgorithm);
return this;
}
/**
* The chain of certificates used to encrypt EDI message.
*
* The option is a: <code>java.security.cert.Certificate[]</code> type.
*
* Group: security
*
* @param encryptingCertificateChain the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder encryptingCertificateChain(java.security.cert.Certificate[] encryptingCertificateChain) {
doSetProperty("encryptingCertificateChain", encryptingCertificateChain);
return this;
}
/**
* The chain of certificates used to encrypt EDI message.
*
* The option will be converted to a
* <code>java.security.cert.Certificate[]</code> type.
*
* Group: security
*
* @param encryptingCertificateChain the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder encryptingCertificateChain(String encryptingCertificateChain) {
doSetProperty("encryptingCertificateChain", encryptingCertificateChain);
return this;
}
/**
* Set hostname verifier for SSL session.
*
* The option is a: <code>javax.net.ssl.HostnameVerifier</code> type.
*
* Group: security
*
* @param hostnameVerifier the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder hostnameVerifier(javax.net.ssl.HostnameVerifier hostnameVerifier) {
doSetProperty("hostnameVerifier", hostnameVerifier);
return this;
}
/**
* Set hostname verifier for SSL session.
*
* The option will be converted to a
* <code>javax.net.ssl.HostnameVerifier</code> type.
*
* Group: security
*
* @param hostnameVerifier the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder hostnameVerifier(String hostnameVerifier) {
doSetProperty("hostnameVerifier", hostnameVerifier);
return this;
}
/**
* The access token that is used by the server when it sends an async
* MDN.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param mdnAccessToken the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder mdnAccessToken(String mdnAccessToken) {
doSetProperty("mdnAccessToken", mdnAccessToken);
return this;
}
/**
* The password that is used by the server for basic authentication when
* it sends an async MDN.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param mdnPassword the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder mdnPassword(String mdnPassword) {
doSetProperty("mdnPassword", mdnPassword);
return this;
}
/**
* The user-name that is used by the server for basic authentication
* when it sends an async MDN. If options for basic authentication and
* bearer authentication are both set then basic authentication takes
* precedence.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param mdnUserName the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder mdnUserName(String mdnUserName) {
doSetProperty("mdnUserName", mdnUserName);
return this;
}
/**
* The password that is used by the client for basic authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* The list of algorithms, in order of preference, requested to generate
* a message integrity check (MIC) returned in message disposition
* notification (MDN). Multiple algorithms can be separated by comma.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param signedReceiptMicAlgorithms the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder signedReceiptMicAlgorithms(String signedReceiptMicAlgorithms) {
doSetProperty("signedReceiptMicAlgorithms", signedReceiptMicAlgorithms);
return this;
}
/**
* The algorithm used to sign EDI message.
*
* The option is a:
* <code>org.apache.camel.component.as2.api.AS2SignatureAlgorithm</code>
* type.
*
* Group: security
*
* @param signingAlgorithm the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder signingAlgorithm(org.apache.camel.component.as2.api.AS2SignatureAlgorithm signingAlgorithm) {
doSetProperty("signingAlgorithm", signingAlgorithm);
return this;
}
/**
* The algorithm used to sign EDI message.
*
* The option will be converted to a
* <code>org.apache.camel.component.as2.api.AS2SignatureAlgorithm</code>
* type.
*
* Group: security
*
* @param signingAlgorithm the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder signingAlgorithm(String signingAlgorithm) {
doSetProperty("signingAlgorithm", signingAlgorithm);
return this;
}
/**
* The chain of certificates used to sign EDI message.
*
* The option is a: <code>java.security.cert.Certificate[]</code> type.
*
* Group: security
*
* @param signingCertificateChain the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder signingCertificateChain(java.security.cert.Certificate[] signingCertificateChain) {
doSetProperty("signingCertificateChain", signingCertificateChain);
return this;
}
/**
* The chain of certificates used to sign EDI message.
*
* The option will be converted to a
* <code>java.security.cert.Certificate[]</code> type.
*
* Group: security
*
* @param signingCertificateChain the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder signingCertificateChain(String signingCertificateChain) {
doSetProperty("signingCertificateChain", signingCertificateChain);
return this;
}
/**
* The key used to sign the EDI message.
*
* The option is a: <code>java.security.PrivateKey</code> type.
*
* Group: security
*
* @param signingPrivateKey the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder signingPrivateKey(java.security.PrivateKey signingPrivateKey) {
doSetProperty("signingPrivateKey", signingPrivateKey);
return this;
}
/**
* The key used to sign the EDI message.
*
* The option will be converted to a
* <code>java.security.PrivateKey</code> type.
*
* Group: security
*
* @param signingPrivateKey the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder signingPrivateKey(String signingPrivateKey) {
doSetProperty("signingPrivateKey", signingPrivateKey);
return this;
}
/**
* Set SSL context for connection to remote server.
*
* The option is a: <code>javax.net.ssl.SSLContext</code> type.
*
* Group: security
*
* @param sslContext the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder sslContext(javax.net.ssl.SSLContext sslContext) {
doSetProperty("sslContext", sslContext);
return this;
}
/**
* Set SSL context for connection to remote server.
*
* The option will be converted to a
* <code>javax.net.ssl.SSLContext</code> type.
*
* Group: security
*
* @param sslContext the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder sslContext(String sslContext) {
doSetProperty("sslContext", sslContext);
return this;
}
/**
* The user-name that is used by the client for basic authentication. If
* options for basic authentication and bearer authentication are both
* set then basic authentication takes precedence.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param userName the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder userName(String userName) {
doSetProperty("userName", userName);
return this;
}
/**
* Certificates to validate the message's signature against. If not
* supplied, validation will not take place. Server: validates the
* received message. Client: not yet implemented, should validate the
* MDN.
*
* The option is a: <code>java.security.cert.Certificate[]</code> type.
*
* Group: security
*
* @param validateSigningCertificateChain the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder validateSigningCertificateChain(java.security.cert.Certificate[] validateSigningCertificateChain) {
doSetProperty("validateSigningCertificateChain", validateSigningCertificateChain);
return this;
}
/**
* Certificates to validate the message's signature against. If not
* supplied, validation will not take place. Server: validates the
* received message. Client: not yet implemented, should validate the
* MDN.
*
* The option will be converted to a
* <code>java.security.cert.Certificate[]</code> type.
*
* Group: security
*
* @param validateSigningCertificateChain the value to set
* @return the dsl builder
*/
default AS2EndpointConsumerBuilder validateSigningCertificateChain(String validateSigningCertificateChain) {
doSetProperty("validateSigningCertificateChain", validateSigningCertificateChain);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the AS2 component.
*/
public | AS2EndpointConsumerBuilder |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/action/SuggestTests.java | {
"start": 850,
"end": 8777
} | class ____ extends ScriptTestCase {
private List<? extends Token> getSuggestTokens(String source) {
ANTLRInputStream stream = new ANTLRInputStream(source);
SuggestLexer lexer = new EnhancedSuggestLexer(stream, scriptEngine.getContextsToLookups().get(PainlessTestScript.CONTEXT));
lexer.removeErrorListeners();
return lexer.getAllTokens();
}
private void compareTokens(List<? extends Token> tokens, String... expected) {
assertEquals(expected.length % 2, 0);
assertEquals(tokens.size(), expected.length / 2);
int index = 0;
for (Token token : tokens) {
assertEquals(SuggestLexer.VOCABULARY.getDisplayName(token.getType()), expected[index++]);
assertEquals(token.getText(), expected[index++]);
}
}
public void testSuggestLexer() {
compareTokens(getSuggestTokens("test"), SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID), "test");
compareTokens(
getSuggestTokens("int test;"),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"int",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";"
);
compareTokens(
getSuggestTokens("ArrayList test;"),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"ArrayList",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";"
);
compareTokens(
getSuggestTokens("def test;"),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"def",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";"
);
compareTokens(
getSuggestTokens("int[] test;"),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ATYPE),
"int[]",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";"
);
compareTokens(
getSuggestTokens("ArrayList[] test;"),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ATYPE),
"ArrayList[]",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";"
);
compareTokens(
getSuggestTokens("def[] test;"),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ATYPE),
"def[]",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";"
);
compareTokens(
getSuggestTokens("List test = new ArrayList(); test."),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"List",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ASSIGN),
"=",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.NEW),
"new",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"ArrayList",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.LP),
"(",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.RP),
")",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.DOT),
"."
);
compareTokens(
getSuggestTokens("List test = new ArrayList(); test.add"),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"List",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ASSIGN),
"=",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.NEW),
"new",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"ArrayList",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.LP),
"(",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.RP),
")",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.DOT),
".",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.DOTID),
"add"
);
compareTokens(
getSuggestTokens("List test = new ArrayList(); test.add("),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"List",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ASSIGN),
"=",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.NEW),
"new",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"ArrayList",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.LP),
"(",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.RP),
")",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.DOT),
".",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.DOTID),
"add",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.LP),
"("
);
compareTokens(
getSuggestTokens("def test(int param) {return param;} test(2);"),
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"def",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.LP),
"(",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.TYPE),
"int",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"param",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.RP),
")",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.LBRACK),
"{",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.RETURN),
"return",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"param",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.RBRACK),
"}",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.ID),
"test",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.LP),
"(",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.INTEGER),
"2",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.RP),
")",
SuggestLexer.VOCABULARY.getDisplayName(SuggestLexer.SEMICOLON),
";"
);
}
}
| SuggestTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterMetricsService.java | {
"start": 1222,
"end": 2195
} | class ____ extends AbstractService {
/** Router for this metrics. */
private final Router router;
/** Router metrics. */
private RouterMetrics routerMetrics;
/** Router Client metrics. */
private RouterClientMetrics routerClientMetrics;
/** Federation metrics. */
private RBFMetrics rbfMetrics;
/** Namenode mock metrics. */
private NamenodeBeanMetrics nnMetrics;
public RouterMetricsService(final Router router) {
super(RouterMetricsService.class.getName());
this.router = router;
}
@Override
protected void serviceInit(Configuration configuration) throws Exception {
this.routerMetrics = RouterMetrics.create(configuration);
this.routerClientMetrics = RouterClientMetrics.create(configuration);
}
@Override
protected void serviceStart() throws Exception {
// Wrapper for all the FSNamesystem JMX interfaces
this.nnMetrics = new NamenodeBeanMetrics(this.router);
// Federation MBean JMX | RouterMetricsService |
java | spring-projects__spring-security | crypto/src/main/java/org/springframework/security/crypto/password4j/Password4jPasswordEncoder.java | {
"start": 983,
"end": 1116
} | class ____ the
* common functionality for password encoding and verification using the Password4j
* library.
*
* <p>
* This | provides |
java | spring-projects__spring-framework | spring-core-test/src/main/java/org/springframework/core/test/tools/CompilationException.java | {
"start": 802,
"end": 1725
} | class ____ extends RuntimeException {
CompilationException(String errors, SourceFiles sourceFiles, ResourceFiles resourceFiles) {
super(buildMessage(errors, sourceFiles, resourceFiles));
}
private static String buildMessage(String errors, SourceFiles sourceFiles,
ResourceFiles resourceFiles) {
StringBuilder message = new StringBuilder();
message.append("Unable to compile source\n\n");
message.append(errors);
message.append("\n\n");
for (SourceFile sourceFile : sourceFiles) {
message.append("---- source: ").append(sourceFile.getPath()).append("\n\n");
message.append(sourceFile.getContent());
message.append("\n\n");
}
for (ResourceFile resourceFile : resourceFiles) {
message.append("---- resource: ").append(resourceFile.getPath()).append("\n\n");
message.append(resourceFile.getContent());
message.append("\n\n");
}
return message.toString();
}
}
| CompilationException |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MissingRuntimeRetentionTest.java | {
"start": 8396,
"end": 8967
} | interface ____ {}
""")
.doTest();
}
@Test
public void sourceRetentionStillFiringOnAndroid() {
compilationHelper
.setArgs(Collections.singletonList("-XDandroidCompatible=true"))
.addSourceLines(
"TestAnnotation.java",
"""
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@javax.inject.Scope
// BUG: Diagnostic contains: @Retention(RUNTIME)
@Retention(RetentionPolicy.SOURCE)
public @ | TestAnnotation |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/operators/windowing/functions/InternalSingleValueProcessWindowFunction.java | {
"start": 1462,
"end": 3116
} | class ____<IN, OUT, KEY, W extends Window>
extends WrappingFunction<ProcessWindowFunction<IN, OUT, KEY, W>>
implements InternalWindowFunction<IN, OUT, KEY, W> {
private static final long serialVersionUID = 1L;
private final InternalProcessWindowContext<IN, OUT, KEY, W> ctx;
public InternalSingleValueProcessWindowFunction(
ProcessWindowFunction<IN, OUT, KEY, W> wrappedFunction) {
super(wrappedFunction);
ctx = new InternalProcessWindowContext<>(wrappedFunction);
}
@Override
public void process(
KEY key,
final W window,
final InternalWindowContext context,
IN input,
Collector<OUT> out)
throws Exception {
this.ctx.window = window;
this.ctx.internalContext = context;
ProcessWindowFunction<IN, OUT, KEY, W> wrappedFunction = this.wrappedFunction;
wrappedFunction.process(key, ctx, Collections.singletonList(input), out);
}
@Override
public void clear(final W window, final InternalWindowContext context) throws Exception {
this.ctx.window = window;
this.ctx.internalContext = context;
ProcessWindowFunction<IN, OUT, KEY, W> wrappedFunction = this.wrappedFunction;
wrappedFunction.clear(ctx);
}
@Override
public RuntimeContext getRuntimeContext() {
throw new RuntimeException("This should never be called.");
}
@Override
public IterationRuntimeContext getIterationRuntimeContext() {
throw new RuntimeException("This should never be called.");
}
}
| InternalSingleValueProcessWindowFunction |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/filter/JsonIncludeTest.java | {
"start": 1570,
"end": 1874
} | class ____
{
public int x;
public int y = 3;
public int z = 7;
NonDefaultBeanXYZ(int x, int y, int z) {
this.x = x;
this.y = y;
this.z = z;
}
}
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
static | NonDefaultBeanXYZ |
java | elastic__elasticsearch | x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java | {
"start": 686,
"end": 6297
} | class ____ extends InferenceBaseRestTest {
public void testPutE5Small_withNoModelVariant() {
{
String inferenceEntityId = "testPutE5Small_withNoModelVariant";
expectThrows(
org.elasticsearch.client.ResponseException.class,
() -> putTextEmbeddingModel(inferenceEntityId, noModelIdVariantJsonEntity())
);
}
}
public void testPutE5Small_withPlatformAgnosticVariant() throws IOException {
String inferenceEntityId = "teste5mall_withplatformagnosticvariant";
putTextEmbeddingModel(inferenceEntityId, platformAgnosticModelVariantJsonEntity());
var models = getTrainedModel("_all");
assertThat(models.toString(), containsString("deployment_id=" + inferenceEntityId));
Map<String, Object> results = infer(
inferenceEntityId,
TaskType.TEXT_EMBEDDING,
List.of("hello world", "this is the second document")
);
assertTrue(((List) ((Map) ((List) results.get(DenseEmbeddingFloatResults.TEXT_EMBEDDING)).get(0)).get("embedding")).size() > 1);
// there exists embeddings
assertTrue(((List) results.get(DenseEmbeddingFloatResults.TEXT_EMBEDDING)).size() == 2);
// there are two sets of embeddings
deleteTextEmbeddingModel(inferenceEntityId);
}
public void testPutE5Small_withPlatformSpecificVariant() throws IOException {
String inferenceEntityId = "teste5mall_withplatformspecificvariant";
if ("linux-x86_64".equals(Platforms.PLATFORM_NAME)) {
putTextEmbeddingModel(inferenceEntityId, platformSpecificModelVariantJsonEntity());
var models = getTrainedModel("_all");
assertThat(models.toString(), containsString("deployment_id=" + inferenceEntityId));
Map<String, Object> results = infer(
inferenceEntityId,
TaskType.TEXT_EMBEDDING,
List.of("hello world", "this is the second document")
);
assertTrue(((List) ((Map) ((List) results.get(DenseEmbeddingFloatResults.TEXT_EMBEDDING)).get(0)).get("embedding")).size() > 1);
// there exists embeddings
assertTrue(((List) results.get(DenseEmbeddingFloatResults.TEXT_EMBEDDING)).size() == 2);
// there are two sets of embeddings
deleteTextEmbeddingModel(inferenceEntityId);
} else {
expectThrows(
org.elasticsearch.client.ResponseException.class,
() -> putTextEmbeddingModel(inferenceEntityId, platformSpecificModelVariantJsonEntity())
);
}
}
public void testPutE5Small_withFakeModelVariant() {
String inferenceEntityId = "teste5mall_withfakevariant";
expectThrows(
org.elasticsearch.client.ResponseException.class,
() -> putTextEmbeddingModel(inferenceEntityId, fakeModelVariantJsonEntity())
);
}
public void testPutE5WithTrainedModelAndInference() throws IOException {
putE5TrainedModels();
deployE5TrainedModels();
putTextEmbeddingModel("an-e5-deployment", platformAgnosticModelVariantJsonEntity());
getTrainedModel("an-e5-deployment");
}
private Map<String, Object> deleteTextEmbeddingModel(String inferenceEntityId) throws IOException {
var endpoint = Strings.format("_inference/%s/%s", "text_embedding", inferenceEntityId);
var request = new Request("DELETE", endpoint);
var response = client().performRequest(request);
assertStatusOkOrCreated(response);
return entityAsMap(response);
}
private Map<String, Object> putTextEmbeddingModel(String inferenceEntityId, String jsonEntity) throws IOException {
var endpoint = Strings.format("_inference/%s/%s", TaskType.TEXT_EMBEDDING, inferenceEntityId);
var request = new Request("PUT", endpoint);
request.setJsonEntity(jsonEntity);
var response = client().performRequest(request);
assertStatusOkOrCreated(response);
return entityAsMap(response);
}
private String noModelIdVariantJsonEntity() {
return """
{
"service": "elasticsearch",
"service_settings": {
"num_allocations": 1,
"num_threads": 1
}
}
""";
}
private String platformAgnosticModelVariantJsonEntity() {
return """
{
"service": "elasticsearch",
"service_settings": {
"num_allocations": 1,
"num_threads": 1,
"model_id": ".multilingual-e5-small"
}
}
""";
}
private String platformSpecificModelVariantJsonEntity() {
return """
{
"service": "elasticsearch",
"service_settings": {
"num_allocations": 1,
"num_threads": 1,
"model_id": ".multilingual-e5-small_linux-x86_64"
}
}
""";
}
private String fakeModelVariantJsonEntity() {
return """
{
"service": "elasticsearch",
"service_settings": {
"num_allocations": 1,
"num_threads": 1,
"model_id": ".not-a-real-model-variant"
}
}
""";
}
}
| TextEmbeddingCrudIT |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cut/generic/EnumPlaceholderUserType.java | {
"start": 2316,
"end": 2482
} | class ____ {
Class<? extends Enum> firstEnumClass;
String firstEnumValue;
Class<? extends Enum> secondEnumClass;
String secondEnumValue;
}
}
| EmbeddableMapper |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/snapshots/RepositoriesMetadataSerializationTests.java | {
"start": 1187,
"end": 5226
} | class ____ extends ChunkedToXContentDiffableSerializationTestCase<Metadata.ProjectCustom> {
@Override
protected Metadata.ProjectCustom createTestInstance() {
int numberOfRepositories = randomInt(10);
List<RepositoryMetadata> entries = new ArrayList<>();
for (int i = 0; i < numberOfRepositories; i++) {
// divide by 2 to not overflow when adding to this number for the pending generation below
final long generation = randomNonNegativeLong() / 2L;
entries.add(
new RepositoryMetadata(
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomSettings(),
generation,
generation + randomLongBetween(0, generation)
)
);
}
entries.sort(Comparator.comparing(RepositoryMetadata::name));
return new RepositoriesMetadata(entries);
}
@Override
protected Writeable.Reader<Metadata.ProjectCustom> instanceReader() {
return RepositoriesMetadata::new;
}
@Override
protected Metadata.ProjectCustom mutateInstance(Metadata.ProjectCustom instance) {
List<RepositoryMetadata> entries = new ArrayList<>(((RepositoriesMetadata) instance).repositories());
boolean addEntry = entries.isEmpty() ? true : randomBoolean();
if (addEntry) {
entries.add(new RepositoryMetadata(randomAlphaOfLength(10), randomAlphaOfLength(10), randomSettings()));
} else {
entries.remove(randomIntBetween(0, entries.size() - 1));
}
return new RepositoriesMetadata(entries);
}
public Settings randomSettings() {
if (randomBoolean()) {
return Settings.EMPTY;
} else {
int numberOfSettings = randomInt(10);
Settings.Builder builder = Settings.builder();
for (int i = 0; i < numberOfSettings; i++) {
builder.put(randomAlphaOfLength(10), randomAlphaOfLength(20));
}
return builder.build();
}
}
@Override
protected Metadata.ProjectCustom makeTestChanges(Metadata.ProjectCustom testInstance) {
RepositoriesMetadata repositoriesMetadata = (RepositoriesMetadata) testInstance;
List<RepositoryMetadata> repos = new ArrayList<>(repositoriesMetadata.repositories());
if (randomBoolean() && repos.size() > 1) {
// remove some elements
int leaveElements = randomIntBetween(0, repositoriesMetadata.repositories().size() - 1);
repos = randomSubsetOf(leaveElements, repos.toArray(new RepositoryMetadata[leaveElements]));
}
if (randomBoolean()) {
// add some elements
int addElements = randomInt(10);
for (int i = 0; i < addElements; i++) {
repos.add(new RepositoryMetadata(randomAlphaOfLength(10), randomAlphaOfLength(10), randomSettings()));
}
}
return new RepositoriesMetadata(repos);
}
@Override
protected Writeable.Reader<Diff<Metadata.ProjectCustom>> diffReader() {
return RepositoriesMetadata::readDiffFrom;
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(ClusterModule.getNamedWriteables());
}
@Override
protected Metadata.ProjectCustom doParseInstance(XContentParser parser) throws IOException {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
RepositoriesMetadata repositoriesMetadata = RepositoriesMetadata.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
List<RepositoryMetadata> repos = new ArrayList<>(repositoriesMetadata.repositories());
repos.sort(Comparator.comparing(RepositoryMetadata::name));
return new RepositoriesMetadata(repos);
}
}
| RepositoriesMetadataSerializationTests |
java | quarkusio__quarkus | integration-tests/oidc-token-propagation/src/main/java/io/quarkus/it/keycloak/FrontendResource.java | {
"start": 335,
"end": 1491
} | class ____ {
@Inject
@RestClient
JwtTokenPropagationService jwtTokenPropagationService;
@Inject
@RestClient
AccessTokenPropagationService accessTokenPropagationService;
@Inject
@RestClient
ServiceAccountService serviceAccountService;
@GET
@Path("jwt-token-propagation")
@RolesAllowed("user")
public String userNameJwtTokenPropagation() {
return jwtTokenPropagationService.getUserName();
}
@GET
@Path("client-jwt-token-propagation")
@Authenticated
public String clientUserNameJwtTokenPropagation() {
return jwtTokenPropagationService.getClientName();
}
@GET
@Path("access-token-propagation")
@RolesAllowed("user")
public Response userNameAccessTokenPropagation() {
try {
return Response.ok(accessTokenPropagationService.getUserName()).build();
} catch (Exception ex) {
return Response.serverError().entity(ex.getMessage()).build();
}
}
@GET
@Path("service-account")
public String userNameServiceAccount() {
return serviceAccountService.getUserName();
}
}
| FrontendResource |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java | {
"start": 1567,
"end": 17781
} | class ____ implements AggregatorFunctionSupplier {
// Overriding constructor to support isRateOverTime flag
private final boolean isRateOverTime;
public FunctionSupplier(boolean isRateOverTime) {
this.isRateOverTime = isRateOverTime;
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
throw new UnsupportedOperationException("non-grouping aggregator is not supported");
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
throw new UnsupportedOperationException("non-grouping aggregator is not supported");
}
@Override
public RateIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List<Integer> channels) {
return new RateIntGroupingAggregatorFunction(channels, driverContext, isRateOverTime);
}
@Override
public String describe() {
return "rate of int";
}
}
static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("timestamps", ElementType.LONG),
new IntermediateStateDesc("values", ElementType.INT),
new IntermediateStateDesc("sampleCounts", ElementType.LONG),
new IntermediateStateDesc("resets", ElementType.DOUBLE)
);
private ObjectArray<Buffer> buffers;
private final List<Integer> channels;
private final DriverContext driverContext;
private final BigArrays bigArrays;
private ObjectArray<ReducedState> reducedStates;
private final boolean isRateOverTime;
public RateIntGroupingAggregatorFunction(List<Integer> channels, DriverContext driverContext, boolean isRateOverTime) {
this.channels = channels;
this.driverContext = driverContext;
this.bigArrays = driverContext.bigArrays();
this.isRateOverTime = isRateOverTime;
ObjectArray<Buffer> buffers = driverContext.bigArrays().newObjectArray(256);
try {
this.reducedStates = driverContext.bigArrays().newObjectArray(256);
this.buffers = buffers;
buffers = null;
} finally {
Releasables.close(buffers);
}
}
@Override
public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) {
// manage nulls via buffers/reducedStates arrays
}
@Override
public AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) {
IntBlock valuesBlock = page.getBlock(channels.get(0));
if (valuesBlock.areAllValuesNull()) {
return new AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
}
@Override
public void add(int positionOffset, IntVector groupIds) {
}
@Override
public void close() {
}
};
}
LongBlock timestampsBlock = page.getBlock(channels.get(1));
LongVector timestampsVector = timestampsBlock.asVector();
if (timestampsVector == null) {
assert false : "expected timestamp vector in time-series aggregation";
throw new IllegalStateException("expected timestamp vector in time-series aggregation");
}
IntVector sliceIndices = ((IntBlock) page.getBlock(channels.get(2))).asVector();
assert sliceIndices != null : "expected slice indices vector in time-series aggregation";
LongVector futureMaxTimestamps = ((LongBlock) page.getBlock(channels.get(3))).asVector();
assert futureMaxTimestamps != null : "expected future max timestamps vector in time-series aggregation";
return new AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
var valuesVector = valuesBlock.asVector();
if (valuesVector != null) {
addRawInput(positionOffset, groupIds, valuesVector, timestampsVector);
} else {
addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector);
}
}
@Override
public void close() {
}
};
}
// Note that this path can be executed randomly in tests, not in production
private void addRawInput(int positionOffset, IntBlock groups, IntBlock valueBlock, LongVector timestampVector) {
int lastGroup = -1;
Buffer buffer = null;
int positionCount = groups.getPositionCount();
for (int p = 0; p < positionCount; p++) {
if (groups.isNull(p)) {
continue;
}
int valuePosition = p + positionOffset;
if (valueBlock.isNull(valuePosition)) {
continue;
}
assert valueBlock.getValueCount(valuePosition) == 1 : "expected single-valued block " + valueBlock;
int groupStart = groups.getFirstValueIndex(p);
int groupEnd = groupStart + groups.getValueCount(p);
long timestamp = timestampVector.getLong(valuePosition);
for (int g = groupStart; g < groupEnd; g++) {
final int groupId = groups.getInt(g);
final var value = valueBlock.getInt(valueBlock.getFirstValueIndex(valuePosition));
if (lastGroup != groupId) {
buffer = getBuffer(groupId, 1, timestamp);
buffer.appendWithoutResize(timestamp, value);
lastGroup = groupId;
} else {
buffer.maybeResizeAndAppend(bigArrays, timestamp, value);
}
}
}
}
private void addRawInput(int positionOffset, IntVector groups, IntBlock valueBlock, LongVector timestampVector) {
int positionCount = groups.getPositionCount();
if (groups.isConstant()) {
int groupId = groups.getInt(0);
addSubRange(groupId, positionOffset, positionOffset + positionCount, valueBlock, timestampVector);
} else {
int lastGroup = groups.getInt(0);
int lastPosition = 0;
for (int p = 1; p < positionCount; p++) {
int group = groups.getInt(p);
if (group != lastGroup) {
addSubRange(lastGroup, positionOffset + lastPosition, positionOffset + p, valueBlock, timestampVector);
lastGroup = group;
lastPosition = p;
}
}
addSubRange(lastGroup, positionOffset + lastPosition, positionOffset + positionCount, valueBlock, timestampVector);
}
}
private void addRawInput(int positionOffset, IntVector groups, IntVector valueVector, LongVector timestampVector) {
int positionCount = groups.getPositionCount();
if (groups.isConstant()) {
int groupId = groups.getInt(0);
addSubRange(groupId, positionOffset, positionOffset + positionCount, valueVector, timestampVector);
} else {
int lastGroup = groups.getInt(0);
int lastPosition = 0;
for (int p = 1; p < positionCount; p++) {
int group = groups.getInt(p);
if (group != lastGroup) {
addSubRange(lastGroup, positionOffset + lastPosition, positionOffset + p, valueVector, timestampVector);
lastGroup = group;
lastPosition = p;
}
}
addSubRange(lastGroup, positionOffset + lastPosition, positionOffset + positionCount, valueVector, timestampVector);
}
}
private void addSubRange(int group, int from, int to, IntVector valueVector, LongVector timestampVector) {
var buffer = getBuffer(group, to - from, timestampVector.getLong(from));
buffer.appendRange(from, to, valueVector, timestampVector);
}
private void addSubRange(int group, int from, int to, IntBlock valueBlock, LongVector timestampVector) {
var buffer = getBuffer(group, to - from, timestampVector.getLong(from));
buffer.appendRange(from, to, valueBlock, timestampVector);
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) {
addIntermediateInputBlock(positionOffset, groups, page);
}
@Override
public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) {
addIntermediateInputBlock(positionOffset, groups, page);
}
@Override
public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
assert channels.size() == intermediateBlockCount();
LongBlock timestamps = page.getBlock(channels.get(0));
IntBlock values = page.getBlock(channels.get(1));
assert timestamps.getTotalValueCount() == values.getTotalValueCount() : "timestamps=" + timestamps + "; values=" + values;
if (values.areAllValuesNull()) {
return;
}
LongVector sampleCounts = ((LongBlock) page.getBlock(channels.get(2))).asVector();
DoubleVector resets = ((DoubleBlock) page.getBlock(channels.get(3))).asVector();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuePosition = positionOffset + groupPosition;
long sampleCount = sampleCounts.getLong(valuePosition);
if (sampleCount == 0) {
continue;
}
int groupId = groups.getInt(groupPosition);
reducedStates = bigArrays.grow(reducedStates, groupId + 1);
ReducedState state = reducedStates.get(groupId);
if (state == null) {
state = new ReducedState();
reducedStates.set(groupId, state);
}
state.appendIntervalsFromBlocks(timestamps, values, valuePosition);
state.samples += sampleCount;
state.resets += resets.getDouble(valuePosition);
}
}
private void addIntermediateInputBlock(int positionOffset, IntBlock groups, Page page) {
assert channels.size() == intermediateBlockCount();
LongBlock timestamps = page.getBlock(channels.get(0));
IntBlock values = page.getBlock(channels.get(1));
assert timestamps.getTotalValueCount() == values.getTotalValueCount() : "timestamps=" + timestamps + "; values=" + values;
if (values.areAllValuesNull()) {
return;
}
LongVector sampleCounts = ((LongBlock) page.getBlock(channels.get(2))).asVector();
DoubleVector resets = ((DoubleBlock) page.getBlock(channels.get(3))).asVector();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuePosition = positionOffset + groupPosition;
long sampleCount = sampleCounts.getLong(valuePosition);
if (sampleCount == 0) {
continue;
}
if (groups.isNull(groupPosition)) {
continue;
}
int firstGroup = groups.getFirstValueIndex(groupPosition);
int lastGroup = firstGroup + groups.getValueCount(groupPosition);
for (int g = firstGroup; g < lastGroup; g++) {
int groupId = groups.getInt(g);
reducedStates = bigArrays.grow(reducedStates, groupId + 1);
ReducedState state = reducedStates.get(groupId);
if (state == null) {
state = new ReducedState();
reducedStates.set(groupId, state);
}
state.appendIntervalsFromBlocks(timestamps, values, valuePosition);
state.samples += sampleCount;
state.resets += resets.getDouble(valuePosition);
}
}
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
BlockFactory blockFactory = driverContext.blockFactory();
int positionCount = selected.getPositionCount();
try (
var timestamps = blockFactory.newLongBlockBuilder(positionCount * 2);
var values = blockFactory.newIntBlockBuilder(positionCount * 2);
var sampleCounts = blockFactory.newLongVectorFixedBuilder(positionCount);
var resets = blockFactory.newDoubleVectorFixedBuilder(positionCount)
) {
for (int p = 0; p < positionCount; p++) {
int group = selected.getInt(p);
var state = flushAndCombineState(group);
// Do not combine intervals across shards because intervals from different indices may overlap.
if (state != null && state.samples > 0) {
timestamps.beginPositionEntry();
values.beginPositionEntry();
for (Interval interval : state.intervals) {
timestamps.appendLong(interval.t1);
timestamps.appendLong(interval.t2);
values.appendInt(interval.v1);
values.appendInt(interval.v2);
}
timestamps.endPositionEntry();
values.endPositionEntry();
sampleCounts.appendLong(state.samples);
resets.appendDouble(state.resets);
} else {
timestamps.appendLong(0);
values.appendInt(0);
sampleCounts.appendLong(0);
resets.appendDouble(0);
}
}
blocks[offset] = timestamps.build();
blocks[offset + 1] = values.build();
blocks[offset + 2] = sampleCounts.build().asBlock();
blocks[offset + 3] = resets.build().asBlock();
}
}
@Override
public void close() {
for (long i = 0; i < buffers.size(); i++) {
Buffer buffer = buffers.get(i);
if (buffer != null) {
buffer.close();
}
}
Releasables.close(reducedStates, buffers);
}
/**
 * Returns the buffer for {@code groupId}, growing the backing array as needed.
 * An existing buffer has its capacity ensured for {@code newElements}; otherwise
 * a new buffer sized for {@code newElements} is created and registered.
 */
private Buffer getBuffer(int groupId, int newElements, long firstTimestamp) {
    buffers = bigArrays.grow(buffers, groupId + 1);
    Buffer existing = buffers.get(groupId);
    if (existing != null) {
        existing.ensureCapacity(bigArrays, newElements, firstTimestamp);
        return existing;
    }
    Buffer created = new Buffer(bigArrays, newElements);
    buffers.set(groupId, created);
    return created;
}
/**
* Buffers data points in two arrays: one for timestamps and one for values, partitioned into multiple slices.
* Each slice is sorted in descending order of timestamp. A new slice is created when a data point has a
* timestamp greater than the last point of the current slice. Since each page is sorted by descending timestamp,
* we only need to compare the first point of the new page with the last point of the current slice to decide
* if a new slice is needed. During merging, a priority queue is used to iterate through the slices, selecting
* the slice with the greatest timestamp.
*/
static final | FunctionSupplier |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/config/AbstractConfig.java | {
"start": 19724,
"end": 20053
} | class ____ by the given configuration key. The configuration
* may specify either null or an empty string to indicate no configured instances. In both cases, this method
* returns an empty list to indicate no configured instances.
*
* @param key The configuration key for the class
* @param t The | specified |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/backoff/BackOff.java | {
"start": 827,
"end": 1490
} | interface ____ expected to use it like this:
*
* <pre class="code">
* BackOffExecution execution = backOff.start();
*
* // In the operation recovery/retry loop:
* long waitInterval = execution.nextBackOff();
* if (waitInterval == BackOffExecution.STOP) {
* // do not retry operation
* }
* else {
* // sleep, for example, Thread.sleep(waitInterval)
* // retry operation
* }</pre>
*
* <p>Once the underlying operation has completed successfully, the execution
* instance can be discarded.
*
* @author Stephane Nicoll
* @since 4.1
* @see BackOffExecution
* @see FixedBackOff
* @see ExponentialBackOff
*/
@FunctionalInterface
public | are |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.