language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__rocketmq
|
proxy/src/main/java/org/apache/rocketmq/proxy/service/receipt/DefaultReceiptHandleManager.java
|
{
"start": 3176,
"end": 16001
}
|
class ____ extends AbstractStartAndShutdown implements ReceiptHandleManager {
    protected final static Logger log = LoggerFactory.getLogger(LoggerName.PROXY_LOGGER_NAME);
    protected final MetadataService metadataService;
    protected final ConsumerManager consumerManager;
    // (channel, group) -> in-flight receipt handles that this proxy keeps renewing.
    protected final ConcurrentMap<ReceiptHandleGroupKey, ReceiptHandleGroup> receiptHandleGroupMap;
    // Downstream listener that performs the actual RENEW / STOP_RENEW / CLEAR_GROUP work.
    protected final StateEventListener<RenewEvent> eventListener;
    // Backoff policy deciding the next invisible duration for each successive renew.
    protected final static RetryPolicy RENEW_POLICY = new RenewStrategyPolicy();
    // Single thread that periodically scans receiptHandleGroupMap for handles to renew.
    protected final ScheduledExecutorService scheduledExecutorService =
        ThreadUtils.newSingleThreadScheduledExecutor(new ThreadFactoryImpl("RenewalScheduledThread_"));
    protected final ThreadPoolExecutor renewalWorkerService;
    protected final ThreadPoolExecutor returnHandleGroupWorkerService;

    /**
     * Wires up the renewal machinery: the two worker pools, a listener that clears a
     * client's handles when that client unregisters, and the periodic renew scan
     * registered via the start/shutdown hook.
     */
    public DefaultReceiptHandleManager(MetadataService metadataService, ConsumerManager consumerManager, StateEventListener<RenewEvent> eventListener) {
        this.metadataService = metadataService;
        this.consumerManager = consumerManager;
        this.eventListener = eventListener;
        ProxyConfig proxyConfig = ConfigurationManager.getProxyConfig();
        this.renewalWorkerService = ThreadPoolMonitor.createAndMonitor(
            proxyConfig.getRenewThreadPoolNums(),
            proxyConfig.getRenewMaxThreadPoolNums(),
            1, TimeUnit.MINUTES,
            "RenewalWorkerThread",
            proxyConfig.getRenewThreadPoolQueueCapacity()
        );
        // NOTE(review): max pool size is hard-coded to 2x core, and the queue capacity
        // reuses the renew pool's setting rather than a dedicated one — confirm intentional.
        this.returnHandleGroupWorkerService = ThreadPoolMonitor.createAndMonitor(
            proxyConfig.getReturnHandleGroupThreadPoolNums(),
            proxyConfig.getReturnHandleGroupThreadPoolNums() * 2,
            1, TimeUnit.MINUTES,
            "ReturnHandleGroupWorkerThread",
            proxyConfig.getRenewThreadPoolQueueCapacity()
        );
        // When a client unregisters, stop renewing its handles and return them.
        consumerManager.appendConsumerIdsChangeListener(new ConsumerIdsChangeListener() {
            @Override
            public void handle(ConsumerGroupEvent event, String group, Object... args) {
                if (ConsumerGroupEvent.CLIENT_UNREGISTER.equals(event)) {
                    if (args == null || args.length < 1) {
                        return;
                    }
                    if (args[0] instanceof ClientChannelInfo) {
                        ClientChannelInfo clientChannelInfo = (ClientChannelInfo) args[0];
                        if (ChannelHelper.isRemote(clientChannelInfo.getChannel())) {
                            // if the channel sync from other proxy is expired, not to clear data of connect to current proxy
                            return;
                        }
                        clearGroup(new ReceiptHandleGroupKey(clientChannelInfo.getChannel(), group));
                        log.info("clear handle of this client when client unregister. group:{}, clientChannelInfo:{}", group, clientChannelInfo);
                    }
                }
            }
            @Override
            public void shutdown() {
            }
        });
        this.receiptHandleGroupMap = new ConcurrentHashMap<>();
        // Dropping a renew task is acceptable: the next scheduled scan will pick it up again.
        this.renewalWorkerService.setRejectedExecutionHandler((r, executor) -> log.warn("add renew task failed. queueSize:{}", executor.getQueue().size()));
        this.appendStartAndShutdown(new StartAndShutdown() {
            @Override
            public void start() throws Exception {
                scheduledExecutorService.scheduleWithFixedDelay(() -> scheduleRenewTask(), 0,
                    ConfigurationManager.getProxyConfig().getRenewSchedulePeriodMillis(), TimeUnit.MILLISECONDS);
            }
            @Override
            public void shutdown() throws Exception {
                scheduledExecutorService.shutdown();
                clearAllHandle();
            }
        });
    }

    /** Registers a receipt handle so the periodic scan keeps it renewed. */
    public void addReceiptHandle(ProxyContext context, Channel channel, String group, String msgID, MessageReceiptHandle messageReceiptHandle) {
        ConcurrentHashMapUtils.computeIfAbsent(this.receiptHandleGroupMap, new ReceiptHandleGroupKey(channel, group),
            k -> new ReceiptHandleGroup()).put(msgID, messageReceiptHandle);
    }

    /**
     * Stops tracking a single receipt handle.
     *
     * @return the removed handle, or null when neither the group nor the handle is tracked
     */
    public MessageReceiptHandle removeReceiptHandle(ProxyContext context, Channel channel, String group, String msgID, String receiptHandle) {
        ReceiptHandleGroup handleGroup = receiptHandleGroupMap.get(new ReceiptHandleGroupKey(channel, group));
        if (handleGroup == null) {
            return null;
        }
        return handleGroup.remove(msgID, receiptHandle);
    }

    /** A client is offline when the consumer manager no longer knows its channel. */
    protected boolean clientIsOffline(ReceiptHandleGroupKey groupKey) {
        return this.consumerManager.findChannel(groupKey.getGroup(), groupKey.getChannel()) == null;
    }

    /**
     * Periodic scan: clears groups whose client went offline, and submits a renew
     * task for every handle that will become visible again soon.
     */
    protected void scheduleRenewTask() {
        Stopwatch stopwatch = Stopwatch.createStarted();
        try {
            ProxyConfig proxyConfig = ConfigurationManager.getProxyConfig();
            for (Map.Entry<ReceiptHandleGroupKey, ReceiptHandleGroup> entry : receiptHandleGroupMap.entrySet()) {
                ReceiptHandleGroupKey key = entry.getKey();
                if (clientIsOffline(key)) {
                    clearGroup(key);
                    continue;
                }
                ReceiptHandleGroup group = entry.getValue();
                group.scan((msgID, handleStr, v) -> {
                    long current = System.currentTimeMillis();
                    ReceiptHandle handle = ReceiptHandle.decode(v.getReceiptHandleStr());
                    // Still far enough from expiring: leave it for a later scan.
                    if (handle.getNextVisibleTime() - current > proxyConfig.getRenewAheadTimeMillis()) {
                        return;
                    }
                    renewalWorkerService.submit(() -> renewMessage(createContext("RenewMessage"), key, group,
                        msgID, handleStr));
                });
            }
        } catch (Exception e) {
            log.error("unexpect error when schedule renew task", e);
        }
        log.debug("scan for renewal done. cost:{}ms", stopwatch.elapsed().toMillis());
    }

    /** Renews one handle; timeout 0 means: skip rather than wait for the per-key lock. */
    protected void renewMessage(ProxyContext context, ReceiptHandleGroupKey key, ReceiptHandleGroup group, String msgID, String handleStr) {
        try {
            group.computeIfPresent(msgID, handleStr, messageReceiptHandle -> startRenewMessage(context, key, messageReceiptHandle), 0);
        } catch (Exception e) {
            log.error("error when renew message. msgID:{}, handleStr:{}", msgID, handleStr, e);
        }
    }

    /**
     * Fires the renew (or stop-renew) event for one handle.
     *
     * @return future of the handle to keep tracking; completed with null to drop the handle
     */
    protected CompletableFuture<MessageReceiptHandle> startRenewMessage(ProxyContext context, ReceiptHandleGroupKey key, MessageReceiptHandle messageReceiptHandle) {
        CompletableFuture<MessageReceiptHandle> resFuture = new CompletableFuture<>();
        ProxyConfig proxyConfig = ConfigurationManager.getProxyConfig();
        long current = System.currentTimeMillis();
        try {
            // Too many consecutive renew failures: give up on this handle.
            if (messageReceiptHandle.getRenewRetryTimes() >= proxyConfig.getMaxRenewRetryTimes()) {
                log.warn("handle has exceed max renewRetryTimes. handle:{}", messageReceiptHandle);
                return CompletableFuture.completedFuture(null);
            }
            if (current - messageReceiptHandle.getConsumeTimestamp() < proxyConfig.getRenewMaxTimeMillis()) {
                // Still inside the overall renew window: extend invisibility once more.
                CompletableFuture<AckResult> future = new CompletableFuture<>();
                eventListener.fireEvent(new RenewEvent(key, messageReceiptHandle, RENEW_POLICY.nextDelayDuration(messageReceiptHandle.getRenewTimes()), RenewEvent.EventType.RENEW, future));
                future.whenComplete((ackResult, throwable) -> {
                    if (throwable != null) {
                        log.error("error when renew. handle:{}", messageReceiptHandle, throwable);
                        if (renewExceptionNeedRetry(throwable)) {
                            // Retryable failure: keep the handle and bump the retry count.
                            messageReceiptHandle.incrementAndGetRenewRetryTimes();
                            resFuture.complete(messageReceiptHandle);
                        } else {
                            resFuture.complete(null);
                        }
                    } else if (AckStatus.OK.equals(ackResult.getStatus())) {
                        // Successful renew: adopt the broker's new handle and reset retries.
                        messageReceiptHandle.updateReceiptHandle(ackResult.getExtraInfo());
                        messageReceiptHandle.resetRenewRetryTimes();
                        messageReceiptHandle.incrementRenewTimes();
                        resFuture.complete(messageReceiptHandle);
                    } else {
                        log.error("renew response is not ok. result:{}, handle:{}", ackResult, messageReceiptHandle);
                        resFuture.complete(null);
                    }
                });
            } else {
                // Renewed long enough: stop renewing and hand the message back according
                // to the consumer group's own retry policy.
                SubscriptionGroupConfig subscriptionGroupConfig =
                    metadataService.getSubscriptionGroupConfig(context, messageReceiptHandle.getGroup());
                if (subscriptionGroupConfig == null) {
                    log.error("group's subscriptionGroupConfig is null when renew. handle: {}", messageReceiptHandle);
                    return CompletableFuture.completedFuture(null);
                }
                RetryPolicy retryPolicy = subscriptionGroupConfig.getGroupRetryPolicy().getRetryPolicy();
                CompletableFuture<AckResult> future = new CompletableFuture<>();
                eventListener.fireEvent(new RenewEvent(key, messageReceiptHandle, retryPolicy.nextDelayDuration(messageReceiptHandle.getReconsumeTimes()), RenewEvent.EventType.STOP_RENEW, future));
                future.whenComplete((ackResult, throwable) -> {
                    if (throwable != null) {
                        log.error("error when nack in renew. handle:{}", messageReceiptHandle, throwable);
                    }
                    // Drop the handle whether or not the stop-renew succeeded.
                    resFuture.complete(null);
                });
            }
        } catch (Throwable t) {
            log.error("unexpect error when renew message, stop to renew it. handle:{}", messageReceiptHandle, t);
            resFuture.complete(null);
        }
        return resFuture;
    }

    /**
     * Stops tracking a (channel, group) key and asynchronously returns its handles
     * to the broker so the messages become consumable again.
     */
    protected void clearGroup(ReceiptHandleGroupKey key) {
        if (key == null) {
            return;
        }
        ReceiptHandleGroup handleGroup = receiptHandleGroupMap.remove(key);
        returnHandleGroupWorkerService.submit(() -> returnHandleGroup(key, handleGroup));
    }

    // There is no longer any waiting for lock, and only the locked handles will be processed immediately,
    // while the handles that cannot be acquired will be kept waiting for the next scheduling.
    private void returnHandleGroup(ReceiptHandleGroupKey key, ReceiptHandleGroup handleGroup) {
        if (handleGroup == null || handleGroup.isEmpty()) {
            return;
        }
        ProxyConfig proxyConfig = ConfigurationManager.getProxyConfig();
        handleGroup.scan((msgID, handle, v) -> {
            try {
                handleGroup.computeIfPresent(msgID, handle, messageReceiptHandle -> {
                    // Fire CLEAR_GROUP so the message becomes visible again shortly;
                    // completing with null removes the handle from the group.
                    CompletableFuture<AckResult> future = new CompletableFuture<>();
                    eventListener.fireEvent(new RenewEvent(key, messageReceiptHandle, proxyConfig.getInvisibleTimeMillisWhenClear(), RenewEvent.EventType.CLEAR_GROUP, future));
                    return CompletableFuture.completedFuture(null);
                }, 0);
            } catch (Exception e) {
                log.error("error when clear handle for group. key:{}", key, e);
            }
        });
        // scheduleRenewTask will trigger cleanup again
        if (!handleGroup.isEmpty()) {
            log.warn("The handle cannot be completely cleared, the remaining quantity is {}, key:{}", handleGroup.getHandleNum(), key);
            receiptHandleGroupMap.putIfAbsent(key, handleGroup);
        }
    }

    /** Clears every tracked group; invoked on shutdown. */
    protected void clearAllHandle() {
        log.info("start clear all handle in receiptHandleProcessor");
        Set<ReceiptHandleGroupKey> keySet = receiptHandleGroupMap.keySet();
        for (ReceiptHandleGroupKey key : keySet) {
            clearGroup(key);
        }
        log.info("clear all handle in receiptHandleProcessor done");
    }

    /**
     * Whether a failed renew should be retried. Invalid broker name or invalid
     * receipt handle are permanent — retrying cannot succeed.
     */
    protected boolean renewExceptionNeedRetry(Throwable t) {
        t = ExceptionUtils.getRealException(t);
        if (t instanceof ProxyException) {
            ProxyException proxyException = (ProxyException) t;
            if (ProxyExceptionCode.INVALID_BROKER_NAME.equals(proxyException.getCode()) ||
                ProxyExceptionCode.INVALID_RECEIPT_HANDLE.equals(proxyException.getCode())) {
                return false;
            }
        }
        return true;
    }

    /** Builds an inner-call context tagged with this class's name plus the action. */
    protected ProxyContext createContext(String actionName) {
        return ProxyContext.createForInner(this.getClass().getSimpleName() + actionName);
    }
}
|
DefaultReceiptHandleManager
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/CustomHeadersDecorator.java
|
{
"start": 1188,
"end": 1286
}
|
class ____ {@link MessageHeaders} that adds the ability to include custom HTTP headers.
*/
public
|
for
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DebeziumMySqlEndpointBuilderFactory.java
|
{
"start": 135979,
"end": 136331
}
|
class ____ extends AbstractEndpointBuilder implements DebeziumMySqlEndpointBuilder, AdvancedDebeziumMySqlEndpointBuilder {
public DebeziumMySqlEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new DebeziumMySqlEndpointBuilderImpl(path);
}
}
|
DebeziumMySqlEndpointBuilderImpl
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAclOp.java
|
{
"start": 1450,
"end": 9381
}
|
class ____ {
    /**
     * Merges the given ACL entries into the existing logical ACL of {@code srcArg}.
     * Caller must own the path; the resulting ACL is persisted to the edit log.
     */
    static FileStatus modifyAclEntries(
        FSDirectory fsd, FSPermissionChecker pc, final String srcArg,
        List<AclEntry> aclSpec) throws IOException {
        String src = srcArg;
        checkAclsConfigFlag(fsd);
        INodesInPath iip;
        fsd.writeLock();
        try {
            iip = fsd.resolvePath(pc, src, DirOp.WRITE);
            src = iip.getPath();
            fsd.checkOwner(pc, iip);
            INode inode = FSDirectory.resolveLastINode(iip);
            int snapshotId = iip.getLatestSnapshotId();
            List<AclEntry> existingAcl = AclStorage.readINodeLogicalAcl(inode);
            List<AclEntry> newAcl = AclTransformation.mergeAclEntries(
                existingAcl, aclSpec);
            AclStorage.updateINodeAcl(inode, newAcl, snapshotId);
            fsd.getEditLog().logSetAcl(src, newAcl);
        } catch (AclException e){
            // Re-throw with the resolved path appended for easier diagnosis.
            throw new AclException(e.getMessage() + " Path: " + src, e);
        } finally {
            fsd.writeUnlock();
        }
        return fsd.getAuditFileInfo(iip);
    }

    /**
     * Removes the ACL entries matching {@code aclSpec} from the path's existing
     * ACL, logging the resulting ACL to the edit log.
     */
    static FileStatus removeAclEntries(
        FSDirectory fsd, FSPermissionChecker pc, final String srcArg,
        List<AclEntry> aclSpec) throws IOException {
        String src = srcArg;
        checkAclsConfigFlag(fsd);
        INodesInPath iip;
        fsd.writeLock();
        try {
            iip = fsd.resolvePath(pc, src, DirOp.WRITE);
            src = iip.getPath();
            fsd.checkOwner(pc, iip);
            INode inode = FSDirectory.resolveLastINode(iip);
            int snapshotId = iip.getLatestSnapshotId();
            List<AclEntry> existingAcl = AclStorage.readINodeLogicalAcl(inode);
            List<AclEntry> newAcl = AclTransformation.filterAclEntriesByAclSpec(
                existingAcl, aclSpec);
            AclStorage.updateINodeAcl(inode, newAcl, snapshotId);
            fsd.getEditLog().logSetAcl(src, newAcl);
        } catch (AclException e){
            throw new AclException(e.getMessage() + " Path: " + src, e);
        } finally {
            fsd.writeUnlock();
        }
        return fsd.getAuditFileInfo(iip);
    }

    /** Strips all DEFAULT-scope entries from the path's ACL, keeping access entries. */
    static FileStatus removeDefaultAcl(FSDirectory fsd, FSPermissionChecker pc,
        final String srcArg) throws IOException {
        String src = srcArg;
        checkAclsConfigFlag(fsd);
        INodesInPath iip;
        fsd.writeLock();
        try {
            iip = fsd.resolvePath(pc, src, DirOp.WRITE);
            src = iip.getPath();
            fsd.checkOwner(pc, iip);
            INode inode = FSDirectory.resolveLastINode(iip);
            int snapshotId = iip.getLatestSnapshotId();
            List<AclEntry> existingAcl = AclStorage.readINodeLogicalAcl(inode);
            List<AclEntry> newAcl = AclTransformation.filterDefaultAclEntries(
                existingAcl);
            AclStorage.updateINodeAcl(inode, newAcl, snapshotId);
            fsd.getEditLog().logSetAcl(src, newAcl);
        } catch (AclException e){
            throw new AclException(e.getMessage() + " Path: " + src, e);
        } finally {
            fsd.writeUnlock();
        }
        return fsd.getAuditFileInfo(iip);
    }

    /** Removes the whole ACL feature, restoring plain permission-bit semantics. */
    static FileStatus removeAcl(FSDirectory fsd, FSPermissionChecker pc,
        final String srcArg) throws IOException {
        String src = srcArg;
        checkAclsConfigFlag(fsd);
        INodesInPath iip;
        fsd.writeLock();
        try {
            iip = fsd.resolvePath(pc, src, DirOp.WRITE);
            src = iip.getPath();
            fsd.checkOwner(pc, iip);
            unprotectedRemoveAcl(fsd, iip);
        } catch (AclException e){
            throw new AclException(e.getMessage() + " Path: " + src, e);
        } finally {
            fsd.writeUnlock();
        }
        // NOTE(review): unlike the other mutators, the edit-log write happens after
        // the write lock is released — confirm this ordering is intentional.
        fsd.getEditLog().logSetAcl(src, AclFeature.EMPTY_ENTRY_LIST);
        return fsd.getAuditFileInfo(iip);
    }

    /** Replaces the path's ACL wholesale with {@code aclSpec} (empty spec removes it). */
    static FileStatus setAcl(
        FSDirectory fsd, FSPermissionChecker pc, final String srcArg,
        List<AclEntry> aclSpec) throws IOException {
        String src = srcArg;
        checkAclsConfigFlag(fsd);
        INodesInPath iip;
        fsd.writeLock();
        try {
            iip = fsd.resolvePath(pc, src, DirOp.WRITE);
            fsd.checkOwner(pc, iip);
            List<AclEntry> newAcl = unprotectedSetAcl(fsd, iip, aclSpec, false);
            fsd.getEditLog().logSetAcl(iip.getPath(), newAcl);
        } catch (AclException e){
            throw new AclException(e.getMessage() + " Path: " + src, e);
        } finally {
            fsd.writeUnlock();
        }
        return fsd.getAuditFileInfo(iip);
    }

    /** Reads the path's ACL (owner, group, sticky bit, entries) under the read lock. */
    static AclStatus getAclStatus(
        FSDirectory fsd, FSPermissionChecker pc, String src) throws IOException {
        checkAclsConfigFlag(fsd);
        fsd.readLock();
        try {
            INodesInPath iip = fsd.resolvePath(pc, src, DirOp.READ);
            // There is no real inode for the path ending in ".snapshot", so return a
            // non-null, unpopulated AclStatus. This is similar to getFileInfo.
            if (iip.isDotSnapshotDir() && fsd.getINode4DotSnapshot(iip) != null) {
                return new AclStatus.Builder().owner("").group("").build();
            }
            INodeAttributes inodeAttrs = fsd.getAttributes(iip);
            List<AclEntry> acl = AclStorage.readINodeAcl(inodeAttrs);
            FsPermission fsPermission = inodeAttrs.getFsPermission();
            return new AclStatus.Builder()
                .owner(inodeAttrs.getUserName()).group(inodeAttrs.getGroupName())
                .stickyBit(fsPermission.getStickyBit())
                .setPermission(fsPermission)
                .addEntries(acl).build();
        } catch (AclException e){
            throw new AclException(e.getMessage() + " Path: " + src, e);
        } finally {
            fsd.readUnlock();
        }
    }

    /**
     * Applies a full ACL replacement while the caller already holds the write lock.
     *
     * @param fromEdits true when replaying the edit log, in which case the spec is
     *                  taken verbatim instead of being merged with the existing ACL
     * @return the ACL that was actually stored
     */
    static List<AclEntry> unprotectedSetAcl(FSDirectory fsd, INodesInPath iip,
        List<AclEntry> aclSpec, boolean fromEdits) throws IOException {
        assert fsd.hasWriteLock();
        // ACL removal is logged to edits as OP_SET_ACL with an empty list.
        if (aclSpec.isEmpty()) {
            unprotectedRemoveAcl(fsd, iip);
            return AclFeature.EMPTY_ENTRY_LIST;
        }
        INode inode = FSDirectory.resolveLastINode(iip);
        int snapshotId = iip.getLatestSnapshotId();
        List<AclEntry> newAcl = aclSpec;
        if (!fromEdits) {
            List<AclEntry> existingAcl = AclStorage.readINodeLogicalAcl(inode);
            newAcl = AclTransformation.replaceAclEntries(existingAcl, aclSpec);
        }
        AclStorage.updateINodeAcl(inode, newAcl, snapshotId);
        return newAcl;
    }

    /** Rejects any ACL operation when ACL support is disabled by configuration. */
    private static void checkAclsConfigFlag(FSDirectory fsd) throws AclException {
        if (!fsd.isAclsEnabled()) {
            throw new AclException(String.format(
                "The ACL operation has been rejected. "
                    + "Support for ACLs has been disabled by setting %s to false.",
                DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY));
        }
    }

    /**
     * Removes the inode's ACL feature (write lock held). If the ACL contained
     * ACCESS entries, the group permission bits are restored from the ACL's group
     * entry before the feature is dropped.
     */
    private static void unprotectedRemoveAcl(FSDirectory fsd, INodesInPath iip)
        throws IOException {
        assert fsd.hasWriteLock();
        INode inode = FSDirectory.resolveLastINode(iip);
        int snapshotId = iip.getLatestSnapshotId();
        AclFeature f = inode.getAclFeature();
        if (f == null) {
            return;
        }
        FsPermission perm = inode.getFsPermission();
        List<AclEntry> featureEntries = AclStorage.getEntriesFromAclFeature(f);
        if (featureEntries.get(0).getScope() == AclEntryScope.ACCESS) {
            // Restore group permissions from the feature's entry to permission
            // bits, overwriting the mask, which is not part of a minimal ACL.
            AclEntry groupEntryKey = new AclEntry.Builder()
                .setScope(AclEntryScope.ACCESS).setType(AclEntryType.GROUP).build();
            int groupEntryIndex = Collections.binarySearch(
                featureEntries, groupEntryKey,
                AclTransformation.ACL_ENTRY_COMPARATOR);
            // NOTE(review): binarySearch never returns an index > size, so the upper
            // bound here looks unreachable (>= size would be the tight check) — the
            // guard only ever fires for a negative (not-found) result.
            if (groupEntryIndex < 0 || groupEntryIndex > featureEntries.size()) {
                throw new IndexOutOfBoundsException(
                    "Invalid group entry index after binary-searching inode: "
                        + inode.getFullPathName() + "(" + inode.getId() + ") "
                        + "with featureEntries:" + featureEntries);
            }
            FsAction groupPerm = featureEntries.get(groupEntryIndex).getPermission();
            FsPermission newPerm = new FsPermission(perm.getUserAction(), groupPerm,
                perm.getOtherAction(), perm.getStickyBit());
            inode.setPermission(newPerm, snapshotId);
        }
        inode.removeAclFeature(snapshotId);
    }
}
|
FSDirAclOp
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/PrivilegedOperation.java
|
{
"start": 1488,
"end": 4483
}
|
// Operations the privileged container-executor can perform; each constant carries
// the CLI switch passed to the native executor ("" = dispatched another way, no
// dedicated switch yet).
enum ____ {
    CHECK_SETUP("--checksetup"),
    MOUNT_CGROUPS("--mount-cgroups"),
    INITIALIZE_CONTAINER(""), //no CLI switch supported yet
    LAUNCH_CONTAINER(""), //no CLI switch supported yet
    SIGNAL_CONTAINER(""), //no CLI switch supported yet
    EXEC_CONTAINER("--exec-container"), //no CLI switch supported yet
    DELETE_AS_USER(""), //no CLI switch supported yet
    LAUNCH_DOCKER_CONTAINER(""), //no CLI switch supported yet
    TC_MODIFY_STATE("--tc-modify-state"),
    TC_READ_STATE("--tc-read-state"),
    TC_READ_STATS("--tc-read-stats"),
    ADD_PID_TO_CGROUP(""), //no CLI switch supported yet.
    RUN_DOCKER_CMD("--run-docker"),
    GPU("--module-gpu"),
    FPGA("--module-fpga"),
    DEVICE("--module-devices"),
    LIST_AS_USER(""), // no CLI switch supported yet.
    ADD_NUMA_PARAMS(""), // no CLI switch supported yet.
    REMOVE_DOCKER_CONTAINER("--remove-docker-container"),
    INSPECT_DOCKER_CONTAINER("--inspect-docker-container"),
    SYNC_YARN_SYSFS(""),
    RUN_RUNC_CONTAINER("--run-runc-container"),
    REAP_RUNC_LAYER_MOUNTS("--reap-runc-layer-mounts");
    private final String option;
    OperationType(String option) {
        this.option = option;
    }
    /** The CLI switch for this operation, or "" when none is defined. */
    public String getOption() {
        return option;
    }
}
// Prefix/sentinel used when passing cgroup paths to the container-executor.
public static final String CGROUP_ARG_PREFIX = "cgroups=";
public static final String CGROUP_ARG_NO_TASKS = "none";
// The operation to run and its CLI arguments (in order).
private final OperationType opType;
private final List<String> args;
// Whether executor failures for this operation should be logged (default true).
private boolean failureLogging;
/** Creates an operation of the given type with no arguments; failure logging on. */
public PrivilegedOperation(OperationType opType) {
    this.opType = opType;
    this.args = new ArrayList<String>();
    this.failureLogging = true;
}
/** Creates an operation with a single argument; a null arg adds nothing. */
public PrivilegedOperation(OperationType opType, String arg) {
    this(opType);
    if (arg != null) {
        this.args.add(arg);
    }
}
/** Creates an operation with an initial argument list; a null list adds nothing. */
public PrivilegedOperation(OperationType opType, List<String> args) {
    this(opType);
    if (args != null) {
        this.args.addAll(args);
    }
}
/** Appends each of the given arguments, in order, to this operation's argument list. */
public void appendArgs(String... args) {
    Collections.addAll(this.args, args);
}
/** Appends all arguments from the given list to this operation's argument list. */
public void appendArgs(List<String> args) {
    this.args.addAll(args);
}
/** Turns on logging of executor failures for this operation. */
public void enableFailureLogging() {
    this.failureLogging = true;
}
/** Turns off logging of executor failures for this operation. */
public void disableFailureLogging() {
    this.failureLogging = false;
}
/** Whether executor failures for this operation should be logged. */
public boolean isFailureLoggingEnabled() {
    return failureLogging;
}
/** The type of privileged operation to execute. */
public OperationType getOperationType() {
    return opType;
}
/** Read-only view of the CLI arguments; callers must not attempt to modify it. */
public List<String> getArguments() {
    return Collections.unmodifiableList(this.args);
}
/** Two operations are equal when both their type and argument lists match. */
@Override
public boolean equals(Object other) {
    if (other instanceof PrivilegedOperation) {
        PrivilegedOperation that = (PrivilegedOperation) other;
        return opType.equals(that.opType) && args.equals(that.args);
    }
    return false;
}
/** Hash consistent with equals: combines the type hash and the argument-list hash. */
@Override
public int hashCode() {
    // Same value as before — the two addends are merely reordered.
    return 97 * args.hashCode() + opType.hashCode();
}
/**
* List of commands that the container-executor will execute.
*/
public
|
OperationType
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/RHyperLogLogReactive.java
|
{
"start": 926,
"end": 2289
}
|
interface ____<V> extends RExpirableReactive {
    /**
     * Adds element into this structure.
     *
     * @param obj - element to add
     * @return <code>true</code> if object has been added
     * or <code>false</code> if it was already added
     */
    Mono<Boolean> add(V obj);
    /**
     * Adds all elements contained in <code>objects</code> collection into this structure
     *
     * @param objects - elements to add
     * @return <code>true</code> if at least one object has been added
     * or <code>false</code> if all were already added
     */
    Mono<Boolean> addAll(Collection<V> objects);
    /**
     * Returns approximated number of unique elements added into this structure.
     *
     * @return approximated number of unique elements added into this structure
     */
    Mono<Long> count();
    /**
     * Returns approximated number of unique elements
     * added into this instances and other instances defined through <code>otherLogNames</code>.
     *
     * @param otherLogNames - name of instances
     * @return approximated number of unique elements across this instance
     * and the named instances combined
     */
    Mono<Long> countWith(String... otherLogNames);
    /**
     * Merges multiple instances into this instance.
     *
     * @param otherLogNames - name of instances
     * @return {@code Mono} that completes once the merge has been performed
     */
    Mono<Void> mergeWith(String... otherLogNames);
}
|
RHyperLogLogReactive
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/file/strategy/FileChangedReadLockTimeoutTest.java
|
{
"start": 908,
"end": 1389
}
|
// Same scenario as the parent read-lock test, but with an explicit
// readLockTimeout of 2000ms (re-checked every 100ms) so acquiring the
// "changed" read lock gives up instead of waiting indefinitely.
class ____ extends FileChangedReadLockTest {
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                from(fileUri("in?initialDelay=0&delay=10&readLock=changed&readLockCheckInterval=100&readLockTimeout=2000"))
                    .to(fileUri("out"),
                        "mock:result");
            }
        };
    }
}
|
FileChangedReadLockTimeoutTest
|
java
|
playframework__playframework
|
documentation/manual/working/commonGuide/filters/code/javaguide/detailed/filters/FiltersTest.java
|
{
"start": 505,
"end": 1637
}
|
class ____ extends WithApplication {
    // Exercises routing a hand-built GET request through a RoutingDsl router.
    // The "// #..." marker comments are documentation snippet anchors — keep them.
    @Test
    public void testRequestBuilder() {
        Router router =
            new RoutingDsl(instanceOf(play.mvc.BodyParser.Default.class))
                .GET("/xx/Kiwi")
                .routingTo(request -> Results.ok("success"))
                .build();
        // #test-with-request-builder
        Http.RequestBuilder request =
            new Http.RequestBuilder()
                .method(GET)
                .header(Http.HeaderNames.HOST, "localhost")
                .uri("/xx/Kiwi");
        // #test-with-request-builder
        Helpers.routeAndCall(app, router, request, 10_000 /* 10 seconds */);
    }
    // Exercises a POST through the same router with a CSRF token attached,
    // since POSTs are subject to the CSRF filter.
    @Test
    public void test() {
        Router router =
            new RoutingDsl(instanceOf(play.mvc.BodyParser.Default.class))
                .POST("/xx/Kiwi")
                .routingTo(request -> Results.ok("success"))
                .build();
        // #test-with-addCSRFToken
        Http.RequestBuilder request = new Http.RequestBuilder().method(POST).uri("/xx/Kiwi");
        request = CSRFTokenHelper.addCSRFToken(request);
        // #test-with-addCSRFToken
        Helpers.routeAndCall(app, router, request, 10_000 /* 10 seconds */);
    }
}
|
FiltersTest
|
java
|
micronaut-projects__micronaut-core
|
discovery-core/src/main/java/io/micronaut/discovery/cloud/digitalocean/DigitalOceanMetadataKeys.java
|
{
"start": 663,
"end": 758
}
|
enum ____ Digital Ocean metadata.
*
* @author Alvaro Sanchez-Mariscal
* @since 1.1
*/
public
|
of
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/engine/VersionValue.java
|
{
"start": 709,
"end": 2489
}
|
class ____ implements Accountable {

    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(VersionValue.class);

    /** the version of the document. used for versioned indexed operations and as a BWC layer, where no seq# are set yet */
    final long version;
    /** the seq number of the operation that last changed the associated uuid */
    final long seqNo;
    /** the term of the operation that last changed the associated uuid */
    final long term;

    VersionValue(long version, long seqNo, long term) {
        this.version = version;
        this.seqNo = seqNo;
        this.term = term;
    }

    /** Whether this entry marks a deletion; plain version values never do. */
    public boolean isDelete() {
        return false;
    }

    @Override
    public long ramBytesUsed() {
        return BASE_RAM_BYTES_USED;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final VersionValue other = (VersionValue) o;
        return version == other.version && seqNo == other.seqNo && term == other.term;
    }

    @Override
    public int hashCode() {
        // Long.hashCode(x) == (int) (x ^ (x >>> 32)), so this yields the same
        // value as the original hand-written folding.
        int hash = Long.hashCode(version);
        hash = 31 * hash + Long.hashCode(seqNo);
        hash = 31 * hash + Long.hashCode(term);
        return hash;
    }

    @Override
    public String toString() {
        return "VersionValue{version=" + version + ", seqNo=" + seqNo + ", term=" + term + '}';
    }

    /**
     * Returns the translog location for this version value or null. This is optional and might not be tracked all the time.
     */
    @Nullable
    public Translog.Location getLocation() {
        return null;
    }
}
|
VersionValue
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1124/Issue1124Mapper.java
|
{
"start": 1122,
"end": 1265
}
|
class ____ {
}
@Mapping(target = "id", source = "entity.id")
DTO map(Entity entity, @Context MappingContext context);
}
|
MappingContext
|
java
|
apache__camel
|
components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/model/complex/generateheader/Order.java
|
{
"start": 1179,
"end": 3338
}
|
// Bindy CSV model: one order row. Field positions and column names are fixed by
// the @DataField annotations; Client and Security rows are linked in via @Link.
class ____ {
    @DataField(pos = 1, columnName = "Order Nr")
    private int orderNr;
    @Link
    private Client client;
    @Link
    private Security security;
    @DataField(pos = 7, columnName = "Order Type")
    private String orderType;
    @DataField(name = "Name", pos = 8, columnName = "Instrument Type")
    private String instrumentType;
    @DataField(pos = 9, precision = 2)
    private BigDecimal amount;
    @DataField(pos = 10)
    private String currency;
    @DataField(pos = 11, pattern = "dd-MM-yyyy", columnName = "Order Date")
    private Date orderDate;

    // Accessors, grouped in field-declaration order.
    public int getOrderNr() {
        return orderNr;
    }
    public void setOrderNr(int orderNr) {
        this.orderNr = orderNr;
    }
    public Client getClient() {
        return client;
    }
    public void setClient(Client client) {
        this.client = client;
    }
    public Security getSecurity() {
        return security;
    }
    public void setSecurity(Security security) {
        this.security = security;
    }
    public String getOrderType() {
        return orderType;
    }
    public void setOrderType(String orderType) {
        this.orderType = orderType;
    }
    public String getInstrumentType() {
        return instrumentType;
    }
    public void setInstrumentType(String instrumentType) {
        this.instrumentType = instrumentType;
    }
    public BigDecimal getAmount() {
        return amount;
    }
    public void setAmount(BigDecimal amount) {
        this.amount = amount;
    }
    public String getCurrency() {
        return currency;
    }
    public void setCurrency(String currency) {
        this.currency = currency;
    }
    public Date getOrderDate() {
        return orderDate;
    }
    public void setOrderDate(Date orderDate) {
        this.orderDate = orderDate;
    }

    @Override
    public String toString() {
        // NOTE(review): the doubled ", " after the amount reproduces the original
        // output byte-for-byte — it looks accidental, but the format is kept as-is.
        StringBuilder text = new StringBuilder("Model : ");
        text.append(Order.class.getName()).append(" : ")
            .append(this.orderNr).append(", ")
            .append(this.orderType).append(", ")
            .append(String.valueOf(this.amount)).append(", ").append(", ")
            .append(this.instrumentType).append(", ")
            .append(this.currency).append(", ")
            .append(String.valueOf(this.orderDate));
        return text.toString();
    }
}
|
Order
|
java
|
quarkusio__quarkus
|
integration-tests/vertx-web-jackson/src/test/java/com/example/reactivejackson/SimpleEndpointTest.java
|
{
"start": 346,
"end": 654
}
|
class ____ {
    // GET /person must return 200 with a body containing exactly one attribute:
    // name == "Foo" (null-valued attributes must have been stripped by Jackson).
    @Test
    public void ensure_there_is_no_null_attribute() {
        when().get("person")
            .then()
            .statusCode(HttpStatus.SC_OK)
            .body(
                "name", is("Foo"),
                "size()", is(1));
    }
}
|
SimpleEndpointTest
|
java
|
spring-projects__spring-framework
|
spring-aop/src/main/java/org/springframework/aop/target/AbstractPoolingTargetSource.java
|
{
"start": 1103,
"end": 1335
}
|
class ____ pooling {@link org.springframework.aop.TargetSource}
* implementations which maintain a pool of target instances, acquiring and
* releasing a target object from the pool for each method invocation.
* This abstract base
|
for
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/fetchstrategyhelper/FetchStrategyHelperTest.java
|
{
"start": 1254,
"end": 8443
}
|
class ____ {
// Default many-to-one mapping: FetchMode.JOIN -> FetchStyle.JOIN -> IMMEDIATE timing.
@Test
public void testManyToOneDefaultFetch(SessionFactoryScope factoryScope) {
    final SessionFactoryImplementor sessionFactory = factoryScope.getSessionFactory();
    final AssociationType associationType = determineAssociationType( AnEntity.class, "otherEntityDefault", sessionFactory );
    final org.hibernate.FetchMode fetchMode = determineFetchMode( AnEntity.class, "otherEntityDefault", sessionFactory );
    Assertions.assertSame( org.hibernate.FetchMode.JOIN, fetchMode );
    final FetchStyle fetchStyle = FetchOptionsHelper.determineFetchStyleByMetadata(
        fetchMode,
        associationType,
        sessionFactory
    );
    Assertions.assertSame( FetchStyle.JOIN, fetchStyle );
    final FetchTiming fetchTiming = FetchOptionsHelper.determineFetchTiming(
        fetchStyle,
        associationType,
        sessionFactory
    );
    Assertions.assertSame( FetchTiming.IMMEDIATE, fetchTiming );
}
// Explicit join-fetched many-to-one: FetchMode.JOIN -> FetchStyle.JOIN -> IMMEDIATE timing.
@Test
public void testManyToOneJoinFetch(SessionFactoryScope factoryScope) {
    final SessionFactoryImplementor sessionFactory = factoryScope.getSessionFactory();
    final AssociationType associationType = determineAssociationType( AnEntity.class, "otherEntityJoin", sessionFactory );
    final org.hibernate.FetchMode fetchMode = determineFetchMode( AnEntity.class, "otherEntityJoin", sessionFactory );
    Assertions.assertSame( org.hibernate.FetchMode.JOIN, fetchMode );
    final FetchStyle fetchStyle = FetchOptionsHelper.determineFetchStyleByMetadata(
        fetchMode,
        associationType,
        sessionFactory
    );
    Assertions.assertSame( FetchStyle.JOIN, fetchStyle );
    final FetchTiming fetchTiming = FetchOptionsHelper.determineFetchTiming(
        fetchStyle,
        associationType,
        sessionFactory
    );
    Assertions.assertSame( FetchTiming.IMMEDIATE, fetchTiming );
}
// Select-fetched many-to-one: FetchMode.SELECT -> FetchStyle.SELECT -> DELAYED timing.
@Test
public void testManyToOneSelectFetch(SessionFactoryScope factoryScope) {
    final SessionFactoryImplementor sessionFactory = factoryScope.getSessionFactory();
    final AssociationType associationType = determineAssociationType( AnEntity.class, "otherEntitySelect", sessionFactory );
    final org.hibernate.FetchMode fetchMode = determineFetchMode( AnEntity.class, "otherEntitySelect", sessionFactory );
    Assertions.assertSame( org.hibernate.FetchMode.SELECT, fetchMode );
    final FetchStyle fetchStyle = FetchOptionsHelper.determineFetchStyleByMetadata(
        fetchMode,
        associationType,
        sessionFactory
    );
    Assertions.assertSame( FetchStyle.SELECT, fetchStyle );
    final FetchTiming fetchTiming = FetchOptionsHelper.determineFetchTiming(
        fetchStyle,
        associationType,
        sessionFactory
    );
    Assertions.assertSame( FetchTiming.DELAYED, fetchTiming );
}
// Default collection mapping: FetchMode.SELECT -> FetchStyle.SELECT -> DELAYED timing.
@Test
public void testCollectionDefaultFetch(SessionFactoryScope factoryScope) {
    final SessionFactoryImplementor sessionFactory = factoryScope.getSessionFactory();
    final AssociationType associationType = determineAssociationType( AnEntity.class, "colorsDefault", sessionFactory );
    final org.hibernate.FetchMode fetchMode = determineFetchMode( AnEntity.class, "colorsDefault", sessionFactory );
    Assertions.assertSame( org.hibernate.FetchMode.SELECT, fetchMode );
    final FetchStyle fetchStyle = FetchOptionsHelper.determineFetchStyleByMetadata(
        fetchMode,
        associationType,
        sessionFactory
    );
    Assertions.assertSame( FetchStyle.SELECT, fetchStyle );
    final FetchTiming fetchTiming = FetchOptionsHelper.determineFetchTiming(
        fetchStyle,
        associationType,
        sessionFactory
    );
    Assertions.assertSame( FetchTiming.DELAYED, fetchTiming );
}
@Test
public void testCollectionJoinFetch(SessionFactoryScope factoryScope) {
final SessionFactoryImplementor sessionFactory = factoryScope.getSessionFactory();
final AssociationType associationType = determineAssociationType( AnEntity.class, "colorsJoin", sessionFactory );
final org.hibernate.FetchMode fetchMode = determineFetchMode( AnEntity.class, "colorsJoin", sessionFactory );
Assertions.assertSame( org.hibernate.FetchMode.JOIN, fetchMode );
final FetchStyle fetchStyle = FetchOptionsHelper.determineFetchStyleByMetadata(
fetchMode,
associationType,
sessionFactory
);
Assertions.assertSame( FetchStyle.JOIN, fetchStyle );
final FetchTiming fetchTiming = FetchOptionsHelper.determineFetchTiming(
fetchStyle,
associationType,
sessionFactory
);
Assertions.assertSame( FetchTiming.IMMEDIATE, fetchTiming );
}
@Test
public void testCollectionSelectFetch(SessionFactoryScope factoryScope) {
final SessionFactoryImplementor sessionFactory = factoryScope.getSessionFactory();
final AssociationType associationType = determineAssociationType( AnEntity.class, "colorsSelect", sessionFactory );
final org.hibernate.FetchMode fetchMode = determineFetchMode( AnEntity.class, "colorsSelect", sessionFactory );
Assertions.assertSame( org.hibernate.FetchMode.SELECT, fetchMode );
final FetchStyle fetchStyle = FetchOptionsHelper.determineFetchStyleByMetadata(
fetchMode,
associationType,
sessionFactory
);
Assertions.assertSame( FetchStyle.SELECT, fetchStyle );
final FetchTiming fetchTiming = FetchOptionsHelper.determineFetchTiming(
fetchStyle,
associationType,
sessionFactory
);
Assertions.assertSame( FetchTiming.DELAYED, fetchTiming );
}
@Test
public void testCollectionSubselectFetch(SessionFactoryScope factoryScope) {
final SessionFactoryImplementor sessionFactory = factoryScope.getSessionFactory();
final AssociationType associationType = determineAssociationType( AnEntity.class, "colorsSubselect", sessionFactory );
final org.hibernate.FetchMode fetchMode = determineFetchMode( AnEntity.class, "colorsSubselect", sessionFactory );
Assertions.assertSame( org.hibernate.FetchMode.SELECT, fetchMode );
final FetchStyle fetchStyle = FetchOptionsHelper.determineFetchStyleByMetadata(
fetchMode,
associationType,
sessionFactory
);
Assertions.assertSame( FetchStyle.SUBSELECT, fetchStyle );
final FetchTiming fetchTiming = FetchOptionsHelper.determineFetchTiming(
fetchStyle,
associationType,
sessionFactory
);
Assertions.assertSame( FetchTiming.DELAYED, fetchTiming );
}
private org.hibernate.FetchMode determineFetchMode(
@SuppressWarnings("SameParameterValue") Class<?> entityClass,
String path,
SessionFactoryImplementor sessionFactory) {
AbstractEntityPersister entityPersister = (AbstractEntityPersister)
sessionFactory.getMappingMetamodel().getEntityDescriptor(entityClass.getName());
//noinspection removal
int index = entityPersister.getPropertyIndex( path );
return entityPersister.getFetchMode( index );
}
private AssociationType determineAssociationType(
@SuppressWarnings("SameParameterValue") Class<?> entityClass,
String path,
SessionFactoryImplementor sessionFactory) {
AbstractEntityPersister entityPersister = (AbstractEntityPersister)
sessionFactory.getMappingMetamodel().getEntityDescriptor(entityClass.getName());
//noinspection removal
int index = entityPersister.getPropertyIndex( path );
return (AssociationType) entityPersister.getSubclassPropertyType( index );
}
@jakarta.persistence.Entity
@Table(name="entity")
public static
|
FetchStrategyHelperTest
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/api/ProtocolDetector.java
|
{
"start": 1095,
"end": 2269
}
|
class ____ {
private final Flag flag;
private final Map<String, String> detectContext = new HashMap<>(4);
private Result(Flag flag) {
this.flag = flag;
}
public void setAttribute(String key, String value) {
this.detectContext.put(key, value);
}
public String getAttribute(String key) {
return this.detectContext.get(key);
}
public void removeAttribute(String key) {
this.detectContext.remove(key);
}
public Flag flag() {
return flag;
}
public static Result recognized() {
return new Result(Flag.RECOGNIZED);
}
public static Result unrecognized() {
return new Result(Flag.UNRECOGNIZED);
}
public static Result needMoreData() {
return new Result(Flag.NEED_MORE_DATA);
}
@Override
public int hashCode() {
return flag.hashCode();
}
@Override
public boolean equals(Object obj) {
return obj instanceof Result && flag == ((Result) obj).flag;
}
}
|
Result
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/settings/Setting.java
|
{
"start": 96274,
"end": 97005
}
|
class ____ extends SimpleKey {
public GroupKey(String key) {
super(key);
if (key.endsWith(".") == false) {
throw new IllegalArgumentException("key must end with a '.'");
}
}
@Override
public boolean match(String toTest) {
return toTest != null && toTest.startsWith(key);
}
@Override
public boolean exists(Set<String> keys, Set<String> exclusions) {
if (exclusions.isEmpty()) {
return keys.stream().anyMatch(this::match);
}
return keys.stream().filter(Predicate.not(exclusions::contains)).anyMatch(this::match);
}
}
public static final
|
GroupKey
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unused/ArcContainerSupplierLookupProblemDetectedTest.java
|
{
"start": 1633,
"end": 2056
}
|
class ____.quarkus.arc.test.unused.ArcContainerSupplierLookupProblemDetectedTest$Alpha"),
message);
});
@Test
public void testWarning() {
// Note that the warning is only displayed once, subsequent calls use a cached result
assertNull(Arc.container().beanInstanceSupplier(Alpha.class));
}
// unused bean, will be removed
@ApplicationScoped
static
|
io
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationIT.java
|
{
"start": 55549,
"end": 60749
}
|
class ____ lie within [0, 1] interval.
classProbabilities.forEach(p -> assertThat(p, allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(1.0))));
// Assert that the top classes are listed in the order of decreasing scores.
double prevScore = classScores.get(0);
for (int i = 1; i < classScores.size(); ++i) {
double score = classScores.get(i);
assertThat("class " + i, score, lessThanOrEqualTo(prevScore));
}
}
private <T> void assertEvaluation(String dependentVariable, List<T> dependentVariableValues, String predictedClassField) {
List<String> dependentVariableValuesAsStrings = dependentVariableValues.stream().map(String::valueOf).collect(toList());
EvaluateDataFrameAction.Response evaluateDataFrameResponse = evaluateDataFrame(
destIndex,
new org.elasticsearch.xpack.core.ml.dataframe.evaluation.classification.Classification(
dependentVariable,
predictedClassField,
null,
Arrays.asList(
new Accuracy(),
new AucRoc(true, dependentVariableValues.get(0).toString()),
new MulticlassConfusionMatrix(),
new Precision(),
new Recall()
)
)
);
assertThat(evaluateDataFrameResponse.getEvaluationName(), equalTo(Classification.NAME.getPreferredName()));
assertThat(evaluateDataFrameResponse.getMetrics(), hasSize(5));
{ // Accuracy
Accuracy.Result accuracyResult = (Accuracy.Result) evaluateDataFrameResponse.getMetrics().get(0);
assertThat(accuracyResult.getMetricName(), equalTo(Accuracy.NAME.getPreferredName()));
for (PerClassSingleValue klass : accuracyResult.getClasses()) {
assertThat(klass.getClassName(), is(in(dependentVariableValuesAsStrings)));
assertThat(klass.getValue(), allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(1.0)));
}
}
{ // AucRoc
AucRoc.Result aucRocResult = (AucRoc.Result) evaluateDataFrameResponse.getMetrics().get(1);
assertThat(aucRocResult.getMetricName(), equalTo(AucRoc.NAME.getPreferredName()));
assertThat(aucRocResult.getValue(), allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(1.0)));
assertThat(aucRocResult.getCurve(), hasSize(greaterThan(0)));
}
{ // MulticlassConfusionMatrix
MulticlassConfusionMatrix.Result confusionMatrixResult = (MulticlassConfusionMatrix.Result) evaluateDataFrameResponse
.getMetrics()
.get(2);
assertThat(confusionMatrixResult.getMetricName(), equalTo(MulticlassConfusionMatrix.NAME.getPreferredName()));
List<MulticlassConfusionMatrix.ActualClass> actualClasses = confusionMatrixResult.getConfusionMatrix();
assertThat(
actualClasses.stream().map(MulticlassConfusionMatrix.ActualClass::getActualClass).collect(toList()),
equalTo(dependentVariableValuesAsStrings)
);
for (MulticlassConfusionMatrix.ActualClass actualClass : actualClasses) {
assertThat(actualClass.getOtherPredictedClassDocCount(), equalTo(0L));
assertThat(
actualClass.getPredictedClasses()
.stream()
.map(MulticlassConfusionMatrix.PredictedClass::getPredictedClass)
.collect(toList()),
equalTo(dependentVariableValuesAsStrings)
);
}
assertThat(confusionMatrixResult.getOtherActualClassCount(), equalTo(0L));
}
{ // Precision
Precision.Result precisionResult = (Precision.Result) evaluateDataFrameResponse.getMetrics().get(3);
assertThat(precisionResult.getMetricName(), equalTo(Precision.NAME.getPreferredName()));
for (PerClassSingleValue klass : precisionResult.getClasses()) {
assertThat(klass.getClassName(), is(in(dependentVariableValuesAsStrings)));
assertThat(klass.getValue(), allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(1.0)));
}
}
{ // Recall
Recall.Result recallResult = (Recall.Result) evaluateDataFrameResponse.getMetrics().get(4);
assertThat(recallResult.getMetricName(), equalTo(Recall.NAME.getPreferredName()));
for (PerClassSingleValue klass : recallResult.getClasses()) {
assertThat(klass.getClassName(), is(in(dependentVariableValuesAsStrings)));
assertThat(klass.getValue(), allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(1.0)));
}
}
}
private String stateDocId() {
return jobId + "_classification_state#1";
}
private String expectedDestIndexAuditMessage() {
return (analysisUsesExistingDestIndex ? "Using existing" : "Creating") + " destination index [" + destIndex + "]";
}
@Override
boolean supportsInference() {
return true;
}
}
|
probabilities
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorTests.java
|
{
"start": 63057,
"end": 63207
}
|
class ____ {
@Bean public ExtendedAgainFoo foo() {
return new ExtendedAgainFoo();
}
}
@Configuration
static
|
OverridingAgainSingletonBeanConfig
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/doubles/Doubles_assertIsNaN_Test.java
|
{
"start": 1186,
"end": 2288
}
|
class ____ extends DoublesBaseTest {
@Test
void should_succeed_since_actual_is_equal_to_NaN() {
doubles.assertIsNaN(someInfo(), Double.NaN);
}
@Test
void should_fail_since_actual_is_not_equal_to_NaN() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> doubles.assertIsNaN(someInfo(), 6d))
.withMessage(shouldBeEqualMessage("6.0", "NaN"));
}
@Test
void should_succeed_since_actual_is_equal_to_NaN_whatever_custom_comparison_strategy_is() {
doublesWithAbsValueComparisonStrategy.assertIsNaN(someInfo(), Double.NaN);
}
@Test
void should_fail_since_actual_is_not_equal_to_NaN_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> doublesWithAbsValueComparisonStrategy.assertIsNaN(someInfo(),
6d))
.withMessage(shouldBeEqualMessage("6.0", "NaN"));
}
}
|
Doubles_assertIsNaN_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/embeddables/EmbeddableAndMappedSuperClassWithGenericsTest.java
|
{
"start": 4114,
"end": 4369
}
|
class ____ extends Book<String> {
@Id
private Long id;
public PopularBook() {
}
public PopularBook(Long id, Edition edition, String code) {
super( edition, code );
this.id = id;
}
}
@Entity(name = "RareBook")
public static
|
PopularBook
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/codec/JacksonCodec.java
|
{
"start": 266,
"end": 2034
}
|
class ____ implements Codec {
private ObjectMapper mapper = new ObjectMapper();
public String getName() {
return "jackson ";
}
public final <T> T decodeObject(String text, Class<T> clazz) {
try {
return mapper.readValue(text, clazz);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public <T> T decodeObject(byte[] input, Class<T> clazz) throws Exception {
try {
return mapper.readValue(input, clazz);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public <T> Collection<T> decodeArray(String text, Class<T> clazz) throws Exception {
try {
return (Collection<T>) mapper.readValue(text, new TypeReference<T>() {
});
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public final Object decodeObject(String text) {
try {
return (ObjectNode) mapper.readTree(text);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public Object decode(String text) {
try {
return mapper.readTree(text);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public String encode(Object object) throws Exception {
return mapper.writeValueAsString(object);
}
public byte[] encodeToBytes(Object object) throws Exception {
return mapper.writeValueAsBytes(object);
}
public void encode(OutputStream out, Object object) throws Exception {
out.write(encodeToBytes(object));
}
}
|
JacksonCodec
|
java
|
quarkusio__quarkus
|
integration-tests/maven/src/test/resources-filtered/projects/test-flaky-test-multiple-profiles/src/test/java/org/acme/FlakyHelloResourceWithSomeProfileTest.java
|
{
"start": 1232,
"end": 1333
}
|
class ____ implements QuarkusTestProfile {
public SomeProfile() {
}
}
}
|
SomeProfile
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Issue1063_date.java
|
{
"start": 1013,
"end": 1262
}
|
class ____ {
private Date timestamp = null;
public Date getTimestamp() {
return timestamp;
}
public void setTimestamp(Date timestamp) {
this.timestamp = timestamp;
}
}
}
|
TimestampBean
|
java
|
alibaba__nacos
|
core/src/main/java/com/alibaba/nacos/core/remote/LongConnectionMetricsCollector.java
|
{
"start": 872,
"end": 1558
}
|
class ____ implements ConnectionMetricsCollector {
@Override
public String getName() {
return "long_connection";
}
@Override
public int getTotalCount() {
return ApplicationUtils.getBean(ConnectionManager.class).currentClientsCount();
}
@Override
public int getCountForIp(String ip) {
ConnectionManager connectionManager = ApplicationUtils.getBean(ConnectionManager.class);
if (connectionManager.getConnectionForClientIp().containsKey(ip)) {
return connectionManager.getConnectionForClientIp().get(ip).intValue();
} else {
return 0;
}
}
}
|
LongConnectionMetricsCollector
|
java
|
apache__camel
|
components/camel-micrometer/src/test/java/org/apache/camel/component/micrometer/routepolicy/ZMicrometerRoutePolicyExcludePatternTest.java
|
{
"start": 1353,
"end": 3021
}
|
class ____ extends AbstractMicrometerRoutePolicyTest {
@Override
protected MicrometerRoutePolicyFactory createMicrometerRoutePolicyFactory() {
MicrometerRoutePolicyFactory factory = super.createMicrometerRoutePolicyFactory();
factory.getPolicyConfiguration().setContextEnabled(false);
factory.getPolicyConfiguration().setRouteEnabled(true);
factory.getPolicyConfiguration().setExcludePattern("bar");
return factory;
}
@Test
public void testMetricsRoutePolicy() throws Exception {
int count = 10;
getMockEndpoint("mock:foo").expectedMessageCount(count);
getMockEndpoint("mock:bar").expectedMessageCount(count);
for (int i = 0; i < count; i++) {
template.sendBody("direct:foo", "Hello World");
}
MockEndpoint.assertIsSatisfied(context);
// there should be 6 metrics per route (only 1 route as bar is excluded)
// additionally one for App info gauge
List<Meter> meters = meterRegistry.getMeters();
assertEquals(7, meters.size());
meters.forEach(meter -> assertTrue(meter instanceof Timer || meter instanceof Counter || meter instanceof Gauge));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:foo").routeId("foo")
.to("direct:bar")
.to("mock:foo");
from("direct:bar").routeId("bar")
.to("mock:bar");
}
};
}
}
|
ZMicrometerRoutePolicyExcludePatternTest
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-multiple/src/main/java/org/apache/dubbo/registry/multiple/MultipleServiceDiscovery.java
|
{
"start": 9706,
"end": 10482
}
|
class ____ extends ServiceInstancesChangedListener {
private final MultiServiceInstancesChangedListener multiListener;
volatile ServiceInstancesChangedEvent event;
public SingleServiceInstancesChangedListener(
Set<String> serviceNames,
ServiceDiscovery serviceDiscovery,
MultiServiceInstancesChangedListener multiListener) {
super(serviceNames, serviceDiscovery);
this.multiListener = multiListener;
}
@Override
public void onEvent(ServiceInstancesChangedEvent event) {
this.event = event;
if (multiListener != null) {
multiListener.onEvent(event);
}
}
}
}
|
SingleServiceInstancesChangedListener
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/context/annotation/primary/Green.java
|
{
"start": 915,
"end": 1042
}
|
class ____ implements ColorPicker {
@Override
public String color() {
return "green";
}
}
//end::clazz[]
|
Green
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/DisabledEndpointTestCase.java
|
{
"start": 1921,
"end": 2181
}
|
class ____ {
@Path("/hello5")
@GET
public String hello5() {
return null;
}
}
@EndpointDisabled(name = "xxx", disableIfMissing = true, stringValue = "xxx")
@Path("/")
public static
|
DisabledOtherEndpoint
|
java
|
apache__camel
|
components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpCharacterEncodingTest.java
|
{
"start": 2001,
"end": 2586
}
|
class ____ implements Processor {
@Override
public void process(Exchange exchange) {
// just get the body as a string
String body = exchange.getIn().getBody(String.class);
// for unit testing make sure we got right message
assertEquals("Hello World Thai Elephant \u0E08", body);
// send a html response
exchange.getMessage().setHeader("Content-Type", "text/html; charset=utf-8");
exchange.getMessage().setBody("Response message is Thai Elephant \u0E08");
}
}
}
|
MyBookService
|
java
|
apache__camel
|
components/camel-pdf/src/main/java/org/apache/camel/component/pdf/text/DefaultWriteStrategy.java
|
{
"start": 1350,
"end": 3609
}
|
class ____ implements WriteStrategy {
private final PdfConfiguration pdfConfiguration;
public DefaultWriteStrategy(PdfConfiguration pdfConfiguration) {
this.pdfConfiguration = pdfConfiguration;
}
@Override
public PDDocument write(Collection<String> lines, PDDocument document) throws IOException {
PDPage page = new PDPage(pdfConfiguration.getPageSize());
document.addPage(page);
float x = pdfConfiguration.getMarginLeft();
float y = page.getMediaBox().getHeight() - pdfConfiguration.getMarginTop();
float averageFontHeight
= PdfUtils.getAverageFontHeight(new PDType1Font(Standard14Fonts.FontName.valueOf(pdfConfiguration.getFont())),
pdfConfiguration.getFontSize());
float lineSpacing = averageFontHeight * 2;
PDPageContentStream contentStream = initializeContentStream(document, page);
for (String line : lines) {
writeLine(x, y, line, contentStream);
y -= lineSpacing;
if (goToNextPage(y)) {
contentStream.close();
page = new PDPage(pdfConfiguration.getPageSize());
document.addPage(page);
contentStream = initializeContentStream(document, page);
y = page.getMediaBox().getHeight() - pdfConfiguration.getMarginTop();
}
}
contentStream.close();
return document;
}
private boolean goToNextPage(float y) {
return y < pdfConfiguration.getMarginBottom();
}
private void writeLine(float x, float y, String currentLine, PDPageContentStream contentStream) throws IOException {
contentStream.beginText();
contentStream.newLineAtOffset(x, y);
contentStream.showText(currentLine);
contentStream.endText();
}
private PDPageContentStream initializeContentStream(PDDocument document, PDPage page) throws IOException {
PDPageContentStream contentStream = new PDPageContentStream(document, page);
contentStream.setFont(new PDType1Font(Standard14Fonts.FontName.valueOf(pdfConfiguration.getFont())),
pdfConfiguration.getFontSize());
return contentStream;
}
}
|
DefaultWriteStrategy
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/ForwardingNavigableSet.java
|
{
"start": 2400,
"end": 6279
}
|
class ____<E extends @Nullable Object>
extends ForwardingSortedSet<E> implements NavigableSet<E> {
/** Constructor for use by subclasses. */
protected ForwardingNavigableSet() {}
@Override
protected abstract NavigableSet<E> delegate();
@Override
public @Nullable E lower(@ParametricNullness E e) {
return delegate().lower(e);
}
/**
* A sensible definition of {@link #lower} in terms of the {@code descendingIterator} method of
* {@link #headSet(Object, boolean)}. If you override {@link #headSet(Object, boolean)}, you may
* wish to override {@link #lower} to forward to this implementation.
*/
protected @Nullable E standardLower(@ParametricNullness E e) {
return Iterators.getNext(headSet(e, false).descendingIterator(), null);
}
@Override
public @Nullable E floor(@ParametricNullness E e) {
return delegate().floor(e);
}
/**
* A sensible definition of {@link #floor} in terms of the {@code descendingIterator} method of
* {@link #headSet(Object, boolean)}. If you override {@link #headSet(Object, boolean)}, you may
* wish to override {@link #floor} to forward to this implementation.
*/
protected @Nullable E standardFloor(@ParametricNullness E e) {
return Iterators.getNext(headSet(e, true).descendingIterator(), null);
}
@Override
public @Nullable E ceiling(@ParametricNullness E e) {
return delegate().ceiling(e);
}
/**
* A sensible definition of {@link #ceiling} in terms of the {@code iterator} method of {@link
* #tailSet(Object, boolean)}. If you override {@link #tailSet(Object, boolean)}, you may wish to
* override {@link #ceiling} to forward to this implementation.
*/
protected @Nullable E standardCeiling(@ParametricNullness E e) {
return Iterators.getNext(tailSet(e, true).iterator(), null);
}
@Override
public @Nullable E higher(@ParametricNullness E e) {
return delegate().higher(e);
}
/**
* A sensible definition of {@link #higher} in terms of the {@code iterator} method of {@link
* #tailSet(Object, boolean)}. If you override {@link #tailSet(Object, boolean)}, you may wish to
* override {@link #higher} to forward to this implementation.
*/
protected @Nullable E standardHigher(@ParametricNullness E e) {
return Iterators.getNext(tailSet(e, false).iterator(), null);
}
@Override
public @Nullable E pollFirst() {
return delegate().pollFirst();
}
/**
* A sensible definition of {@link #pollFirst} in terms of the {@code iterator} method. If you
* override {@link #iterator} you may wish to override {@link #pollFirst} to forward to this
* implementation.
*/
protected @Nullable E standardPollFirst() {
return Iterators.pollNext(iterator());
}
@Override
public @Nullable E pollLast() {
return delegate().pollLast();
}
/**
* A sensible definition of {@link #pollLast} in terms of the {@code descendingIterator} method.
* If you override {@link #descendingIterator} you may wish to override {@link #pollLast} to
* forward to this implementation.
*/
protected @Nullable E standardPollLast() {
return Iterators.pollNext(descendingIterator());
}
@ParametricNullness
protected E standardFirst() {
return iterator().next();
}
@ParametricNullness
protected E standardLast() {
return descendingIterator().next();
}
@Override
public NavigableSet<E> descendingSet() {
return delegate().descendingSet();
}
/**
* A sensible implementation of {@link NavigableSet#descendingSet} in terms of the other methods
* of {@link NavigableSet}, notably including {@link NavigableSet#descendingIterator}.
*
* <p>In many cases, you may wish to override {@link ForwardingNavigableSet#descendingSet} to
* forward to this implementation or a subclass thereof.
*
* @since 12.0
*/
protected
|
ForwardingNavigableSet
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/animal/Animal.java
|
{
"start": 594,
"end": 2359
}
|
class ____ {
private Long id;
private float bodyWeight;
private Set offspring;
private Animal mother;
private Animal father;
private String description;
private Zoo zoo;
private String serialNumber;
public Animal() {
}
public Animal(String description, float bodyWeight) {
this.description = description;
this.bodyWeight = bodyWeight;
}
@Id
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Column( name = "body_weight" )
public float getBodyWeight() {
return bodyWeight;
}
public void setBodyWeight(float bodyWeight) {
this.bodyWeight = bodyWeight;
}
public String getSerialNumber() {
return serialNumber;
}
public void setSerialNumber(String serialNumber) {
this.serialNumber = serialNumber;
}
@ManyToOne
@JoinColumn( name = "zoo_fk" )
public Zoo getZoo() {
return zoo;
}
public void setZoo(Zoo zoo) {
this.zoo = zoo;
}
@ManyToOne
@JoinColumn( name = "mother_fk" )
public Animal getMother() {
return mother;
}
public void setMother(Animal mother) {
this.mother = mother;
}
@ManyToOne
@JoinColumn( name = "father_fk" )
public Animal getFather() {
return father;
}
public void setFather(Animal father) {
this.father = father;
}
@OneToMany
@JoinColumn( name = "mother_fk" )
@OrderBy( "father_fk" )
public Set<Human> getOffspring() {
return offspring;
}
public void addOffspring(Animal offspring) {
if ( this.offspring == null ) {
this.offspring = new HashSet();
}
this.offspring.add( offspring );
}
public void setOffspring(Set offspring) {
this.offspring = offspring;
}
}
|
Animal
|
java
|
apache__camel
|
dsl/camel-java-joor-dsl/src/test/java/org/apache/camel/dsl/java/joor/JavaMultiCompileTest.java
|
{
"start": 1084,
"end": 2084
}
|
class ____ {
@Test
public void testMainRoutesCollector() throws Exception {
// will load XML from target/classes when testing
doTestMain(
"routes/MyBar*.java",
null);
}
protected void doTestMain(String includes, String excludes) throws Exception {
Main main = new Main();
main.configure().withRoutesIncludePattern(includes);
main.configure().withRoutesExcludePattern(excludes);
main.start();
CamelContext camelContext = main.getCamelContext();
assertNotNull(camelContext);
assertEquals(1, camelContext.getRoutes().size());
String out = main.getCamelTemplate().requestBody("direct:start", "Jack", String.class);
assertEquals("Jack is at Moes Bar", out);
Object b = main.getCamelContext().getRegistry().lookupByName("myBarEcho");
assertNotNull(b);
assertEquals("MyBarEcho1", b.toString());
main.stop();
}
}
|
JavaMultiCompileTest
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/StringUtilsIsMixedCaseTest.java
|
{
"start": 1409,
"end": 3385
}
|
class ____ {
public static final String LOWER_CASE_LETTERS = "abcdefghijklmnopqrstuvwxyz";
public static final String END_MATCH = "at the enD";
public static final String Middle_MATCH = "at tHe Mid";
public static final String EARLY_MATCH = "At tHe beginning";
public static boolean oldIsMixedCase(final CharSequence cs) {
if (StringUtils.isEmpty(cs) || cs.length() == 1) {
return false;
}
boolean containsUppercase = false;
boolean containsLowercase = false;
final int sz = cs.length();
for (int i = 0; i < sz; i++) {
if (containsUppercase && containsLowercase) {
return true;
}
if (Character.isUpperCase(cs.charAt(i))) {
containsUppercase = true;
} else if (Character.isLowerCase(cs.charAt(i))) {
containsLowercase = true;
}
}
return containsUppercase && containsLowercase;
}
@Benchmark
public boolean newIsMixedCaseBeginningMatch() {
return StringUtils.isMixedCase(EARLY_MATCH);
}
@Benchmark
public boolean newIsMixedCaseEndMatch() {
return StringUtils.isMixedCase(END_MATCH);
}
@Benchmark
public boolean newIsMixedCaseMiddleMatch() {
return StringUtils.isMixedCase(Middle_MATCH);
}
@Benchmark
public boolean newIsMixedCaseNoneMatch() {
return StringUtils.isMixedCase(LOWER_CASE_LETTERS);
}
@Benchmark
public boolean oldIsMixedCaseBeginningMatch() {
return oldIsMixedCase(EARLY_MATCH);
}
@Benchmark
public boolean oldIsMixedCaseEndMatch() {
return oldIsMixedCase(END_MATCH);
}
@Benchmark
public boolean oldIsMixedCaseMiddleMatch() {
return oldIsMixedCase(Middle_MATCH);
}
@Benchmark
public boolean oldIsMixedCaseNoneMatch() {
return oldIsMixedCase(LOWER_CASE_LETTERS);
}
}
|
StringUtilsIsMixedCaseTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/cache/UnusedContextsIntegrationTests.java
|
{
"start": 10220,
"end": 10388
}
|
class ____ {
@Test
void test(@Value("${magicKey}") String magicKey) {
assertThat(magicKey).isEqualTo("enigma");
}
}
}
}
}
|
OverridingNestedTestCase2
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/memory/MemoryManagerConcurrentModReleaseTest.java
|
{
"start": 2676,
"end": 3364
}
|
class ____ implements Runnable {
private final ArrayList<MemorySegment> toModify;
private volatile boolean running = true;
private Modifier(ArrayList<MemorySegment> toModify) {
this.toModify = toModify;
}
public void cancel() {
running = false;
}
@Override
public void run() {
while (running) {
try {
MemorySegment seg = toModify.remove(0);
toModify.add(seg);
} catch (IndexOutOfBoundsException e) {
// may happen, just retry
}
}
}
}
private
|
Modifier
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/PooledHiLoSequenceIdentifierTest.java
|
{
"start": 1167,
"end": 2282
}
|
class ____ {
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testSequenceIdentifierGenerator(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
for ( int i = 0; i < 5; i++ ) {
session.persist( new SequenceIdentifier() );
}
session.flush();
assertEquals( 5, countInsertedRows( session ) );
insertNewRow( session );
insertNewRow( session );
assertEquals( 7, countInsertedRows( session ) );
List<Number> ids = session.createQuery( "SELECT id FROM sequenceIdentifier" ).list();
assertEquals( 7, ids.size() );
for ( int i = 0; i < 3; i++ ) {
session.persist( new SequenceIdentifier() );
}
session.flush();
assertEquals( 10, countInsertedRows( session ) );
}
);
}
private int countInsertedRows(Session s) {
return ( (Number) s.createNativeQuery( "SELECT COUNT(*) FROM sequenceIdentifier" )
.uniqueResult() ).intValue();
}
@Entity(name = "sequenceIdentifier")
public static
|
PooledHiLoSequenceIdentifierTest
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/AsyncCalcTestPrograms.java
|
{
"start": 14001,
"end": 14566
}
|
class ____ extends AsyncScalarFunction {
private static final int TOTAL_FAILURES = 1;
private final AtomicInteger calls = new AtomicInteger(0);
public void eval(CompletableFuture<Integer> future, Integer c) {
if (c != 2) {
future.complete(c);
return;
}
if (calls.incrementAndGet() > TOTAL_FAILURES) {
future.complete(c);
return;
}
throw new RuntimeException("Failure " + calls.get());
}
}
}
|
TwosFailFunction
|
java
|
quarkusio__quarkus
|
extensions/vertx/deployment/src/test/java/io/quarkus/vertx/CodecRegistrationTest.java
|
{
"start": 6720,
"end": 8628
}
|
class ____ {
private final List<String> address1 = new CopyOnWriteArrayList<>();
private final List<String> address2 = new CopyOnWriteArrayList<>();
public List<String> getAddress1() {
return address1;
}
public List<String> getAddress2() {
return address2;
}
@ConsumeEvent("address-1")
void listenAddress1(String message) {
address1.add(message);
}
@ConsumeEvent("address-2")
CompletionStage<Void> listenAddress2(String message) {
address2.add(message);
return CompletableFuture.completedFuture(null);
}
@ConsumeEvent("address-3")
long listenAddress3(int i) {
return i + 1;
}
@ConsumeEvent("address-4")
CompletionStage<Long> listenAddress4(int i) {
return CompletableFuture.completedFuture((long) (i + 1));
}
List<Object> sink = new CopyOnWriteArrayList<>();
@ConsumeEvent("address-5")
void codecRegistrationBasedOnParam(CustomType1 ct) {
sink.add(ct);
}
@ConsumeEvent("address-6")
void codecRegistrationBasedOnParam(Message<CustomType2> ct) {
sink.add(ct.body());
}
@ConsumeEvent("address-7")
CustomType3 codecRegistrationBasedReturnType(String n) {
return new CustomType3(n);
}
@ConsumeEvent("address-8")
CompletionStage<CustomType4> codecRegistrationBasedReturnTypeAndCS(String n) {
return CompletableFuture.completedFuture(new CustomType4(n));
}
@ConsumeEvent("address-9")
void codecRegistrationBasedOnHeadersParam(MultiMap headers, CustomType5 ct) {
sink.add(ct);
}
public List<Object> getSink() {
return sink;
}
}
static
|
EventBusConsumers
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Bug_for_yangzhou.java
|
{
"start": 36804,
"end": 39206
}
|
class ____ {
private String condition;
private Object value;
private Object secondValue;
private boolean noValue;
private boolean singleValue;
private boolean betweenValue;
private boolean listValue;
private String typeHandler;
public String getCondition() {
return condition;
}
public Object getValue() {
return value;
}
public Object getSecondValue() {
return secondValue;
}
public boolean isNoValue() {
return noValue;
}
public boolean isSingleValue() {
return singleValue;
}
public boolean isBetweenValue() {
return betweenValue;
}
public boolean isListValue() {
return listValue;
}
public String getTypeHandler() {
return typeHandler;
}
protected Criterion(String condition) {
super();
this.condition = condition;
this.typeHandler = null;
this.noValue = true;
}
protected Criterion(String condition, Object value, String typeHandler) {
super();
this.condition = condition;
this.value = value;
this.typeHandler = typeHandler;
if (value instanceof List<?>) {
this.listValue = true;
} else {
this.singleValue = true;
}
}
protected Criterion(String condition, Object value) {
this(condition, value, null);
}
protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
super();
this.condition = condition;
this.value = value;
this.secondValue = secondValue;
this.typeHandler = typeHandler;
this.betweenValue = true;
}
protected Criterion(String condition, Object value, Object secondValue) {
this(condition, value, secondValue, null);
}
}
public
|
Criterion
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/graphs/CacheableEntityGraphTest.java
|
{
"start": 2479,
"end": 2864
}
|
class ____ {
@Id
@GeneratedValue
public int id;
@Version
public long version;
@Enumerated(EnumType.STRING)
@ElementCollection(fetch = FetchType.EAGER)
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public final Set<TagType> types = new LinkedHashSet<>();
public Tag() {
}
public Tag(Set<TagType> types) {
this.types.addAll( types );
}
}
public
|
Tag
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/HttpTransportListener.java
|
{
"start": 852,
"end": 1025
}
|
interface ____<HEADER extends HttpMetadata, MESSAGE extends HttpInputMessage> {
void onMetadata(HEADER metadata);
void onData(MESSAGE message);
}
|
HttpTransportListener
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/ModelTestStep.java
|
{
"start": 2890,
"end": 4585
}
|
class ____ {
final String name;
final List<String> inputSchemaComponents = new ArrayList<>();
final List<String> outputSchemaComponents = new ArrayList<>();
final Map<String, String> options = new HashMap<>();
final Map<Row, List<Row>> data = new HashMap<>();
public Builder(String name) {
this.name = name;
}
public Builder addInputSchema(String... schemaComponents) {
this.inputSchemaComponents.addAll(Arrays.asList(schemaComponents));
return this;
}
public Builder addInputSchema(List<String> schemaComponents) {
this.inputSchemaComponents.addAll(schemaComponents);
return this;
}
public Builder addOutputSchema(String... schemaComponents) {
this.outputSchemaComponents.addAll(Arrays.asList(schemaComponents));
return this;
}
public Builder addOutputSchema(List<String> schemaComponents) {
this.outputSchemaComponents.addAll(schemaComponents);
return this;
}
public Builder addOption(String key, String value) {
this.options.put(key, value);
return this;
}
public Builder addOptions(Map<String, String> options) {
this.options.putAll(options);
return this;
}
public Builder data(Map<Row, List<Row>> data) {
this.data.putAll(data);
return this;
}
public ModelTestStep build() {
return new ModelTestStep(
name, inputSchemaComponents, outputSchemaComponents, options, data);
}
}
}
|
Builder
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/http/code/javaguide/http/JavaActionsComposition.java
|
{
"start": 3436,
"end": 3593
}
|
interface ____ {
String key();
}
// #action-composition-dependency-injection-annotation
// #action-composition-dependency-injection
public
|
WithCache
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/kotlin/KspComponentProcessorTest.java
|
{
"start": 17112,
"end": 18481
}
|
class ____");
CompilerTests.daggerCompiler(componentSrc)
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(
CompilerTests.javaSource(
"test/DaggerMyComponent",
"package test;",
"",
"import com.google.errorprone.annotations.CanIgnoreReturnValue;",
"import dagger.internal.DaggerGenerated;",
"import dagger.internal.Preconditions;",
"import javax.annotation.processing.Generated;",
"",
"@DaggerGenerated",
"@Generated(",
" value = \"dagger.internal.codegen.ComponentProcessor\",",
" comments = \"https://dagger.dev\"",
")",
"@SuppressWarnings({",
" \"unchecked\",",
" \"rawtypes\",",
" \"KotlinInternal\",",
" \"KotlinInternalInJava\",",
" \"cast\",",
" \"deprecation\",",
" \"nullness:initialization.field.uninitialized\"",
"})",
"public final
|
Bar
|
java
|
quarkusio__quarkus
|
extensions/jaxb/deployment/src/main/java/io/quarkus/jaxb/deployment/JaxbClassesToBeBoundBuildItem.java
|
{
"start": 275,
"end": 413
}
|
class ____ can be removed via
* {@code quarkus.jaxb.exclude-classes}.
*
* @see FilteredJaxbClassesToBeBoundBuildItem
*/
public final
|
names
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/view/TestHtmlBlock.java
|
{
"start": 1196,
"end": 1365
}
|
class ____ extends HtmlBlock {
@Override
public void render(Block html) {
html.
p("#testid").__("test note").__();
}
}
public static
|
TestBlock
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Comparisons.java
|
{
"start": 606,
"end": 3696
}
|
class ____ {
private Comparisons() {}
public static Boolean eq(Object l, Object r) {
Integer i = compare(l, r);
return i == null ? null : i.intValue() == 0;
}
public static boolean nulleq(Object l, Object r) {
if (l == null && r == null) {
return true;
}
Integer i = compare(l, r);
return i == null ? false : i.intValue() == 0;
}
static Boolean neq(Object l, Object r) {
Integer i = compare(l, r);
return i == null ? null : i.intValue() != 0;
}
public static Boolean lt(Object l, Object r) {
Integer i = compare(l, r);
return i == null ? null : i.intValue() < 0;
}
static Boolean lte(Object l, Object r) {
Integer i = compare(l, r);
return i == null ? null : i.intValue() <= 0;
}
public static Boolean gt(Object l, Object r) {
Integer i = compare(l, r);
return i == null ? null : i.intValue() > 0;
}
static Boolean gte(Object l, Object r) {
Integer i = compare(l, r);
return i == null ? null : i.intValue() >= 0;
}
static Boolean in(Object l, Set<Object> r) {
return r.contains(l);
}
/**
* Compares two expression arguments (typically Numbers), if possible.
* Otherwise returns null (the arguments are not comparable or at least
* one of them is null).
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
static Integer compare(Object l, Object r) {
if (l == null || r == null) {
return null;
}
// typical number comparison
if (l instanceof Number lN && r instanceof Number rN) {
return compare(lN, rN);
}
// automatic conversion for versions
if (l instanceof Version lV && r instanceof String rStr) {
return lV.compareTo(new Version(rStr));
}
if (l instanceof String lStr && r instanceof Version rV) {
return new Version(lStr).compareTo(rV);
}
if (l instanceof Comparable lC && r instanceof Comparable) {
try {
return Integer.valueOf(lC.compareTo(r));
} catch (ClassCastException cce) {
// when types are not compatible, cce is thrown
// fall back to null
return null;
}
}
return null;
}
private static Integer compare(Number l, Number r) {
if (l instanceof Double || r instanceof Double) {
return Double.compare(l.doubleValue(), r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.compare(l.floatValue(), r.floatValue());
}
if (l instanceof BigInteger || r instanceof BigInteger) {
return asBigInteger(l).compareTo(asBigInteger(r));
}
if (l instanceof Long || r instanceof Long) {
return Long.compare(l.longValue(), r.longValue());
}
return Integer.valueOf(Integer.compare(l.intValue(), r.intValue()));
}
}
|
Comparisons
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldBePeriod.java
|
{
"start": 717,
"end": 1210
}
|
class ____ extends BasicErrorMessageFactory {
private static final String EXPECTED_PREFIX = "%nExpecting Period:%n %s%nto be ";
private ShouldBePeriod(Period actual, String metric) {
super(EXPECTED_PREFIX + metric, actual);
}
public static ShouldBePeriod shouldBePositive(Period actual) {
return new ShouldBePeriod(actual, "positive");
}
public static ShouldBePeriod shouldBeNegative(Period actual) {
return new ShouldBePeriod(actual, "negative");
}
}
|
ShouldBePeriod
|
java
|
micronaut-projects__micronaut-core
|
http-netty/src/main/java/io/micronaut/http/netty/channel/DefaultEventLoopGroupRegistry.java
|
{
"start": 2325,
"end": 9542
}
|
class ____ implements EventLoopGroupRegistry {
private static final Logger LOG = LoggerFactory.getLogger(DefaultEventLoopGroupRegistry.class);
private final EventLoopGroupFactory eventLoopGroupFactory;
private final BeanLocator beanLocator;
private final Map<EventLoopGroup, EventLoopGroupConfiguration> eventLoopGroups = new ConcurrentHashMap<>();
private final BeanProvider<LoomCarrierGroup.Factory> loomCarrierGroupFactory;
private final List<TaskQueueInterceptor> taskQueueInterceptors;
/**
* Default constructor.
*
* @param eventLoopGroupFactory The event loop group factory
* @param beanLocator The bean locator
* @param loomCarrierGroupFactory Factory for the loom carrier group
* @param taskQueueInterceptors Task queue interceptors
*/
public DefaultEventLoopGroupRegistry(EventLoopGroupFactory eventLoopGroupFactory, BeanLocator beanLocator, BeanProvider<LoomCarrierGroup.Factory> loomCarrierGroupFactory, List<TaskQueueInterceptor> taskQueueInterceptors) {
this.eventLoopGroupFactory = eventLoopGroupFactory;
this.beanLocator = beanLocator;
this.loomCarrierGroupFactory = loomCarrierGroupFactory;
this.taskQueueInterceptors = taskQueueInterceptors;
}
/**
* Shut down event loop groups according to configuration.
*/
@PreDestroy
void shutdown() {
eventLoopGroups.forEach((eventLoopGroup, configuration) -> {
try {
long quietPeriod = configuration.getShutdownQuietPeriod().toMillis();
long timeout = configuration.getShutdownTimeout().toMillis();
eventLoopGroup.shutdownGracefully(quietPeriod, timeout, TimeUnit.MILLISECONDS);
} catch (Throwable t) {
if (LOG.isWarnEnabled()) {
LOG.warn("Error shutting down EventLoopGroup: {}", t.getMessage(), t);
}
}
});
eventLoopGroups.clear();
}
private EventLoopGroup createGroup(EventLoopGroupConfiguration configuration, String name, Executor executor) {
IoHandlerFactory ioHandlerFactory = eventLoopGroupFactory.createIoHandlerFactory(configuration);
int nThreads = numThreads(configuration);
EventLoopGroup eventLoopGroup;
if (configuration.isLoomCarrier()) {
eventLoopGroup = loomCarrierGroupFactory.get().create(nThreads, executor, ioHandlerFactory);
} else if (taskQueueInterceptors.isEmpty()) {
eventLoopGroup = new MultiThreadIoEventLoopGroup(nThreads, executor, ioHandlerFactory);
} else {
eventLoopGroup = new MultiThreadIoEventLoopGroup(nThreads, executor, ioHandlerFactory) {
@Override
protected IoEventLoop newChild(Executor executor, IoHandlerFactory ioHandlerFactory, Object... args) {
return new SingleThreadIoEventLoop(this, executor, ioHandlerFactory) {
@Override
protected Queue<Runnable> newTaskQueue(int maxPendingTasks) {
Queue<Runnable> tq = super.newTaskQueue(maxPendingTasks);
for (TaskQueueInterceptor taskQueueInterceptor : taskQueueInterceptors) {
tq = taskQueueInterceptor.wrapTaskQueue(name, tq);
}
return tq;
}
};
}
};
}
eventLoopGroups.put(eventLoopGroup, configuration);
return eventLoopGroup;
}
/**
* Constructs an event loop group for each configuration.
*
* @param configuration The configuration
* @return The event loop group
*/
@EachBean(EventLoopGroupConfiguration.class)
@Bean
@BootstrapContextCompatible
protected EventLoopGroup eventLoopGroup(EventLoopGroupConfiguration configuration) {
String executorName = configuration.getExecutorName().orElse(null);
Executor executor;
if (executorName != null) {
executor = beanLocator.findBean(Executor.class, Qualifiers.byName(executorName))
.orElseThrow(() -> new ConfigurationException("No executor service configured for name: " + executorName));
} else {
ThreadFactory threadFactory = beanLocator.findBean(ThreadFactory.class, Qualifiers.byName(configuration.getName()))
.orElseGet(() -> new DefaultThreadFactory(configuration.getName() + "-" + DefaultThreadFactory.toPoolName(NioEventLoopGroup.class)));
if (threadFactory instanceof NettyThreadFactory.EventLoopCustomizableThreadFactory custom) {
threadFactory = custom.customizeForEventLoop();
}
executor = new ThreadPerTaskExecutor(threadFactory);
}
return createGroup(configuration, configuration.getName(), executor);
}
/**
* Constructs an event loop group with default Configuration.
*
* @param threadFactory The default Netty thread factory
* @return The event loop group
*/
@Singleton
@Requires(missingProperty = EventLoopGroupConfiguration.DEFAULT_LOOP)
@Primary
@BootstrapContextCompatible
@Bean(typed = { EventLoopGroup.class })
protected EventLoopGroup defaultEventLoopGroup(@Named(NettyThreadFactory.NAME) ThreadFactory threadFactory) {
if (threadFactory instanceof NettyThreadFactory.EventLoopCustomizableThreadFactory custom) {
threadFactory = custom.customizeForEventLoop();
}
return createGroup(new DefaultEventLoopGroupConfiguration(), EventLoopGroupConfiguration.DEFAULT, new ThreadPerTaskExecutor(threadFactory));
}
@NonNull
@Override
public EventLoopGroup getDefaultEventLoopGroup() {
return beanLocator.getBean(EventLoopGroup.class);
}
@Override
public Optional<EventLoopGroup> getEventLoopGroup(@NonNull String name) {
ArgumentUtils.requireNonNull("name", name);
if (EventLoopGroupConfiguration.DEFAULT.equals(name)) {
return beanLocator.findBean(EventLoopGroup.class);
} else {
return beanLocator.findBean(EventLoopGroup.class, Qualifiers.byName(name));
}
}
@Override
public Optional<EventLoopGroupConfiguration> getEventLoopGroupConfiguration(@NonNull String name) {
ArgumentUtils.requireNonNull("name", name);
return beanLocator.findBean(EventLoopGroupConfiguration.class, Qualifiers.byName(name));
}
/**
* Calculate the number of threads from {@link EventLoopGroupConfiguration#getNumThreads()} and
* {@link EventLoopGroupConfiguration#getThreadCoreRatio()}.
*
* @param configuration The configuration
* @return The actual number of threads to use
*/
public static int numThreads(EventLoopGroupConfiguration configuration) {
int explicit = configuration.getNumThreads();
if (explicit != 0) {
return explicit;
}
return Math.toIntExact(Math.round(configuration.getThreadCoreRatio() * NettyRuntime.availableProcessors()));
}
}
|
DefaultEventLoopGroupRegistry
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/routing/RoutingFeatures.java
|
{
"start": 647,
"end": 928
}
|
class ____ implements FeatureSpecification {
@Override
public Set<NodeFeature> getFeatures() {
return Set.of();
}
@Override
public Set<NodeFeature> getTestFeatures() {
return Set.of(IndexRouting.LOGSB_ROUTE_ON_SORT_FIELDS);
}
}
|
RoutingFeatures
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/boot/model/IdentifierRelation.java
|
{
"start": 723,
"end": 2786
}
|
class ____ implements AttributeContainer {
private final List<Attribute> attributes;
public IdentifierRelation() {
this.attributes = new ArrayList<>();
}
@Override
public void addAttribute(Attribute attribute) {
this.attributes.add( attribute );
}
public List<Attribute> getAttributesPrefixed(
String prefix,
Iterator<Selectable> iterator,
boolean makeKey,
boolean insertable) {
return getAttributesPrefixed(prefix, ColumnNameIterator.from( iterator ), makeKey, insertable );
}
public List<Attribute> getAttributesPrefixed(
String prefix,
ColumnNameIterator columnNameIterator,
boolean makeKey,
boolean insertable) {
List<Attribute> prefixedAttributes = new ArrayList<>();
for ( Attribute attribute : attributes ) {
Attribute prefixedAttribute = attribute.deepCopy();
String name = prefixedAttribute.getName();
if ( name != null ) {
prefixedAttribute.setName( prefix + prefixedAttribute.getName() );
}
changeNamesInColumns( prefixedAttribute, columnNameIterator );
if ( makeKey ) {
if ( prefixedAttribute instanceof Keyable ){
( (Keyable) prefixedAttribute ).setKey( true );
}
// HHH-11463 when cloning a many-to-one to be a key-many-to-one, the FK attribute
// should be explicitly set to 'none' or added to be 'none' to avoid issues with
// making references to the main schema.
if ( prefixedAttribute instanceof ManyToOneAttribute ) {
final ManyToOneAttribute manyToOne = (ManyToOneAttribute) prefixedAttribute;
manyToOne.setForeignKey( "none" );
}
}
if ( prefixedAttribute instanceof BasicAttribute ) {
( (BasicAttribute) prefixedAttribute ).setInsertable( insertable );
}
prefixedAttributes.add( prefixedAttribute );
}
return prefixedAttributes;
}
private static void changeNamesInColumns(Attribute attribute, ColumnNameIterator columnNameIterator) {
for ( Column column : attribute.getColumns() ) {
if ( column.getName() != null ) {
column.setName( columnNameIterator.next() );
}
}
}
}
|
IdentifierRelation
|
java
|
micronaut-projects__micronaut-core
|
http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/codec/JsonCodecAdditionalType2Test.java
|
{
"start": 2915,
"end": 3715
}
|
class ____ {
@Produces(CUSTOM_MEDIA_TYPE)
@Get("/json-additional-codec")
String index() {
return """
{
"version": "https://jsonfeed.org/version/1",
"title": "My Example Feed",
"home_page_url": "https://example.org/",
"feed_url": "https://example.org/feed.json",
]
}\
""";
}
@Produces(CUSTOM_MEDIA_TYPE)
@Get("/json-additional-codec/pojo")
JsonFeed pojo() {
return new JsonFeed("https://jsonfeed.org/version/1", "My Example Feed", "https://example.org/", "https://example.org/feed.json");
}
}
@Introspected
@ReflectiveAccess
static
|
JsonFeedController
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java
|
{
"start": 433,
"end": 708
}
|
class ____ extends ActionType<GetUsersResponse> {
public static final GetUsersAction INSTANCE = new GetUsersAction();
public static final String NAME = "cluster:admin/xpack/security/user/get";
protected GetUsersAction() {
super(NAME);
}
}
|
GetUsersAction
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java
|
{
"start": 21765,
"end": 24188
}
|
class ____ implements RemoteConnectionInfo.ModeInfo {
final List<String> seedNodes;
final int maxConnectionsPerCluster;
final int numNodesConnected;
public SniffModeInfo(List<String> seedNodes, int maxConnectionsPerCluster, int numNodesConnected) {
this.seedNodes = seedNodes;
this.maxConnectionsPerCluster = maxConnectionsPerCluster;
this.numNodesConnected = numNodesConnected;
}
private SniffModeInfo(StreamInput input) throws IOException {
seedNodes = Arrays.asList(input.readStringArray());
maxConnectionsPerCluster = input.readVInt();
numNodesConnected = input.readVInt();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray("seeds");
for (String address : seedNodes) {
builder.value(address);
}
builder.endArray();
builder.field("num_nodes_connected", numNodesConnected);
builder.field("max_connections_per_cluster", maxConnectionsPerCluster);
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringCollection(seedNodes);
out.writeVInt(maxConnectionsPerCluster);
out.writeVInt(numNodesConnected);
}
@Override
public boolean isConnected() {
return numNodesConnected > 0;
}
@Override
public String modeName() {
return "sniff";
}
@Override
public RemoteConnectionStrategy.ConnectionStrategy modeType() {
return RemoteConnectionStrategy.ConnectionStrategy.SNIFF;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SniffModeInfo sniff = (SniffModeInfo) o;
return maxConnectionsPerCluster == sniff.maxConnectionsPerCluster
&& numNodesConnected == sniff.numNodesConnected
&& Objects.equals(seedNodes, sniff.seedNodes);
}
@Override
public int hashCode() {
return Objects.hash(seedNodes, maxConnectionsPerCluster, numNodesConnected);
}
}
}
|
SniffModeInfo
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/jsf/DelegatingNavigationHandlerTests.java
|
{
"start": 2912,
"end": 3440
}
|
class ____ extends DecoratingNavigationHandler {
private String lastFromAction;
private String lastOutcome;
@Override
public void handleNavigation(FacesContext facesContext, @Nullable String fromAction,
@Nullable String outcome, @Nullable NavigationHandler originalNavigationHandler) {
lastFromAction = fromAction;
lastOutcome = outcome;
if (originalNavigationHandler != null) {
originalNavigationHandler.handleNavigation(facesContext, fromAction, outcome);
}
}
}
}
|
TestDecoratingNavigationHandler
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/visitors/IntroductionTestGenVisitor.java
|
{
"start": 766,
"end": 1121
}
|
interface ____ {
String getParentMethod();
}
"""));
} catch (Exception e) {
throw new ProcessingException(element, "Failed to generate a Parent introduction: " + e.getMessage(), e);
}
});
}
}
|
IntroductionTestParent
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/cglib/core/EmitUtils.java
|
{
"start": 1185,
"end": 27212
}
|
class ____ {
private static final Signature CSTRUCT_NULL =
TypeUtils.parseConstructor("");
private static final Signature CSTRUCT_THROWABLE =
TypeUtils.parseConstructor("Throwable");
private static final Signature GET_NAME =
TypeUtils.parseSignature("String getName()");
private static final Signature HASH_CODE =
TypeUtils.parseSignature("int hashCode()");
private static final Signature EQUALS =
TypeUtils.parseSignature("boolean equals(Object)");
private static final Signature STRING_LENGTH =
TypeUtils.parseSignature("int length()");
private static final Signature STRING_CHAR_AT =
TypeUtils.parseSignature("char charAt(int)");
private static final Signature FOR_NAME =
TypeUtils.parseSignature("Class forName(String)");
private static final Signature DOUBLE_TO_LONG_BITS =
TypeUtils.parseSignature("long doubleToLongBits(double)");
private static final Signature FLOAT_TO_INT_BITS =
TypeUtils.parseSignature("int floatToIntBits(float)");
private static final Signature TO_STRING =
TypeUtils.parseSignature("String toString()");
private static final Signature APPEND_STRING =
TypeUtils.parseSignature("StringBuffer append(String)");
private static final Signature APPEND_INT =
TypeUtils.parseSignature("StringBuffer append(int)");
private static final Signature APPEND_DOUBLE =
TypeUtils.parseSignature("StringBuffer append(double)");
private static final Signature APPEND_FLOAT =
TypeUtils.parseSignature("StringBuffer append(float)");
private static final Signature APPEND_CHAR =
TypeUtils.parseSignature("StringBuffer append(char)");
private static final Signature APPEND_LONG =
TypeUtils.parseSignature("StringBuffer append(long)");
private static final Signature APPEND_BOOLEAN =
TypeUtils.parseSignature("StringBuffer append(boolean)");
private static final Signature LENGTH =
TypeUtils.parseSignature("int length()");
private static final Signature SET_LENGTH =
TypeUtils.parseSignature("void setLength(int)");
private static final Signature GET_DECLARED_METHOD =
TypeUtils.parseSignature("java.lang.reflect.Method getDeclaredMethod(String, Class[])");
public static final ArrayDelimiters DEFAULT_DELIMITERS = new ArrayDelimiters("{", ", ", "}");
private EmitUtils() {
}
public static void factory_method(ClassEmitter ce, Signature sig) {
CodeEmitter e = ce.begin_method(Constants.ACC_PUBLIC, sig, null);
e.new_instance_this();
e.dup();
e.load_args();
e.invoke_constructor_this(TypeUtils.parseConstructor(sig.getArgumentTypes()));
e.return_value();
e.end_method();
}
public static void null_constructor(ClassEmitter ce) {
CodeEmitter e = ce.begin_method(Constants.ACC_PUBLIC, CSTRUCT_NULL, null);
e.load_this();
e.super_invoke_constructor();
e.return_value();
e.end_method();
}
/**
* Process an array on the stack. Assumes the top item on the stack
* is an array of the specified type. For each element in the array,
* puts the element on the stack and triggers the callback.
* @param type the type of the array (type.isArray() must be true)
* @param callback the callback triggered for each element
*/
public static void process_array(CodeEmitter e, Type type, ProcessArrayCallback callback) {
Type componentType = TypeUtils.getComponentType(type);
Local array = e.make_local();
Local loopvar = e.make_local(Type.INT_TYPE);
Label loopbody = e.make_label();
Label checkloop = e.make_label();
e.store_local(array);
e.push(0);
e.store_local(loopvar);
e.goTo(checkloop);
e.mark(loopbody);
e.load_local(array);
e.load_local(loopvar);
e.array_load(componentType);
callback.processElement(componentType);
e.iinc(loopvar, 1);
e.mark(checkloop);
e.load_local(loopvar);
e.load_local(array);
e.arraylength();
e.if_icmp(CodeEmitter.LT, loopbody);
}
/**
* Process two arrays on the stack in parallel. Assumes the top two items on the stack
* are arrays of the specified class. The arrays must be the same length. For each pair
* of elements in the arrays, puts the pair on the stack and triggers the callback.
* @param type the type of the arrays (type.isArray() must be true)
* @param callback the callback triggered for each pair of elements
*/
public static void process_arrays(CodeEmitter e, Type type, ProcessArrayCallback callback) {
Type componentType = TypeUtils.getComponentType(type);
Local array1 = e.make_local();
Local array2 = e.make_local();
Local loopvar = e.make_local(Type.INT_TYPE);
Label loopbody = e.make_label();
Label checkloop = e.make_label();
e.store_local(array1);
e.store_local(array2);
e.push(0);
e.store_local(loopvar);
e.goTo(checkloop);
e.mark(loopbody);
e.load_local(array1);
e.load_local(loopvar);
e.array_load(componentType);
e.load_local(array2);
e.load_local(loopvar);
e.array_load(componentType);
callback.processElement(componentType);
e.iinc(loopvar, 1);
e.mark(checkloop);
e.load_local(loopvar);
e.load_local(array1);
e.arraylength();
e.if_icmp(CodeEmitter.LT, loopbody);
}
public static void string_switch(CodeEmitter e, String[] strings, int switchStyle, ObjectSwitchCallback callback) {
try {
switch (switchStyle) {
case Constants.SWITCH_STYLE_TRIE:
string_switch_trie(e, strings, callback);
break;
case Constants.SWITCH_STYLE_HASH:
string_switch_hash(e, strings, callback, false);
break;
case Constants.SWITCH_STYLE_HASHONLY:
string_switch_hash(e, strings, callback, true);
break;
default:
throw new IllegalArgumentException("unknown switch style " + switchStyle);
}
} catch (RuntimeException | Error ex) {
throw ex;
} catch (Exception ex) {
throw new CodeGenerationException(ex);
}
}
private static void string_switch_trie(final CodeEmitter e,
String[] strings,
final ObjectSwitchCallback callback) throws Exception {
final Label def = e.make_label();
final Label end = e.make_label();
final Map buckets = CollectionUtils.bucket(Arrays.asList(strings), value -> ((String)value).length());
e.dup();
e.invoke_virtual(Constants.TYPE_STRING, STRING_LENGTH);
e.process_switch(getSwitchKeys(buckets), new ProcessSwitchCallback() {
@Override
public void processCase(int key, Label ignore_end) throws Exception {
List bucket = (List)buckets.get(key);
stringSwitchHelper(e, bucket, callback, def, end, 0);
}
@Override
public void processDefault() {
e.goTo(def);
}
});
e.mark(def);
e.pop();
callback.processDefault();
e.mark(end);
}
private static void stringSwitchHelper(final CodeEmitter e,
List strings,
final ObjectSwitchCallback callback,
final Label def,
final Label end,
final int index) throws Exception {
final int len = ((String)strings.get(0)).length();
final Map buckets = CollectionUtils.bucket(strings, value -> ((String)value).charAt(index));
e.dup();
e.push(index);
e.invoke_virtual(Constants.TYPE_STRING, STRING_CHAR_AT);
e.process_switch(getSwitchKeys(buckets), new ProcessSwitchCallback() {
@Override
public void processCase(int key, Label ignore_end) throws Exception {
List bucket = (List)buckets.get(key);
if (index + 1 == len) {
e.pop();
callback.processCase(bucket.get(0), end);
} else {
stringSwitchHelper(e, bucket, callback, def, end, index + 1);
}
}
@Override
public void processDefault() {
e.goTo(def);
}
});
}
static int[] getSwitchKeys(Map buckets) {
int[] keys = new int[buckets.size()];
int index = 0;
for (Iterator it = buckets.keySet().iterator(); it.hasNext();) {
keys[index++] = ((Integer)it.next());
}
Arrays.sort(keys);
return keys;
}
private static void string_switch_hash(final CodeEmitter e,
final String[] strings,
final ObjectSwitchCallback callback,
final boolean skipEquals) throws Exception {
final Map buckets = CollectionUtils.bucket(Arrays.asList(strings), value -> value.hashCode());
final Label def = e.make_label();
final Label end = e.make_label();
e.dup();
e.invoke_virtual(Constants.TYPE_OBJECT, HASH_CODE);
e.process_switch(getSwitchKeys(buckets), new ProcessSwitchCallback() {
@Override
public void processCase(int key, Label ignore_end) throws Exception {
List bucket = (List)buckets.get(key);
Label next = null;
if (skipEquals && bucket.size() == 1) {
if (skipEquals) {
e.pop();
}
callback.processCase(bucket.get(0), end);
} else {
for (Iterator it = bucket.iterator(); it.hasNext();) {
String string = (String)it.next();
if (next != null) {
e.mark(next);
}
if (it.hasNext()) {
e.dup();
}
e.push(string);
e.invoke_virtual(Constants.TYPE_OBJECT, EQUALS);
if (it.hasNext()) {
e.if_jump(CodeEmitter.EQ, next = e.make_label());
e.pop();
} else {
e.if_jump(CodeEmitter.EQ, def);
}
callback.processCase(string, end);
}
}
}
@Override
public void processDefault() {
e.pop();
}
});
e.mark(def);
callback.processDefault();
e.mark(end);
}
public static void load_class_this(CodeEmitter e) {
load_class_helper(e, e.getClassEmitter().getClassType());
}
public static void load_class(CodeEmitter e, Type type) {
if (TypeUtils.isPrimitive(type)) {
if (type == Type.VOID_TYPE) {
throw new IllegalArgumentException("cannot load void type");
}
e.getstatic(TypeUtils.getBoxedType(type), "TYPE", Constants.TYPE_CLASS);
} else {
load_class_helper(e, type);
}
}
private static void load_class_helper(CodeEmitter e, final Type type) {
if (e.isStaticHook()) {
// have to fall back on non-optimized load
e.push(TypeUtils.emulateClassGetName(type));
e.invoke_static(Constants.TYPE_CLASS, FOR_NAME);
} else {
ClassEmitter ce = e.getClassEmitter();
String typeName = TypeUtils.emulateClassGetName(type);
// TODO: can end up with duplicated field names when using chained transformers; incorporate static hook # somehow
String fieldName = "CGLIB$load_class$" + TypeUtils.escapeType(typeName);
if (!ce.isFieldDeclared(fieldName)) {
ce.declare_field(Constants.PRIVATE_FINAL_STATIC, fieldName, Constants.TYPE_CLASS, null);
CodeEmitter hook = ce.getStaticHook();
hook.push(typeName);
hook.invoke_static(Constants.TYPE_CLASS, FOR_NAME);
hook.putstatic(ce.getClassType(), fieldName, Constants.TYPE_CLASS);
}
e.getfield(fieldName);
}
}
public static void push_array(CodeEmitter e, Object[] array) {
e.push(array.length);
e.newarray(Type.getType(remapComponentType(array.getClass().componentType())));
for (int i = 0; i < array.length; i++) {
e.dup();
e.push(i);
push_object(e, array[i]);
e.aastore();
}
}
private static Class remapComponentType(Class componentType) {
if (componentType.equals(Type.class)) {
return Class.class;
}
return componentType;
}
public static void push_object(CodeEmitter e, Object obj) {
if (obj == null) {
e.aconst_null();
} else {
if (obj.getClass().isArray()) {
push_array(e, (Object[]) obj);
} else if (obj instanceof String text) {
e.push(text);
} else if (obj instanceof Type type) {
load_class(e, type);
} else if (obj instanceof Class<?> clazz) {
load_class(e, Type.getType(clazz));
} else if (obj instanceof BigInteger) {
e.new_instance(Constants.TYPE_BIG_INTEGER);
e.dup();
e.push(obj.toString());
e.invoke_constructor(Constants.TYPE_BIG_INTEGER);
} else if (obj instanceof BigDecimal) {
e.new_instance(Constants.TYPE_BIG_DECIMAL);
e.dup();
e.push(obj.toString());
e.invoke_constructor(Constants.TYPE_BIG_DECIMAL);
} else {
throw new IllegalArgumentException("unknown type: " + obj.getClass());
}
}
}
/**
* @deprecated use {@link #hash_code(CodeEmitter, Type, int, CustomizerRegistry)} instead
*/
@Deprecated
public static void hash_code(CodeEmitter e, Type type, int multiplier, final Customizer customizer) {
hash_code(e, type, multiplier, CustomizerRegistry.singleton(customizer));
}
public static void hash_code(CodeEmitter e, Type type, int multiplier, final CustomizerRegistry registry) {
if (TypeUtils.isArray(type)) {
hash_array(e, type, multiplier, registry);
} else {
e.swap(Type.INT_TYPE, type);
e.push(multiplier);
e.math(CodeEmitter.MUL, Type.INT_TYPE);
e.swap(type, Type.INT_TYPE);
if (TypeUtils.isPrimitive(type)) {
hash_primitive(e, type);
} else {
hash_object(e, type, registry);
}
e.math(CodeEmitter.ADD, Type.INT_TYPE);
}
}
private static void hash_array(final CodeEmitter e, Type type, final int multiplier, final CustomizerRegistry registry) {
Label skip = e.make_label();
Label end = e.make_label();
e.dup();
e.ifnull(skip);
EmitUtils.process_array(e, type, type1 -> hash_code(e, type1, multiplier, registry));
e.goTo(end);
e.mark(skip);
e.pop();
e.mark(end);
}
private static void hash_object(CodeEmitter e, Type type, CustomizerRegistry registry) {
// (f == null) ? 0 : f.hashCode();
Label skip = e.make_label();
Label end = e.make_label();
e.dup();
e.ifnull(skip);
boolean customHashCode = false;
for (HashCodeCustomizer customizer : registry.get(HashCodeCustomizer.class)) {
if (customizer.customize(e, type)) {
customHashCode = true;
break;
}
}
if (!customHashCode) {
for (Customizer customizer : registry.get(Customizer.class)) {
customizer.customize(e, type);
}
e.invoke_virtual(Constants.TYPE_OBJECT, HASH_CODE);
}
e.goTo(end);
e.mark(skip);
e.pop();
e.push(0);
e.mark(end);
}
private static void hash_primitive(CodeEmitter e, Type type) {
switch (type.getSort()) {
case Type.BOOLEAN:
// f ? 0 : 1
e.push(1);
e.math(CodeEmitter.XOR, Type.INT_TYPE);
break;
case Type.FLOAT:
// Float.floatToIntBits(f)
e.invoke_static(Constants.TYPE_FLOAT, FLOAT_TO_INT_BITS);
break;
case Type.DOUBLE:
// Double.doubleToLongBits(f), hash_code(Long.TYPE)
e.invoke_static(Constants.TYPE_DOUBLE, DOUBLE_TO_LONG_BITS);
// fall through
case Type.LONG:
hash_long(e);
}
}
private static void hash_long(CodeEmitter e) {
// (int)(f ^ (f >>> 32))
e.dup2();
e.push(32);
e.math(CodeEmitter.USHR, Type.LONG_TYPE);
e.math(CodeEmitter.XOR, Type.LONG_TYPE);
e.cast_numeric(Type.LONG_TYPE, Type.INT_TYPE);
}
// public static void not_equals(CodeEmitter e, Type type, Label notEquals) {
// not_equals(e, type, notEquals, null);
// }
/**
* @deprecated use {@link #not_equals(CodeEmitter, Type, Label, CustomizerRegistry)} instead
*/
@Deprecated
public static void not_equals(CodeEmitter e, Type type, final Label notEquals, final Customizer customizer) {
not_equals(e, type, notEquals, CustomizerRegistry.singleton(customizer));
}
/**
* Branches to the specified label if the top two items on the stack
* are not equal. The items must both be of the specified
* class. Equality is determined by comparing primitive values
* directly and by invoking the <code>equals</code> method for
* Objects. Arrays are recursively processed in the same manner.
*/
public static void not_equals(final CodeEmitter e, Type type, final Label notEquals, final CustomizerRegistry registry) {
(new ProcessArrayCallback() {
@Override
public void processElement(Type type) {
not_equals_helper(e, type, notEquals, registry, this);
}
}).processElement(type);
}
private static void not_equals_helper(CodeEmitter e,
Type type,
Label notEquals,
CustomizerRegistry registry,
ProcessArrayCallback callback) {
if (TypeUtils.isPrimitive(type)) {
e.if_cmp(type, CodeEmitter.NE, notEquals);
} else {
Label end = e.make_label();
nullcmp(e, notEquals, end);
if (TypeUtils.isArray(type)) {
Label checkContents = e.make_label();
e.dup2();
e.arraylength();
e.swap();
e.arraylength();
e.if_icmp(CodeEmitter.EQ, checkContents);
e.pop2();
e.goTo(notEquals);
e.mark(checkContents);
EmitUtils.process_arrays(e, type, callback);
} else {
List<Customizer> customizers = registry.get(Customizer.class);
if (!customizers.isEmpty()) {
for (Customizer customizer : customizers) {
customizer.customize(e, type);
}
e.swap();
for (Customizer customizer : customizers) {
customizer.customize(e, type);
}
}
e.invoke_virtual(Constants.TYPE_OBJECT, EQUALS);
e.if_jump(CodeEmitter.EQ, notEquals);
}
e.mark(end);
}
}
/**
* If both objects on the top of the stack are non-null, does nothing.
* If one is null, or both are null, both are popped off and execution
* branches to the respective label.
* @param oneNull label to branch to if only one of the objects is null
* @param bothNull label to branch to if both of the objects are null
*/
private static void nullcmp(CodeEmitter e, Label oneNull, Label bothNull) {
e.dup2();
Label nonNull = e.make_label();
Label oneNullHelper = e.make_label();
Label end = e.make_label();
e.ifnonnull(nonNull);
e.ifnonnull(oneNullHelper);
e.pop2();
e.goTo(bothNull);
e.mark(nonNull);
e.ifnull(oneNullHelper);
e.goTo(end);
e.mark(oneNullHelper);
e.pop2();
e.goTo(oneNull);
e.mark(end);
}
/*
public static void to_string(CodeEmitter e,
Type type,
ArrayDelimiters delims,
CustomizerRegistry registry) {
e.new_instance(Constants.TYPE_STRING_BUFFER);
e.dup();
e.invoke_constructor(Constants.TYPE_STRING_BUFFER);
e.swap();
append_string(e, type, delims, registry);
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, TO_STRING);
}
*/
/**
* @deprecated use {@link #append_string(CodeEmitter, Type, ArrayDelimiters, CustomizerRegistry)} instead
*/
@Deprecated
public static void append_string(final CodeEmitter e,
Type type,
final ArrayDelimiters delims,
final Customizer customizer) {
append_string(e, type, delims, CustomizerRegistry.singleton(customizer));
}
public static void append_string(final CodeEmitter e,
Type type,
final ArrayDelimiters delims,
final CustomizerRegistry registry) {
final ArrayDelimiters d = (delims != null) ? delims : DEFAULT_DELIMITERS;
ProcessArrayCallback callback = new ProcessArrayCallback() {
@Override
public void processElement(Type type) {
append_string_helper(e, type, d, registry, this);
e.push(d.inside);
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_STRING);
}
};
append_string_helper(e, type, d, registry, callback);
}
private static void append_string_helper(CodeEmitter e,
Type type,
ArrayDelimiters delims,
CustomizerRegistry registry,
ProcessArrayCallback callback) {
Label skip = e.make_label();
Label end = e.make_label();
if (TypeUtils.isPrimitive(type)) {
switch (type.getSort()) {
case Type.INT:
case Type.SHORT:
case Type.BYTE:
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_INT);
break;
case Type.DOUBLE:
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_DOUBLE);
break;
case Type.FLOAT:
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_FLOAT);
break;
case Type.LONG:
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_LONG);
break;
case Type.BOOLEAN:
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_BOOLEAN);
break;
case Type.CHAR:
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_CHAR);
break;
}
} else if (TypeUtils.isArray(type)) {
e.dup();
e.ifnull(skip);
e.swap();
if (delims != null && delims.before != null && !delims.before.isEmpty()) {
e.push(delims.before);
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_STRING);
e.swap();
}
EmitUtils.process_array(e, type, callback);
shrinkStringBuffer(e, 2);
if (delims != null && delims.after != null && !delims.after.isEmpty()) {
e.push(delims.after);
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_STRING);
}
} else {
e.dup();
e.ifnull(skip);
for (Customizer customizer : registry.get(Customizer.class)) {
customizer.customize(e, type);
}
e.invoke_virtual(Constants.TYPE_OBJECT, TO_STRING);
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_STRING);
}
e.goTo(end);
e.mark(skip);
e.pop();
e.push("null");
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, APPEND_STRING);
e.mark(end);
}
private static void shrinkStringBuffer(CodeEmitter e, int amt) {
e.dup();
e.dup();
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, LENGTH);
e.push(amt);
e.math(CodeEmitter.SUB, Type.INT_TYPE);
e.invoke_virtual(Constants.TYPE_STRING_BUFFER, SET_LENGTH);
}
public static
|
EmitUtils
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/lock/internal/DB2LockingSupport.java
|
{
"start": 380,
"end": 1471
}
|
class ____ extends LockingSupportParameterized {
/**
* Builds a locking-strategy for DB2 LUW.
*/
public static DB2LockingSupport forDB2(boolean supportsSkipLocked) {
return new DB2LockingSupport(
RowLockStrategy.NONE,
false,
false,
supportsSkipLocked
);
}
/**
* Builds a locking-strategy for DB2 iOS.
*/
public static DB2LockingSupport forDB2i() {
return new DB2LockingSupport(
RowLockStrategy.NONE,
false,
false,
true
);
}
/**
* Builds a locking-strategy for DB2 on zOS.
*/
public static DB2LockingSupport forDB2z() {
return new DB2LockingSupport(
// https://www.ibm.com/docs/en/db2-for-zos/12.0.0?topic=statement-update-clause
RowLockStrategy.COLUMN,
false,
false,
true
);
}
public DB2LockingSupport(
RowLockStrategy rowLockStrategy,
boolean supportsWait,
boolean supportsNoWait,
boolean supportsSkipLocked) {
super(
PessimisticLockStyle.CLAUSE,
rowLockStrategy,
supportsWait,
supportsNoWait,
supportsSkipLocked,
OuterJoinLockingType.FULL
);
}
}
|
DB2LockingSupport
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/foreach/InterfaceConfig.java
|
{
"start": 726,
"end": 799
}
|
interface ____ {
String getHost();
int getPort();
}
|
InterfaceConfig
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation4/SimpleBean.java
|
{
"start": 892,
"end": 948
}
|
class ____.
*
* @author Mark Pollack
*/
public
|
annotation
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/test/java/io/quarkus/arc/test/stereotype/DoubleScopedBeanTestCase.java
|
{
"start": 469,
"end": 968
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(SessionStereotype.class, RequestStereotype.class, DoubleScopedStereotypeBean.class))
.setExpectedException(DeploymentException.class);
@Inject
DoubleScopedStereotypeBean bean;
@Test
public void runTest() {
fail();
}
@SessionScoped
@Stereotype
public @
|
DoubleScopedBeanTestCase
|
java
|
elastic__elasticsearch
|
x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java
|
{
"start": 1550,
"end": 10578
}
|
class ____ extends ESTestCase {
public void testValidateWithNoDocs() {
ActionRequestValidationException validation = new MonitoringBulkRequest().validate();
assertNotNull(validation);
assertThat(validation.validationErrors(), hasItem("no monitoring documents added"));
}
public void testValidateWithEmptySource() throws IOException {
final MonitoringBulkRequest request = new MonitoringBulkRequest();
final int nbDocs = randomIntBetween(0, 5);
for (int i = 0; i < nbDocs; i++) {
request.add(randomMonitoringBulkDoc());
}
final int nbEmptyDocs = randomIntBetween(1, 20);
for (int i = 0; i < nbEmptyDocs; i++) {
request.add(MonitoringTestUtils.randomMonitoringBulkDoc(random(), randomXContentType(), BytesArray.EMPTY));
}
final ActionRequestValidationException validation = request.validate();
assertNotNull(validation);
final List<String> validationErrors = validation.validationErrors();
for (int i = 0; i < nbEmptyDocs; i++) {
assertThat(validationErrors, hasItem("source is missing for monitoring document [" + String.valueOf(nbDocs + i) + "]"));
}
}
public void testAdd() throws IOException {
final MonitoringBulkRequest request = new MonitoringBulkRequest();
final int nbDocs = randomIntBetween(1, 20);
for (int i = 0; i < nbDocs; i++) {
request.add(randomMonitoringBulkDoc());
}
assertThat(request.getDocs(), hasSize(nbDocs));
}
public void testAddRequestContent() throws IOException {
final XContentType xContentType = XContentType.JSON;
final int nbDocs = randomIntBetween(1, 20);
final String[] ids = new String[nbDocs];
final BytesReference[] sources = new BytesReference[nbDocs];
final BytesStreamOutput content = new BytesStreamOutput();
try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType, content)) {
for (int i = 0; i < nbDocs; i++) {
builder.startObject();
{
builder.startObject("index");
{
if (rarely()) {
builder.field("_index", "");
}
builder.field("_type", "_doc");
if (randomBoolean()) {
ids[i] = randomAlphaOfLength(10);
builder.field("_id", ids[i]);
}
}
builder.endObject();
}
builder.endObject();
builder.flush();
content.write(xContentType.xContent().bulkSeparator());
sources[i] = RandomObjects.randomSource(random(), xContentType);
BytesRef bytes = sources[i].toBytesRef();
content.write(bytes.bytes, bytes.offset, bytes.length);
content.write(xContentType.xContent().bulkSeparator());
}
content.write(xContentType.xContent().bulkSeparator());
}
final MonitoredSystem system = randomFrom(MonitoredSystem.values());
final long timestamp = randomNonNegativeLong();
final long interval = randomNonNegativeLong();
final MonitoringBulkRequest bulkRequest = new MonitoringBulkRequest();
bulkRequest.add(system, content.bytes(), xContentType, timestamp, interval);
final Collection<MonitoringBulkDoc> bulkDocs = bulkRequest.getDocs();
assertNotNull(bulkDocs);
assertEquals(nbDocs, bulkDocs.size());
int count = 0;
for (final MonitoringBulkDoc bulkDoc : bulkDocs) {
assertThat(bulkDoc.getSystem(), equalTo(system));
assertThat(bulkDoc.getId(), equalTo(ids[count]));
assertThat(bulkDoc.getTimestamp(), equalTo(timestamp));
assertThat(bulkDoc.getIntervalMillis(), equalTo(interval));
assertThat(bulkDoc.getSource(), equalBytes(sources[count]));
assertThat(bulkDoc.getXContentType(), equalTo(xContentType));
++count;
}
}
public void testAddRequestContentWithEmptySource() throws IOException {
final int nbDocs = randomIntBetween(0, 5);
final int nbEmptyDocs = randomIntBetween(1, 10);
final int totalDocs = nbDocs + nbEmptyDocs;
final XContentType xContentType = XContentType.JSON;
final byte separator = xContentType.xContent().bulkSeparator();
final BytesStreamOutput content = new BytesStreamOutput();
try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType, content)) {
for (int i = 0; i < totalDocs; i++) {
builder.startObject();
{
builder.startObject("index");
{
builder.field("_index", "");
builder.field("_type", "_doc");
builder.field("_id", String.valueOf(i));
}
builder.endObject();
}
builder.endObject();
builder.flush();
content.write(separator);
final BytesRef bytes;
if (i < nbDocs) {
bytes = RandomObjects.randomSource(random(), xContentType).toBytesRef();
} else {
bytes = BytesArray.EMPTY.toBytesRef();
}
content.write(bytes.bytes, bytes.offset, bytes.length);
content.write(separator);
}
content.write(separator);
}
final MonitoringBulkRequest bulkRequest = new MonitoringBulkRequest();
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> bulkRequest.add(randomFrom(MonitoredSystem.values()), content.bytes(), xContentType, 0L, 0L)
);
assertThat(e.getMessage(), containsString("source is missing for monitoring document [][_doc][" + nbDocs + "]"));
}
public void testAddRequestContentWithUnrecognizedIndexName() throws IOException {
final String indexName = randomAlphaOfLength(10);
final XContentType xContentType = XContentType.JSON;
final byte separator = xContentType.xContent().bulkSeparator();
final BytesStreamOutput content = new BytesStreamOutput();
try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType, content)) {
builder.startObject();
{
builder.startObject("index");
{
builder.field("_index", indexName);
}
builder.endObject();
}
builder.endObject();
builder.flush();
content.write(separator);
final BytesRef bytes = RandomObjects.randomSource(random(), xContentType).toBytesRef();
content.write(bytes.bytes, bytes.offset, bytes.length);
content.write(separator);
content.write(separator);
}
final MonitoringBulkRequest bulkRequest = new MonitoringBulkRequest();
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> bulkRequest.add(randomFrom(MonitoredSystem.values()), content.bytes(), xContentType, 0L, 0L)
);
assertThat(e.getMessage(), containsString("unrecognized index name [" + indexName + "]"));
}
public void testSerialization() throws IOException {
final MonitoringBulkRequest originalRequest = new MonitoringBulkRequest();
final int numDocs = iterations(10, 30);
for (int i = 0; i < numDocs; i++) {
originalRequest.add(randomMonitoringBulkDoc());
}
final BytesStreamOutput out = new BytesStreamOutput();
originalRequest.writeTo(out);
final StreamInput in = out.bytes().streamInput();
in.setTransportVersion(out.getTransportVersion());
final MonitoringBulkRequest deserializedRequest = new MonitoringBulkRequest(in);
assertThat(in.available(), equalTo(0));
final MonitoringBulkDoc[] originalBulkDocs = originalRequest.getDocs().toArray(new MonitoringBulkDoc[] {});
final MonitoringBulkDoc[] deserializedBulkDocs = deserializedRequest.getDocs().toArray(new MonitoringBulkDoc[] {});
assertArrayEquals(originalBulkDocs, deserializedBulkDocs);
}
/**
* Return a {@link XContentType} supported by the Monitoring Bulk API (JSON or Smile)
*/
private XContentType randomXContentType() {
return randomFrom(XContentType.JSON, XContentType.SMILE);
}
private MonitoringBulkDoc randomMonitoringBulkDoc() throws IOException {
return MonitoringTestUtils.randomMonitoringBulkDoc(random(), randomXContentType());
}
}
|
MonitoringBulkRequestTests
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/SupportsPushDownOffset.java
|
{
"start": 1186,
"end": 1334
}
|
interface ____ extends ScanBuilder {
/**
* Pushes down OFFSET to the data source.
*/
boolean pushOffset(int offset);
}
|
SupportsPushDownOffset
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java
|
{
"start": 16296,
"end": 18108
}
|
class ____ extends ByteVectorValues {
private final QueryCancellation queryCancellation;
private final ByteVectorValues in;
private ExitableByteVectorValues(QueryCancellation queryCancellation, ByteVectorValues in) {
this.queryCancellation = queryCancellation;
this.in = in;
}
@Override
public int dimension() {
return in.dimension();
}
@Override
public int size() {
return in.size();
}
@Override
public byte[] vectorValue(int ord) throws IOException {
return in.vectorValue(ord);
}
@Override
public int ordToDoc(int ord) {
return in.ordToDoc(ord);
}
@Override
public VectorScorer scorer(byte[] bytes) throws IOException {
VectorScorer scorer = in.scorer(bytes);
if (scorer == null) {
return null;
}
DocIdSetIterator scorerIterator = scorer.iterator();
return new VectorScorer() {
private final DocIdSetIterator iterator = exitableIterator(scorerIterator, queryCancellation);
@Override
public float score() throws IOException {
return scorer.score();
}
@Override
public DocIdSetIterator iterator() {
return iterator;
}
};
}
@Override
public DocIndexIterator iterator() {
return createExitableIterator(in.iterator(), queryCancellation);
}
@Override
public ByteVectorValues copy() throws IOException {
return in.copy();
}
}
private static
|
ExitableByteVectorValues
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/example/soft/SoftAssertionsParameterInjectionExample.java
|
{
"start": 909,
"end": 1115
}
|
class ____ {
@Test
void soft_assertions_example(SoftAssertions soft) {
soft.assertThat(List.of(1, 2)).contains(1);
soft.assertThat(1 + 1).isEqualTo(2);
}
}
|
SoftAssertionsParameterInjectionExample
|
java
|
alibaba__fastjson
|
src/main/java/com/alibaba/fastjson/serializer/ObjectSerializer.java
|
{
"start": 1125,
"end": 1332
}
|
enum ____ {
* LOGIN_FAILURE(8), INVALID_ARGUMENT(0), SIGN_ERROR(17);
* public final int value;
* ResultCode(int value){
* this.value = value;
* }
* }
*
* public static
|
ResultCode
|
java
|
micronaut-projects__micronaut-core
|
http-server/src/main/java/io/micronaut/http/server/filter/DefaultFilterBodyParser.java
|
{
"start": 1797,
"end": 5096
}
|
class ____ implements FilterBodyParser {
private static final Logger LOG = LoggerFactory.getLogger(DefaultFilterBodyParser.class);
private static final @NonNull Argument<Map<String, Object>> MAP_STRING_OBJECT_ARGUMENT = Argument.mapOf(String.class, Object.class);
private final JsonMapper jsonMapper;
private final FormUrlEncodedDecoder formUrlEncodedDecoder;
/**
* @param jsonMapper JSON Mapper
* @param formUrlEncodedDecoder Decoder for form-url-encoded payload
*/
DefaultFilterBodyParser(FormUrlEncodedDecoder formUrlEncodedDecoder,
JsonMapper jsonMapper) {
this.formUrlEncodedDecoder = formUrlEncodedDecoder;
this.jsonMapper = jsonMapper;
}
@Override
@NonNull
@SingleResult
public CompletableFuture<Map<String, Object>> parseBody(@NonNull HttpRequest<?> request) {
Optional<MediaType> mediaTypeOptional = request.getContentType();
if (mediaTypeOptional.isEmpty()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Could not parse body into a Map because the request does not have a Content-Type");
}
return CompletableFuture.completedFuture(Collections.emptyMap());
}
if (!(request instanceof ServerHttpRequest<?>)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Could not parse body into a Map because the request is not an instance of ServerHttpRequest");
}
return CompletableFuture.completedFuture(Collections.emptyMap());
}
MediaType contentType = mediaTypeOptional.get();
if (contentType.equals(MediaType.APPLICATION_FORM_URLENCODED_TYPE)) {
return parseFormUrlEncoded((ServerHttpRequest<?>) request);
} else if (contentType.equals(MediaType.APPLICATION_JSON_TYPE)) {
return parseJson((ServerHttpRequest<?>) request);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Could not parse body into a Map because the request's content type is not either application/x-www-form-urlencoded or application/json");
}
return CompletableFuture.completedFuture(Collections.emptyMap());
}
private CompletableFuture<Map<String, Object>> parseJson(@NonNull ServerHttpRequest<?> request) {
try (CloseableByteBody closeableByteBody = request.byteBody().split(ByteBody.SplitBackpressureMode.FASTEST)) {
return closeableByteBody.buffer()
.thenApply(bb -> {
try {
return jsonMapper.readValue(bb.toByteArray(), MAP_STRING_OBJECT_ARGUMENT);
} catch (IOException e) {
throw new CompletionException(e);
}
});
}
}
private CompletableFuture<Map<String, Object>> parseFormUrlEncoded(@NonNull ServerHttpRequest<?> request) {
try (CloseableByteBody closeableByteBody = request.byteBody().split(ByteBody.SplitBackpressureMode.FASTEST)) {
return closeableByteBody.buffer()
.thenApply(bb -> bb.toString(request.getCharacterEncoding()))
.thenApply(str -> formUrlEncodedDecoder.decode(str, request.getCharacterEncoding()));
}
}
}
|
DefaultFilterBodyParser
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/ReflectionRegistration.java
|
{
"start": 418,
"end": 674
}
|
class ____ it's needed.
*
* @param beanClassName
* @param clientProxyName
*/
default void registerClientProxy(DotName beanClassName, String clientProxyName) {
}
/**
* Register the intercepted subclass for the given bean
|
if
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/ZKPathDumper.java
|
{
"start": 1427,
"end": 4385
}
|
class ____ {
public static final int INDENT = 2;
private final CuratorFramework curator;
private final String root;
private final boolean verbose;
/**
* Create a path dumper -but do not dump the path until asked
* @param curator curator instance
* @param root root
* @param verbose verbose flag - includes more details (such as ACLs)
*/
public ZKPathDumper(CuratorFramework curator,
String root,
boolean verbose) {
Preconditions.checkArgument(curator != null);
Preconditions.checkArgument(root != null);
this.curator = curator;
this.root = root;
this.verbose = verbose;
}
/**
* Trigger the recursive registry dump.
* @return a string view of the registry
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("ZK tree for ").append(root).append('\n');
expand(builder, root, 1);
return builder.toString();
}
/**
* Recursively expand the path into the supplied string builder, increasing
* the indentation by {@link #INDENT} as it proceeds (depth first) down
* the tree
* @param builder string build to append to
* @param path path to examine
* @param indent current indentation
*/
private void expand(StringBuilder builder,
String path,
int indent) {
try {
GetChildrenBuilder childrenBuilder = curator.getChildren();
List<String> children = childrenBuilder.forPath(path);
for (String child : children) {
String childPath = path + "/" + child;
String body;
Stat stat = curator.checkExists().forPath(childPath);
StringBuilder bodyBuilder = new StringBuilder(256);
bodyBuilder.append(" [")
.append(stat.getDataLength())
.append("]");
if (stat.getEphemeralOwner() > 0) {
bodyBuilder.append("*");
}
if (verbose) {
// verbose: extract ACLs
builder.append(" -- ");
List<ACL> acls =
curator.getACL().forPath(childPath);
for (ACL acl : acls) {
builder.append(RegistrySecurity.aclToString(acl));
builder.append(" ");
}
}
body = bodyBuilder.toString();
// print each child
append(builder, indent, ' ');
builder.append('/').append(child);
builder.append(body);
builder.append('\n');
// recurse
expand(builder, childPath, indent + INDENT);
}
} catch (Exception e) {
builder.append(e.toString()).append("\n");
}
}
/**
* Append the specified indentation to a builder
* @param builder string build to append to
* @param indent current indentation
* @param c charactor to use for indentation
*/
private void append(StringBuilder builder, int indent, char c) {
for (int i = 0; i < indent; i++) {
builder.append(c);
}
}
}
|
ZKPathDumper
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/SubApplicationEntityReader.java
|
{
"start": 3427,
"end": 22862
}
|
class ____ extends GenericEntityReader {
private static final SubApplicationTableRW SUB_APPLICATION_TABLE =
new SubApplicationTableRW();
SubApplicationEntityReader(TimelineReaderContext ctxt,
TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) {
super(ctxt, entityFilters, toRetrieve);
}
SubApplicationEntityReader(TimelineReaderContext ctxt,
TimelineDataToRetrieve toRetrieve) {
super(ctxt, toRetrieve);
}
/**
* Uses the {@link SubApplicationTableRW}.
*/
protected BaseTableRW<?> getTable() {
return SUB_APPLICATION_TABLE;
}
@Override
protected FilterList constructFilterListBasedOnFilters() throws IOException {
// Filters here cannot be null for multiple entity reads as they are set in
// augmentParams if null.
FilterList listBasedOnFilters = new FilterList();
TimelineEntityFilters filters = getFilters();
// Create filter list based on created time range and add it to
// listBasedOnFilters.
long createdTimeBegin = filters.getCreatedTimeBegin();
long createdTimeEnd = filters.getCreatedTimeEnd();
if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
listBasedOnFilters.addFilter(TimelineFilterUtils
.createSingleColValueFiltersByRange(SubApplicationColumn.CREATED_TIME,
createdTimeBegin, createdTimeEnd));
}
// Create filter list based on metric filters and add it to
// listBasedOnFilters.
TimelineFilterList metricFilters = filters.getMetricFilters();
if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(
SubApplicationColumnPrefix.METRIC, metricFilters));
}
// Create filter list based on config filters and add it to
// listBasedOnFilters.
TimelineFilterList configFilters = filters.getConfigFilters();
if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(
SubApplicationColumnPrefix.CONFIG, configFilters));
}
// Create filter list based on info filters and add it to listBasedOnFilters
TimelineFilterList infoFilters = filters.getInfoFilters();
if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
listBasedOnFilters.addFilter(TimelineFilterUtils
.createHBaseFilterList(SubApplicationColumnPrefix.INFO, infoFilters));
}
return listBasedOnFilters;
}
/**
* Add {@link QualifierFilter} filters to filter list for each column of
* entity table.
*
* @param list filter list to which qualifier filters have to be added.
*/
protected void updateFixedColumns(FilterList list) {
for (SubApplicationColumn column : SubApplicationColumn.values()) {
list.addFilter(new QualifierFilter(CompareOp.EQUAL,
new BinaryComparator(column.getColumnQualifierBytes())));
}
}
/**
* Creates a filter list which indicates that only some of the column
* qualifiers in the info column family will be returned in result.
*
* @param isApplication If true, it means operations are to be performed for
* application table, otherwise for entity table.
* @return filter list.
* @throws IOException if any problem occurs while creating filter list.
*/
private FilterList createFilterListForColsOfInfoFamily() throws IOException {
FilterList infoFamilyColsFilter = new FilterList(Operator.MUST_PASS_ONE);
// Add filters for each column in entity table.
updateFixedColumns(infoFamilyColsFilter);
EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
// If INFO field has to be retrieved, add a filter for fetching columns
// with INFO column prefix.
if (hasField(fieldsToRetrieve, Field.INFO)) {
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.EQUAL,
SubApplicationColumnPrefix.INFO));
}
TimelineFilterList relatesTo = getFilters().getRelatesTo();
if (hasField(fieldsToRetrieve, Field.RELATES_TO)) {
// If RELATES_TO field has to be retrieved, add a filter for fetching
// columns with RELATES_TO column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.EQUAL,
SubApplicationColumnPrefix.RELATES_TO));
} else if (relatesTo != null && !relatesTo.getFilterList().isEmpty()) {
// Even if fields to retrieve does not contain RELATES_TO, we still
// need to have a filter to fetch some of the column qualifiers if
// relatesTo filters are specified. relatesTo filters will then be
// matched after fetching rows from HBase.
Set<String> relatesToCols =
TimelineFilterUtils.fetchColumnsFromFilterList(relatesTo);
infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
SubApplicationColumnPrefix.RELATES_TO, relatesToCols));
}
TimelineFilterList isRelatedTo = getFilters().getIsRelatedTo();
if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
// If IS_RELATED_TO field has to be retrieved, add a filter for fetching
// columns with IS_RELATED_TO column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.EQUAL,
SubApplicationColumnPrefix.IS_RELATED_TO));
} else if (isRelatedTo != null && !isRelatedTo.getFilterList().isEmpty()) {
// Even if fields to retrieve does not contain IS_RELATED_TO, we still
// need to have a filter to fetch some of the column qualifiers if
// isRelatedTo filters are specified. isRelatedTo filters will then be
// matched after fetching rows from HBase.
Set<String> isRelatedToCols =
TimelineFilterUtils.fetchColumnsFromFilterList(isRelatedTo);
infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
SubApplicationColumnPrefix.IS_RELATED_TO, isRelatedToCols));
}
TimelineFilterList eventFilters = getFilters().getEventFilters();
if (hasField(fieldsToRetrieve, Field.EVENTS)) {
// If EVENTS field has to be retrieved, add a filter for fetching columns
// with EVENT column prefix.
infoFamilyColsFilter.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.EQUAL,
SubApplicationColumnPrefix.EVENT));
} else if (eventFilters != null
&& !eventFilters.getFilterList().isEmpty()) {
// Even if fields to retrieve does not contain EVENTS, we still need to
// have a filter to fetch some of the column qualifiers on the basis of
// event filters specified. Event filters will then be matched after
// fetching rows from HBase.
Set<String> eventCols =
TimelineFilterUtils.fetchColumnsFromFilterList(eventFilters);
infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
SubApplicationColumnPrefix.EVENT, eventCols));
}
return infoFamilyColsFilter;
}
/**
* Exclude column prefixes via filters which are not required(based on fields
* to retrieve) from info column family. These filters are added to filter
* list which contains a filter for getting info column family.
*
* @param infoColFamilyList filter list for info column family.
*/
private void excludeFieldsFromInfoColFamily(FilterList infoColFamilyList) {
EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
// Events not required.
if (!hasField(fieldsToRetrieve, Field.EVENTS)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.NOT_EQUAL,
SubApplicationColumnPrefix.EVENT));
}
// info not required.
if (!hasField(fieldsToRetrieve, Field.INFO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.NOT_EQUAL,
SubApplicationColumnPrefix.INFO));
}
// is related to not required.
if (!hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.NOT_EQUAL,
SubApplicationColumnPrefix.IS_RELATED_TO));
}
// relates to not required.
if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) {
infoColFamilyList.addFilter(
TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.NOT_EQUAL,
SubApplicationColumnPrefix.RELATES_TO));
}
}
/**
* Updates filter list based on fields for confs and metrics to retrieve.
*
* @param listBasedOnFields filter list based on fields.
* @throws IOException if any problem occurs while updating filter list.
*/
private void updateFilterForConfsAndMetricsToRetrieve(
FilterList listBasedOnFields, Set<String> cfsInFields)
throws IOException {
TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
// Please note that if confsToRetrieve is specified, we would have added
// CONFS to fields to retrieve in augmentParams() even if not specified.
if (dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS)) {
// Create a filter list for configs.
listBasedOnFields.addFilter(
TimelineFilterUtils.createFilterForConfsOrMetricsToRetrieve(
dataToRetrieve.getConfsToRetrieve(),
SubApplicationColumnFamily.CONFIGS,
SubApplicationColumnPrefix.CONFIG));
cfsInFields.add(
Bytes.toString(SubApplicationColumnFamily.CONFIGS.getBytes()));
}
// Please note that if metricsToRetrieve is specified, we would have added
// METRICS to fields to retrieve in augmentParams() even if not specified.
if (dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS)) {
// Create a filter list for metrics.
listBasedOnFields.addFilter(
TimelineFilterUtils.createFilterForConfsOrMetricsToRetrieve(
dataToRetrieve.getMetricsToRetrieve(),
SubApplicationColumnFamily.METRICS,
SubApplicationColumnPrefix.METRIC));
cfsInFields.add(
Bytes.toString(SubApplicationColumnFamily.METRICS.getBytes()));
}
}
@Override
protected FilterList constructFilterListBasedOnFields(Set<String> cfsInFields)
throws IOException {
if (!needCreateFilterListBasedOnFields()) {
// Fetch all the columns. No need of a filter.
return null;
}
FilterList listBasedOnFields = new FilterList(Operator.MUST_PASS_ONE);
FilterList infoColFamilyList = new FilterList();
// By default fetch everything in INFO column family.
FamilyFilter infoColumnFamily = new FamilyFilter(CompareOp.EQUAL,
new BinaryComparator(SubApplicationColumnFamily.INFO.getBytes()));
infoColFamilyList.addFilter(infoColumnFamily);
if (fetchPartialColsFromInfoFamily()) {
// We can fetch only some of the columns from info family.
infoColFamilyList.addFilter(createFilterListForColsOfInfoFamily());
} else {
// Exclude column prefixes in info column family which are not required
// based on fields to retrieve.
excludeFieldsFromInfoColFamily(infoColFamilyList);
}
listBasedOnFields.addFilter(infoColFamilyList);
cfsInFields.add(
Bytes.toString(SubApplicationColumnFamily.INFO.getBytes()));
updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields, cfsInFields);
return listBasedOnFields;
}
@Override
protected void validateParams() {
if (getContext() == null) {
throw new NullPointerException("context shouldn't be null");
}
if (getDataToRetrieve() == null) {
throw new NullPointerException("data to retrieve shouldn't be null");
}
if (getContext().getClusterId() == null) {
throw new NullPointerException("clusterId shouldn't be null");
}
if (getContext().getDoAsUser() == null) {
throw new NullPointerException("DoAsUser shouldn't be null");
}
if (getContext().getEntityType() == null) {
throw new NullPointerException("entityType shouldn't be null");
}
}
@Override
protected void augmentParams(Configuration hbaseConf, Connection conn)
throws IOException {
getDataToRetrieve().addFieldsBasedOnConfsAndMetricsToRetrieve();
createFiltersIfNull();
}
private void setMetricsTimeRange(Query query) {
// Set time range for metric values.
HBaseTimelineStorageUtils.setMetricsTimeRange(query,
SubApplicationColumnFamily.METRICS.getBytes(),
getDataToRetrieve().getMetricsTimeBegin(),
getDataToRetrieve().getMetricsTimeEnd());
}
@Override
protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
FilterList filterList) throws IOException {
// Scan through part of the table to find the entities belong to one app
// and one type
Scan scan = new Scan();
TimelineReaderContext context = getContext();
if (context.getDoAsUser() == null) {
throw new BadRequestException("Invalid user!");
}
RowKeyPrefix<SubApplicationRowKey> subApplicationRowKeyPrefix = null;
// default mode, will always scans from beginning of entity type.
if (getFilters() == null || getFilters().getFromId() == null) {
subApplicationRowKeyPrefix = new SubApplicationRowKeyPrefix(
context.getDoAsUser(), context.getClusterId(),
context.getEntityType(), null, null, null);
scan.setRowPrefixFilter(subApplicationRowKeyPrefix.getRowKeyPrefix());
} else { // pagination mode, will scan from given entityIdPrefix!enitityId
SubApplicationRowKey entityRowKey = null;
try {
entityRowKey = SubApplicationRowKey
.parseRowKeyFromString(getFilters().getFromId());
} catch (IllegalArgumentException e) {
throw new BadRequestException("Invalid filter fromid is provided.");
}
if (!context.getClusterId().equals(entityRowKey.getClusterId())) {
throw new BadRequestException(
"fromid doesn't belong to clusterId=" + context.getClusterId());
}
// set start row
scan.withStartRow(entityRowKey.getRowKey());
// get the bytes for stop row
subApplicationRowKeyPrefix = new SubApplicationRowKeyPrefix(
context.getDoAsUser(), context.getClusterId(),
context.getEntityType(), null, null, null);
// set stop row
scan.withStopRow(
HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
subApplicationRowKeyPrefix.getRowKeyPrefix()));
// set page filter to limit. This filter has to set only in pagination
// mode.
filterList.addFilter(new PageFilter(getFilters().getLimit()));
}
setMetricsTimeRange(scan);
scan.setMaxVersions(getDataToRetrieve().getMetricsLimit());
if (filterList != null && !filterList.getFilters().isEmpty()) {
scan.setFilter(filterList);
}
return getTable().getResultScanner(hbaseConf, conn, scan);
}
@Override
protected Result getResult(Configuration hbaseConf, Connection conn,
FilterList filterList) throws IOException {
throw new UnsupportedOperationException(
"we don't support a single entity query");
}
@Override
protected TimelineEntity parseEntity(Result result) throws IOException {
if (result == null || result.isEmpty()) {
return null;
}
TimelineEntity entity = new TimelineEntity();
SubApplicationRowKey parseRowKey =
SubApplicationRowKey.parseRowKey(result.getRow());
entity.setType(parseRowKey.getEntityType());
entity.setId(parseRowKey.getEntityId());
entity.setIdPrefix(parseRowKey.getEntityIdPrefix().longValue());
TimelineEntityFilters filters = getFilters();
// fetch created time
Long createdTime = (Long) ColumnRWHelper.readResult(result,
SubApplicationColumn.CREATED_TIME);
entity.setCreatedTime(createdTime);
EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
// fetch is related to entities and match isRelatedTo filter. If isRelatedTo
// filters do not match, entity would be dropped. We have to match filters
// locally as relevant HBase filters to filter out rows on the basis of
// isRelatedTo are not set in HBase scan.
boolean checkIsRelatedTo =
filters.getIsRelatedTo() != null
&& filters.getIsRelatedTo().getFilterList().size() > 0;
if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO) || checkIsRelatedTo) {
readRelationship(entity, result, SubApplicationColumnPrefix.IS_RELATED_TO,
true);
if (checkIsRelatedTo && !TimelineStorageUtils.matchIsRelatedTo(entity,
filters.getIsRelatedTo())) {
return null;
}
if (!hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
entity.getIsRelatedToEntities().clear();
}
}
// fetch relates to entities and match relatesTo filter. If relatesTo
// filters do not match, entity would be dropped. We have to match filters
// locally as relevant HBase filters to filter out rows on the basis of
// relatesTo are not set in HBase scan.
boolean checkRelatesTo =
!isSingleEntityRead() && filters.getRelatesTo() != null
&& filters.getRelatesTo().getFilterList().size() > 0;
if (hasField(fieldsToRetrieve, Field.RELATES_TO) || checkRelatesTo) {
readRelationship(entity, result, SubApplicationColumnPrefix.RELATES_TO,
false);
if (checkRelatesTo && !TimelineStorageUtils.matchRelatesTo(entity,
filters.getRelatesTo())) {
return null;
}
if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) {
entity.getRelatesToEntities().clear();
}
}
// fetch info if fieldsToRetrieve contains INFO or ALL.
if (hasField(fieldsToRetrieve, Field.INFO)) {
readKeyValuePairs(entity, result, SubApplicationColumnPrefix.INFO, false);
}
// fetch configs if fieldsToRetrieve contains CONFIGS or ALL.
if (hasField(fieldsToRetrieve, Field.CONFIGS)) {
readKeyValuePairs(entity, result, SubApplicationColumnPrefix.CONFIG,
true);
}
// fetch events and match event filters if they exist. If event filters do
// not match, entity would be dropped. We have to match filters locally
// as relevant HBase filters to filter out rows on the basis of events
// are not set in HBase scan.
boolean checkEvents =
!isSingleEntityRead() && filters.getEventFilters() != null
&& filters.getEventFilters().getFilterList().size() > 0;
if (hasField(fieldsToRetrieve, Field.EVENTS) || checkEvents) {
readEvents(entity, result, SubApplicationColumnPrefix.EVENT);
if (checkEvents && !TimelineStorageUtils.matchEventFilters(entity,
filters.getEventFilters())) {
return null;
}
if (!hasField(fieldsToRetrieve, Field.EVENTS)) {
entity.getEvents().clear();
}
}
// fetch metrics if fieldsToRetrieve contains METRICS or ALL.
if (hasField(fieldsToRetrieve, Field.METRICS)) {
readMetrics(entity, result, SubApplicationColumnPrefix.METRIC);
}
entity.getInfo().put(TimelineReaderUtils.FROMID_KEY,
parseRowKey.getRowKeyAsString());
return entity;
}
}
|
SubApplicationEntityReader
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/property/access/internal/PropertyAccessMapImpl.java
|
{
"start": 2349,
"end": 2834
}
|
class ____ implements Setter {
private final String propertyName;
public SetterImpl(String propertyName) {
this.propertyName = propertyName;
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void set(Object target, @Nullable Object value) {
( (Map) target ).put( propertyName, value );
}
@Override
public @Nullable String getMethodName() {
return null;
}
@Override
public @Nullable Method getMethod() {
return null;
}
}
}
|
SetterImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/manytomanyassociationclass/Membership.java
|
{
"start": 266,
"end": 1466
}
|
class ____ {
private Serializable id;
private String name;
private User user;
private Group group;
public Membership() {
}
public Membership(Serializable id) {
this.id = id;
}
public Membership(String name) {
this.name = name;
}
public Membership(Serializable id, String name) {
this.id = id;
this.name = name;
}
public Serializable getId() {
return id;
}
public void setId(Serializable id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
public Group getGroup() {
return group;
}
public void setGroup(Group group) {
this.group = group;
}
public boolean equals(Object obj) {
if ( this == obj ) {
return true;
}
if ( obj instanceof Membership ) {
Membership mem = ( Membership ) obj;
if ( mem.getName() != null && name != null ) {
return mem.getName().equals( name );
}
else {
return super.equals( obj );
}
}
else {
return false;
}
}
public int hashCode() {
return ( name == null ? super.hashCode() : name.hashCode() );
}
}
|
Membership
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/context/properties/ConfigurationPropertiesReportEndpointSerializationTests.java
|
{
"start": 16848,
"end": 17044
}
|
class ____ {
private String name = "123456";
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
}
}
public static
|
Bar
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/metamodel/model/domain/internal/NonAggregatedCompositeSqmPathSource.java
|
{
"start": 540,
"end": 1386
}
|
class ____<J>
extends AbstractSqmPathSource<J> implements CompositeSqmPathSource<J> {
private final ManagedDomainType<J> container;
public NonAggregatedCompositeSqmPathSource(
String localName,
SqmPathSource<J> pathModel,
BindableType bindableType,
ManagedDomainType<J> container) {
super( localName, pathModel, container, bindableType );
this.container = container;
}
@Override
public @Nullable SqmPathSource<?> findSubPathSource(String name) {
return (SqmPathSource<?>) container.findAttribute( name );
}
@Override
public SqmPath<J> createSqmPath(SqmPath<?> lhs, @Nullable SqmPathSource<?> intermediatePathSource) {
return new NonAggregatedCompositeSimplePath<>(
PathHelper.append( lhs, this, intermediatePathSource ),
pathModel,
lhs,
lhs.nodeBuilder()
);
}
}
|
NonAggregatedCompositeSqmPathSource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/Cache.java
|
{
"start": 1959,
"end": 3584
}
|
class ____ collection role.
* <p>
* The appropriate policies depend on the kind of data an entity represents. For
* example, a program might have different caching policies for "reference" data,
* for transactional data, and for data used for analytics. Ordinarily, the
* implementation of those policies is the responsibility of the
* {@linkplain org.hibernate.cache.spi.RegionFactory cache provider} and is
* transparent to code which makes use of a Hibernate {@link Session}. At worst,
* interaction with the cache may be controlled by specification of an explicit
* {@link CacheMode}.
* <p>
* Very occasionally, it's necessary or advantageous to control the cache explicitly
* via programmatic eviction, using, for example, {@link #evictEntityData(Class)} to
* evict a whole cache region, or {@link #evictEntityData(Class, Object)}, to evict
* a single item.
* <p>
* The second-level cache is never aware of changes made externally to Hibernate,
* for example, directly via JDBC, or by another program. Therefore, programmatic
* eviction might be required in order to ensure that such updates do not result
* in stale data being read from the cache.
* <p>
* If multiple entities or roles are mapped to the same cache region, they share
* policies and even the same FIFO-type expiry queue (if any). This sounds useful,
* but comes with the downside that {@link #evictEntityData(Class)} for any one of
* the entities evicts <em>all</em> entities mapped to the same region. It's
* therefore much more common to have a distinct region for each entity and role.
* <p>
* None of the operations of this
|
or
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inject/dagger/ProvidesNullTest.java
|
{
"start": 5361,
"end": 5522
}
|
interface ____ {}
""")
.addSourceLines(
"Test.java",
"""
import dagger.Provides;
public
|
Nullable
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/introspect/JacksonAnnotationIntrospector.java
|
{
"start": 38982,
"end": 57709
}
|
class ____
/**********************************************************************
*/
@Override
public String[] findSerializationPropertyOrder(MapperConfig<?> config, AnnotatedClass ac) {
JsonPropertyOrder order = _findAnnotation(ac, JsonPropertyOrder.class);
return (order == null) ? null : order.value();
}
@Override
public Boolean findSerializationSortAlphabetically(MapperConfig<?> config, Annotated ann) {
return _findSortAlpha(ann);
}
private final Boolean _findSortAlpha(Annotated ann) {
JsonPropertyOrder order = _findAnnotation(ann, JsonPropertyOrder.class);
// 23-Jun-2015, tatu: as per [databind#840], let's only consider
// `true` to have any significance.
if ((order != null) && order.alphabetic()) {
return Boolean.TRUE;
}
return null;
}
@Override
public void findAndAddVirtualProperties(MapperConfig<?> config, AnnotatedClass ac,
List<BeanPropertyWriter> properties) {
JsonAppend ann = _findAnnotation(ac, JsonAppend.class);
if (ann == null) {
return;
}
final boolean prepend = ann.prepend();
JavaType propType = null;
// First: any attribute-backed properties?
JsonAppend.Attr[] attrs = ann.attrs();
for (int i = 0, len = attrs.length; i < len; ++i) {
if (propType == null) {
propType = config.constructType(Object.class);
}
BeanPropertyWriter bpw = _constructVirtualProperty(attrs[i],
config, ac, propType);
if (prepend) {
properties.add(i, bpw);
} else {
properties.add(bpw);
}
}
// Then: general-purpose virtual properties?
JsonAppend.Prop[] props = ann.props();
for (int i = 0, len = props.length; i < len; ++i) {
BeanPropertyWriter bpw = _constructVirtualProperty(props[i],
config, ac);
if (prepend) {
properties.add(i, bpw);
} else {
properties.add(bpw);
}
}
}
protected BeanPropertyWriter _constructVirtualProperty(JsonAppend.Attr attr,
MapperConfig<?> config, AnnotatedClass ac, JavaType type)
{
PropertyMetadata metadata = attr.required() ?
PropertyMetadata.STD_REQUIRED : PropertyMetadata.STD_OPTIONAL;
// could add Index, Description in future, if those matter
String attrName = attr.value();
// allow explicit renaming; if none, default to attribute name
PropertyName propName = _propertyName(attr.propName(), attr.propNamespace());
if (!propName.hasSimpleName()) {
propName = PropertyName.construct(attrName);
}
// now, then, we need a placeholder for member (no real Field/Method):
AnnotatedMember member = new VirtualAnnotatedMember(ac, ac.getRawType(),
attrName, type);
// and with that and property definition
SimpleBeanPropertyDefinition propDef = SimpleBeanPropertyDefinition.construct(config,
member, propName, metadata, attr.include());
// can construct the property writer
return AttributePropertyWriter.construct(attrName, propDef,
ac.getAnnotations(), type);
}
protected BeanPropertyWriter _constructVirtualProperty(JsonAppend.Prop prop,
MapperConfig<?> config, AnnotatedClass ac)
{
PropertyMetadata metadata = prop.required() ?
PropertyMetadata.STD_REQUIRED : PropertyMetadata.STD_OPTIONAL;
PropertyName propName = _propertyName(prop.name(), prop.namespace());
JavaType type = config.constructType(prop.type());
// now, then, we need a placeholder for member (no real Field/Method):
AnnotatedMember member = new VirtualAnnotatedMember(ac, ac.getRawType(),
propName.getSimpleName(), type);
// and with that and property definition
SimpleBeanPropertyDefinition propDef = SimpleBeanPropertyDefinition.construct(config,
member, propName, metadata, prop.include());
Class<?> implClass = prop.value();
HandlerInstantiator hi = config.getHandlerInstantiator();
VirtualBeanPropertyWriter bpw = (hi == null) ? null
: hi.virtualPropertyWriterInstance(config, implClass);
if (bpw == null) {
bpw = (VirtualBeanPropertyWriter) ClassUtil.createInstance(implClass,
config.canOverrideAccessModifiers());
}
// one more thing: give it necessary contextual information
return bpw.withConfig(config, ac, propDef, type);
}
/*
/**********************************************************************
/* Serialization: property annotations
/**********************************************************************
*/
@Override
public PropertyName findNameForSerialization(MapperConfig<?> config, Annotated a)
{
boolean useDefault = false;
JsonGetter jg = _findAnnotation(a, JsonGetter.class);
if (jg != null) {
String s = jg.value();
// 04-May-2018, tatu: Should allow for "nameless" `@JsonGetter` too
if (!s.isEmpty()) {
return PropertyName.construct(s);
}
useDefault = true;
}
JsonProperty pann = _findAnnotation(a, JsonProperty.class);
if (pann != null) {
// 14-Nov-2020, tatu: "namespace" added in 2.12
String ns = pann.namespace();
if (ns != null && ns.isEmpty()) {
ns = null;
}
return PropertyName.construct(pann.value(), ns);
}
if (useDefault || _hasOneOf(a, ANNOTATIONS_TO_INFER_SER)) {
return PropertyName.USE_DEFAULT;
}
return null;
}
@Override // since 2.12
public Boolean hasAsKey(MapperConfig<?> config, Annotated a) {
JsonKey ann = _findAnnotation(a, JsonKey.class);
if (ann == null) {
return null;
}
return ann.value();
}
@Override
public Boolean hasAsValue(MapperConfig<?> config, Annotated a) {
JsonValue ann = _findAnnotation(a, JsonValue.class);
if (ann == null) {
return null;
}
return ann.value();
}
@Override
public Boolean hasAnyGetter(MapperConfig<?> config, Annotated a) {
JsonAnyGetter ann = _findAnnotation(a, JsonAnyGetter.class);
if (ann == null) {
return null;
}
return ann.enabled();
}
/*
/**********************************************************************
/* Deserialization: general annotations
/**********************************************************************
*/
@Override
public Object findDeserializer(MapperConfig<?> config, Annotated a)
{
JsonDeserialize ann = _findAnnotation(a, JsonDeserialize.class);
if (ann != null) {
@SuppressWarnings("rawtypes")
Class<? extends ValueDeserializer> deserClass = ann.using();
if (deserClass != ValueDeserializer.None.class) {
return deserClass;
}
}
return null;
}
@Override
public Object findKeyDeserializer(MapperConfig<?> config, Annotated a)
{
JsonDeserialize ann = _findAnnotation(a, JsonDeserialize.class);
if (ann != null) {
Class<? extends KeyDeserializer> deserClass = ann.keyUsing();
if (deserClass != KeyDeserializer.None.class) {
return deserClass;
}
}
return null;
}
@Override
public Object findContentDeserializer(MapperConfig<?> config, Annotated a)
{
JsonDeserialize ann = _findAnnotation(a, JsonDeserialize.class);
if (ann != null) {
@SuppressWarnings("rawtypes")
Class<? extends ValueDeserializer> deserClass = ann.contentUsing();
if (deserClass != ValueDeserializer.None.class) {
return deserClass;
}
}
return null;
}
@Override
public Object findDeserializationConverter(MapperConfig<?> config, Annotated a)
{
JsonDeserialize ann = _findAnnotation(a, JsonDeserialize.class);
return (ann == null) ? null : _classIfExplicit(ann.converter(), Converter.None.class);
}
@Override
public Object findDeserializationContentConverter(MapperConfig<?> config, AnnotatedMember a)
{
JsonDeserialize ann = _findAnnotation(a, JsonDeserialize.class);
return (ann == null) ? null : _classIfExplicit(ann.contentConverter(), Converter.None.class);
}
/*
/**********************************************************************
/* Deserialization: type modifications
/**********************************************************************
*/
@Override
public JavaType refineDeserializationType(final MapperConfig<?> config,
final Annotated a, final JavaType baseType)
{
JavaType type = baseType;
final TypeFactory tf = config.getTypeFactory();
final JsonDeserialize jsonDeser = _findAnnotation(a, JsonDeserialize.class);
// Ok: start by refining the main type itself; common to all types
final Class<?> valueClass = (jsonDeser == null) ? null : _classIfExplicit(jsonDeser.as());
if ((valueClass != null) && !type.hasRawClass(valueClass)
&& !_primitiveAndWrapper(type, valueClass)) {
try {
type = tf.constructSpecializedType(type, valueClass);
} catch (IllegalArgumentException iae) {
throw _databindException(iae,
String.format("Failed to narrow type %s with annotation (value %s), from '%s': %s",
type, valueClass.getName(), a.getName(), iae.getMessage()));
}
}
// Then further processing for container types
// First, key type (for Maps, Map-like types):
if (type.isMapLikeType()) {
JavaType keyType = type.getKeyType();
final Class<?> keyClass = (jsonDeser == null) ? null : _classIfExplicit(jsonDeser.keyAs());
if ((keyClass != null)
&& !_primitiveAndWrapper(keyType, keyClass)) {
try {
keyType = tf.constructSpecializedType(keyType, keyClass);
type = ((MapLikeType) type).withKeyType(keyType);
} catch (IllegalArgumentException iae) {
throw _databindException(iae,
String.format("Failed to narrow key type of %s with concrete-type annotation (value %s), from '%s': %s",
type, keyClass.getName(), a.getName(), iae.getMessage()));
}
}
}
JavaType contentType = type.getContentType();
if (contentType != null) { // collection[like], map[like], array, reference
// And then value types for all containers:
final Class<?> contentClass = (jsonDeser == null) ? null : _classIfExplicit(jsonDeser.contentAs());
if ((contentClass != null)
&& !_primitiveAndWrapper(contentType, contentClass)) {
try {
contentType = tf.constructSpecializedType(contentType, contentClass);
type = type.withContentType(contentType);
} catch (IllegalArgumentException iae) {
throw _databindException(iae,
String.format("Failed to narrow value type of %s with concrete-type annotation (value %s), from '%s': %s",
type, contentClass.getName(), a.getName(), iae.getMessage()));
}
}
}
return type;
}
/*
/**********************************************************************
/* Deserialization: Class annotations
/**********************************************************************
*/
@Override
public Object findValueInstantiator(MapperConfig<?> config, AnnotatedClass ac)
{
JsonValueInstantiator ann = _findAnnotation(ac, JsonValueInstantiator.class);
// no 'null' marker yet, so:
return (ann == null) ? null : ann.value();
}
@Override
public Class<?> findPOJOBuilder(MapperConfig<?> config, AnnotatedClass ac)
{
JsonDeserialize ann = _findAnnotation(ac, JsonDeserialize.class);
return (ann == null) ? null : _classIfExplicit(ann.builder());
}
@Override
public JsonPOJOBuilder.Value findPOJOBuilderConfig(MapperConfig<?> config, AnnotatedClass ac)
{
JsonPOJOBuilder ann = _findAnnotation(ac, JsonPOJOBuilder.class);
return (ann == null) ? null : new JsonPOJOBuilder.Value(ann);
}
/*
/**********************************************************************
/* Deserialization: property annotations
/**********************************************************************
*/
@Override
public PropertyName findNameForDeserialization(MapperConfig<?> config, Annotated a)
{
// @JsonSetter has precedence over @JsonProperty, being more specific
boolean useDefault = false;
JsonSetter js = _findAnnotation(a, JsonSetter.class);
if (js != null) {
String s = js.value();
// 04-May-2018, tatu: Need to allow for "nameless" `@JsonSetter` too
if (s.isEmpty()) {
useDefault = true;
} else {
return PropertyName.construct(s);
}
}
JsonProperty pann = _findAnnotation(a, JsonProperty.class);
if (pann != null) {
// 14-Nov-2020, tatu: "namespace" added in 2.12
String ns = pann.namespace();
if (ns != null && ns.isEmpty()) {
ns = null;
}
return PropertyName.construct(pann.value(), ns);
}
if (useDefault || _hasOneOf(a, ANNOTATIONS_TO_INFER_DESER)) {
return PropertyName.USE_DEFAULT;
}
return null;
}
@Override
public Boolean hasAnySetter(MapperConfig<?> config, Annotated a) {
JsonAnySetter ann = _findAnnotation(a, JsonAnySetter.class);
return (ann == null) ? null : ann.enabled();
}
@Override
public JsonSetter.Value findSetterInfo(MapperConfig<?> config, Annotated a) {
return JsonSetter.Value.from(_findAnnotation(a, JsonSetter.class));
}
@Override
public Boolean findMergeInfo(MapperConfig<?> config, Annotated a) {
JsonMerge ann = _findAnnotation(a, JsonMerge.class);
return (ann == null) ? null : ann.value().asBoolean();
}
@Override
public JsonCreator.Mode findCreatorAnnotation(MapperConfig<?> config, Annotated a) {
JsonCreator ann = _findAnnotation(a, JsonCreator.class);
JsonCreator.Mode mode;
if (ann == null) {
mode = null;
} else {
mode = ann.mode();
// 25-Jan-2025, tatu: [databind#4809] Need to avoid "DEFAULT" from masking
// @CreatorProperties-provided value
if (mode != JsonCreator.Mode.DEFAULT) {
return mode;
}
}
if (_cfgConstructorPropertiesImpliesCreator
&& config.isEnabled(MapperFeature.INFER_CREATOR_FROM_CONSTRUCTOR_PROPERTIES)
) {
if (_javaBeansHelper != null) {
Boolean b = _javaBeansHelper.hasCreatorAnnotation(a);
if ((b != null) && b.booleanValue()) {
// 13-Sep-2016, tatu: Judgment call, but I don't think JDK ever implies
// use of delegate; assumes as-properties implicitly
return JsonCreator.Mode.PROPERTIES;
}
}
}
return mode;
}
/*
/**********************************************************************
/* Helper methods
/**********************************************************************
*/
protected boolean _isIgnorable(Annotated a)
{
JsonIgnore ann = _findAnnotation(a, JsonIgnore.class);
if (ann != null) {
return ann.value();
}
// From JDK 7/java.beans
if (_javaBeansHelper != null) {
Boolean b = _javaBeansHelper.findTransient(a);
if (b != null) {
return b.booleanValue();
}
}
return false;
}
protected Class<?> _classIfExplicit(Class<?> cls) {
if (cls == null || ClassUtil.isBogusClass(cls)) {
return null;
}
return cls;
}
protected Class<?> _classIfExplicit(Class<?> cls, Class<?> implicit) {
cls = _classIfExplicit(cls);
return (cls == null || cls == implicit) ? null : cls;
}
protected PropertyName _propertyName(String localName, String namespace) {
if (localName.isEmpty()) {
return PropertyName.USE_DEFAULT;
}
if (namespace == null || namespace.isEmpty()) {
return PropertyName.construct(localName);
}
return PropertyName.construct(localName, namespace);
}
private boolean _primitiveAndWrapper(Class<?> baseType, Class<?> refinement)
{
if (baseType.isPrimitive()) {
return baseType == ClassUtil.primitiveType(refinement);
}
if (refinement.isPrimitive()) {
return refinement == ClassUtil.primitiveType(baseType);
}
return false;
}
private boolean _primitiveAndWrapper(JavaType baseType, Class<?> refinement)
{
if (baseType.isPrimitive()) {
return baseType.hasRawClass(ClassUtil.primitiveType(refinement));
}
if (refinement.isPrimitive()) {
return refinement == ClassUtil.primitiveType(baseType.getRawClass());
}
return false;
}
// @since 2.12
private DatabindException _databindException(String msg) {
// not optimal as we have no parser/generator/context to pass
return DatabindException.from((JsonParser) null, msg);
}
// @since 2.12
private DatabindException _databindException(Throwable t, String msg) {
// not optimal as we have no parser/generator/context to pass
return DatabindException.from((JsonParser) null, msg, t);
}
}
|
annotations
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerFieldUpdaterAssert.java
|
{
"start": 1570,
"end": 2453
}
|
class ____ a non-private volatile int field (age).
* AtomicIntegerFieldUpdater<Person> ageUpdater = AtomicIntegerFieldUpdater.newUpdater(Person.class, "age");
*
* // this assertion succeeds:
* ageUpdater.set(person, 25);
* assertThat(ageUpdater).hasValue(25, person);
*
* // this assertion fails:
* fieldUpdater.set(person, 28);
* assertThat(fieldUpdater).hasValue(25, person);</code></pre>
*
* @param expectedValue the expected value inside the {@code OBJECT}.
* @param obj the object holding the updatable field.
* @return this assertion object.
* @since 2.7.0 / 3.7.0
*/
@Override
public AtomicIntegerFieldUpdaterAssert<OBJECT> hasValue(Integer expectedValue, OBJECT obj) {
return super.hasValue(expectedValue, obj);
}
@Override
protected Integer getActualValue(OBJECT obj) {
return actual.get(obj);
}
}
|
holding
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/jdk/JDKScalarsDeserTest.java
|
{
"start": 1200,
"end": 1287
}
|
class ____ {
int _v;
void setV(int v) { _v = v; }
}
static
|
IntBean
|
java
|
dropwizard__dropwizard
|
dropwizard-testing/src/test/java/io/dropwizard/testing/junit5/ReuseDropwizardAppExtensionTestSuite.java
|
{
"start": 1341,
"end": 1880
}
|
class ____ {
static final DropwizardAppExtension<TestConfiguration> EXTENSION = ReuseDropwizardAppExtensionTestSuite.EXTENSION;
@Test
void clientHasNotBeenClosed() {
final String response = EXTENSION.client()
.target("http://localhost:" + EXTENSION.getAdminPort() + "/tasks/echo")
.request()
.post(Entity.entity("Custom message", MediaType.TEXT_PLAIN), String.class);
assertThat(response).isEqualTo("Custom message");
}
}
|
DropwizardAppExtensionTestSuiteBarTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/mappingcontrol/ErroneousBuiltInAndMethodMapper.java
|
{
"start": 848,
"end": 1054
}
|
class ____ {
private Calendar time;
public Target(Calendar time) {
this.time = time;
}
public Calendar getTime() {
return time;
}
}
}
|
Target
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/lucene/index/LazySoftDeletesDirectoryReaderWrapperTests.java
|
{
"start": 1705,
"end": 8247
}
|
class ____ extends LuceneTestCase {
public void testDropFullyDeletedSegments() throws IOException {
IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
String softDeletesField = "soft_delete";
indexWriterConfig.setSoftDeletesField(softDeletesField);
indexWriterConfig.setMergePolicy(
new SoftDeletesRetentionMergePolicy(softDeletesField, MatchAllDocsQuery::new, NoMergePolicy.INSTANCE)
);
try (Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, indexWriterConfig)) {
Document doc = new Document();
doc.add(new StringField("id", "1", Field.Store.YES));
doc.add(new StringField("version", "1", Field.Store.YES));
writer.addDocument(doc);
writer.commit();
doc = new Document();
doc.add(new StringField("id", "2", Field.Store.YES));
doc.add(new StringField("version", "1", Field.Store.YES));
writer.addDocument(doc);
writer.commit();
try (DirectoryReader reader = new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField)) {
assertEquals(2, reader.leaves().size());
assertEquals(2, reader.numDocs());
assertEquals(2, reader.maxDoc());
assertEquals(0, reader.numDeletedDocs());
}
writer.updateDocValues(new Term("id", "1"), new NumericDocValuesField(softDeletesField, 1));
writer.commit();
try (DirectoryReader reader = new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField)) {
assertEquals(1, reader.numDocs());
assertEquals(1, reader.maxDoc());
assertEquals(0, reader.numDeletedDocs());
assertEquals(1, reader.leaves().size());
}
try (DirectoryReader reader = DirectoryReader.open(dir)) {
assertEquals(2, reader.numDocs());
assertEquals(2, reader.maxDoc());
assertEquals(0, reader.numDeletedDocs());
assertEquals(2, reader.leaves().size());
}
}
}
public void testMixSoftAndHardDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
String softDeletesField = "soft_delete";
indexWriterConfig.setSoftDeletesField(softDeletesField);
IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
Set<Integer> uniqueDocs = new HashSet<>();
for (int i = 0; i < 100; i++) {
int docId = random().nextInt(5);
uniqueDocs.add(docId);
Document doc = new Document();
doc.add(new StringField("id", String.valueOf(docId), Field.Store.YES));
if (docId % 2 == 0) {
writer.updateDocument(new Term("id", String.valueOf(docId)), doc);
} else {
writer.softUpdateDocument(new Term("id", String.valueOf(docId)), doc, new NumericDocValuesField(softDeletesField, 0));
}
}
writer.commit();
writer.close();
DirectoryReader reader = new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
assertEquals(uniqueDocs.size(), reader.numDocs());
IndexSearcher searcher = newSearcher(reader);
for (Integer docId : uniqueDocs) {
assertEquals(1, searcher.count(new TermQuery(new Term("id", docId.toString()))));
}
IOUtils.close(reader, dir);
}
public void testReaderCacheKey() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
String softDeletesField = "soft_delete";
indexWriterConfig.setSoftDeletesField(softDeletesField);
indexWriterConfig.setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
Document doc = new Document();
doc.add(new StringField("id", "1", Field.Store.YES));
doc.add(new StringField("version", "1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(new StringField("id", "2", Field.Store.YES));
doc.add(new StringField("version", "1", Field.Store.YES));
writer.addDocument(doc);
writer.commit();
DirectoryReader reader = new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
IndexReader.CacheHelper readerCacheHelper = reader.leaves().get(0).reader().getReaderCacheHelper();
AtomicInteger leafCalled = new AtomicInteger(0);
AtomicInteger dirCalled = new AtomicInteger(0);
readerCacheHelper.addClosedListener(key -> {
leafCalled.incrementAndGet();
assertSame(key, readerCacheHelper.getKey());
});
IndexReader.CacheHelper dirReaderCacheHelper = reader.getReaderCacheHelper();
dirReaderCacheHelper.addClosedListener(key -> {
dirCalled.incrementAndGet();
assertSame(key, dirReaderCacheHelper.getKey());
});
assertEquals(2, reader.numDocs());
assertEquals(2, reader.maxDoc());
assertEquals(0, reader.numDeletedDocs());
doc = new Document();
doc.add(new StringField("id", "1", Field.Store.YES));
doc.add(new StringField("version", "2", Field.Store.YES));
writer.softUpdateDocument(new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
doc = new Document();
doc.add(new StringField("id", "3", Field.Store.YES));
doc.add(new StringField("version", "1", Field.Store.YES));
writer.addDocument(doc);
writer.commit();
assertEquals(0, leafCalled.get());
assertEquals(0, dirCalled.get());
DirectoryReader newReader = new LazySoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
assertEquals(0, leafCalled.get());
assertEquals(0, dirCalled.get());
assertNotSame(newReader.getReaderCacheHelper().getKey(), reader.getReaderCacheHelper().getKey());
assertNotSame(newReader, reader);
reader.close();
reader = newReader;
assertEquals(1, dirCalled.get());
assertEquals(1, leafCalled.get());
IOUtils.close(reader, writer, dir);
}
}
|
LazySoftDeletesDirectoryReaderWrapperTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java
|
{
"start": 646,
"end": 1753
}
|
class ____ implements AggregatorFunctionSupplier {
private final int limit;
private final boolean ascending;
public TopLongAggregatorFunctionSupplier(int limit, boolean ascending) {
this.limit = limit;
this.ascending = ascending;
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return TopLongAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return TopLongGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public TopLongAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
return TopLongAggregatorFunction.create(driverContext, channels, limit, ascending);
}
@Override
public TopLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
List<Integer> channels) {
return TopLongGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
}
@Override
public String describe() {
return "top of longs";
}
}
|
TopLongAggregatorFunctionSupplier
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/postgresql/issues/Issue3538.java
|
{
"start": 720,
"end": 3415
}
|
class ____ {
@Test
public void test_end() throws Exception {
DbType dbType = DbType.postgresql;
String sql = "end;\nend;";
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> statements = parser.parseStatementList();
assertEquals(2, statements.size());
FormatOption DEFAULT_FORMAT_OPTION = new FormatOption(false, true, false);
for (SQLStatement statement : statements) {
System.out.println("sql: " + statement.getClass().getName() + " " + statement.toString());
String result = SQLUtils.toSQLString(statement, dbType, DEFAULT_FORMAT_OPTION);
System.out.println(result);
}
}
@Test
public void test_begin_end() throws Exception {
DbType dbType = DbType.postgresql;
String sql = "begin;\n"
+ "update table set name='a', sn='a' where id=1;\n"
+ "update table set name='b', sn='b' where id=2;\n"
+ "update table set name='c', sn='c' where id=3;\n"
+ "end;";
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> statements = parser.parseStatementList();
assertEquals(5, statements.size());
com.alibaba.druid.sql.dialect.postgresql.ast.stmt.PGUpdateStatement jjj;
FormatOption DEFAULT_FORMAT_OPTION = new FormatOption(false, true, false);
for (SQLStatement statement : statements) {
System.out.println("sql: " + statement.getClass().getName() + " " + statement.toString());
String result = SQLUtils.toSQLString(statement, dbType, DEFAULT_FORMAT_OPTION);
System.out.println(result);
}
}
@Test
public void test_start_end() throws Exception {
String sql = "start transaction;\n"
+ "update table set name='a', sn='a' where id=1;\n"
+ "update table set name='b', sn='b' where id=2;\n"
+ "update table set name='c', sn='c' where id=3;\n"
+ "end;";
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, DbType.postgresql);
List<SQLStatement> statements = parser.parseStatementList();
assertEquals(5, statements.size());
FormatOption DEFAULT_FORMAT_OPTION = new FormatOption(false, true, false);
for (SQLStatement statement : statements) {
System.out.println("sql: " + statement.getClass().getName() + " " + statement.toString());
String result = SQLUtils.toSQLString(statement, DbType.postgresql, DEFAULT_FORMAT_OPTION);
System.out.println(result);
}
}
}
|
Issue3538
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/mixins/MixinForFactoryMethod3220Test.java
|
{
"start": 959,
"end": 1060
}
|
class ____
extends DatabindTestUtil
{
// [databind#3220]
static
|
MixinForFactoryMethod3220Test
|
java
|
apache__camel
|
core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedBeanProcessor.java
|
{
"start": 1262,
"end": 2290
}
|
class ____ extends ManagedProcessor implements ManagedBeanMBean {
private transient String beanClassName;
public ManagedBeanProcessor(CamelContext context, BeanProcessor processor, ProcessorDefinition<?> definition) {
super(context, processor, definition);
}
@Override
public BeanProcessor getProcessor() {
return (BeanProcessor) super.getProcessor();
}
@Override
public Object getInstance() {
return getProcessor().getBean();
}
@Override
public String getMethod() {
return getProcessor().getMethod();
}
@Override
public String getBeanClassName() {
if (beanClassName != null) {
return beanClassName;
}
try {
Object bean = getProcessor().getBean();
if (bean != null) {
beanClassName = ObjectHelper.className(bean);
}
} catch (NoSuchBeanException e) {
// ignore
}
return beanClassName;
}
}
|
ManagedBeanProcessor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/json/PostgreSQLJsonReplaceFunction.java
|
{
"start": 733,
"end": 3354
}
|
class ____ extends AbstractJsonReplaceFunction {
public PostgreSQLJsonReplaceFunction(TypeConfiguration typeConfiguration) {
super( typeConfiguration );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> arguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> translator) {
final Expression json = (Expression) arguments.get( 0 );
final Expression jsonPath = (Expression) arguments.get( 1 );
final SqlAstNode value = arguments.get( 2 );
sqlAppender.appendSql( "jsonb_set(" );
final boolean needsCast = !isJsonType( json ) && AbstractSqlAstTranslator.isParameter( json );
if ( needsCast ) {
sqlAppender.appendSql( "cast(" );
}
json.accept( translator );
if ( needsCast ) {
sqlAppender.appendSql( " as jsonb)" );
}
sqlAppender.appendSql( ',' );
List<JsonPathHelper.JsonPathElement> jsonPathElements =
JsonPathHelper.parseJsonPathElements( translator.getLiteralValue( jsonPath ) );
sqlAppender.appendSql( "array" );
char separator = '[';
for ( JsonPathHelper.JsonPathElement pathElement : jsonPathElements ) {
sqlAppender.appendSql( separator );
if ( pathElement instanceof JsonPathHelper.JsonAttribute attribute ) {
sqlAppender.appendSingleQuoteEscapedString( attribute.attribute() );
}
else if ( pathElement instanceof JsonPathHelper.JsonParameterIndexAccess jsonParameterIndexAccess ) {
final String parameterName = jsonParameterIndexAccess.parameterName();
throw new QueryException( "JSON path [" + jsonPath + "] uses parameter [" + parameterName + "] that is not passed" );
}
else {
sqlAppender.appendSql( '\'' );
sqlAppender.appendSql( ( (JsonPathHelper.JsonIndexAccess) pathElement ).index() );
sqlAppender.appendSql( '\'' );
}
separator = ',';
}
sqlAppender.appendSql( "]::text[]," );
if ( value instanceof Literal literal && literal.getLiteralValue() == null ) {
sqlAppender.appendSql( "null::jsonb" );
}
else {
sqlAppender.appendSql( "to_jsonb(" );
value.accept( translator );
if ( value instanceof Literal literal && literal.getJdbcMapping().getJdbcType().isString() ) {
// PostgreSQL until version 16 is not smart enough to infer the type of a string literal
sqlAppender.appendSql( "::text" );
}
sqlAppender.appendSql( ')' );
}
sqlAppender.appendSql( ",false)" );
}
private boolean isJsonType(Expression expression) {
final JdbcMappingContainer expressionType = expression.getExpressionType();
return expressionType != null && expressionType.getSingleJdbcMapping().getJdbcType().isJson();
}
}
|
PostgreSQLJsonReplaceFunction
|
java
|
reactor__reactor-core
|
reactor-core-micrometer/src/test/java/reactor/core/observability/micrometer/MicrometerMeterListenerConfigurationTest.java
|
{
"start": 1352,
"end": 8334
}
|
class ____ {
@ParameterizedTestWithName
@CsvSource(value = {
",",
"someName,",
",someTag",
"someName,someTag"
})
void fromFlux(@Nullable String name, @Nullable String tag) {
MeterRegistry expectedRegistry = new SimpleMeterRegistry();
Clock expectedClock = Clock.SYSTEM;
Flux<Integer> flux = Flux.just(1, 2, 3);
if (name != null) {
flux = flux.name(name);
}
if (tag != null) {
flux = flux.tag("tag", tag);
}
MicrometerMeterListenerConfiguration configuration = MicrometerMeterListenerConfiguration.fromFlux(flux, expectedRegistry);
assertThat(configuration.registry.config().clock()).as("clock").isSameAs(expectedClock);
assertThat(configuration.registry).as("registry").isSameAs(expectedRegistry);
assertThat(configuration.isMono).as("isMono").isFalse();
assertThat(configuration.sequenceName)
.as("sequenceName")
.isEqualTo(name == null ? Micrometer.DEFAULT_METER_PREFIX : name);
if (tag == null) {
assertThat(configuration.commonTags.stream().map(t -> t.getKey() + "=" + t.getValue()))
.as("commonTags without additional tag")
.containsExactly("type=Flux");
}
else {
assertThat(configuration.commonTags.stream().map(t -> t.getKey() + "=" + t.getValue()))
.as("commonTags")
.containsExactlyInAnyOrder("type=Flux", "tag="+tag);
}
}
@ParameterizedTestWithName
@CsvSource(value = {
",",
"someName,",
",someTag",
"someName,someTag"
})
void fromMono(@Nullable String name, @Nullable String tag) {
MeterRegistry expectedRegistry = new SimpleMeterRegistry();
Clock expectedClock = Clock.SYSTEM;
Mono<Integer> mono = Mono.just(1);
if (name != null) {
mono = mono.name(name);
}
if (tag != null) {
mono = mono.tag("tag", tag);
}
MicrometerMeterListenerConfiguration configuration = MicrometerMeterListenerConfiguration.fromMono(mono, expectedRegistry);
assertThat(configuration.registry.config().clock()).as("clock").isSameAs(expectedClock);
assertThat(configuration.registry).as("registry").isSameAs(expectedRegistry);
assertThat(configuration.isMono).as("isMono").isTrue();
assertThat(configuration.sequenceName)
.as("sequenceName")
.isEqualTo(name == null ? Micrometer.DEFAULT_METER_PREFIX : name);
if (tag == null) {
assertThat(configuration.commonTags.stream().map(t -> t.getKey() + "=" + t.getValue()))
.as("commonTags without additional tag")
.containsExactly("type=Mono");
}
else {
assertThat(configuration.commonTags.stream().map(t -> t.getKey() + "=" + t.getValue()))
.as("commonTags")
.containsExactlyInAnyOrder("type=Mono", "tag="+tag);
}
}
@Test
void resolveName_notSet() {
String defaultName = "ANONYMOUS";
TestLogger logger = new TestLogger(false);
Flux<Integer> flux = Flux.just(1);
String resolvedName = MicrometerMeterListenerConfiguration.resolveName(flux, logger, defaultName);
assertThat(resolvedName).isEqualTo(defaultName);
assertThat(logger.getOutContent() + logger.getErrContent()).as("logs").isEmpty();
}
@Test
void resolveName_setRightAbove() {
TestLogger logger = new TestLogger(false);
Flux<Integer> flux = Flux.just(1).name("someName");
String resolvedName = MicrometerMeterListenerConfiguration.resolveName(flux, logger, "UNEXPECTED");
assertThat(resolvedName).isEqualTo("someName");
assertThat(logger.getOutContent() + logger.getErrContent()).as("logs").isEmpty();
}
@Test
void resolveName_setHigherAbove() {
TestLogger logger = new TestLogger(false);
Flux<Integer> flux = Flux.just(1).name("someName").filter(i -> i % 2 == 0).map(i -> i + 10);
String resolvedName = MicrometerMeterListenerConfiguration.resolveName(flux, logger, "UNEXPECTED");
assertThat(resolvedName).isEqualTo("someName");
assertThat(logger.getOutContent() + logger.getErrContent()).as("logs").isEmpty();
}
@Test
void resolveName_notScannable() {
String defaultName = "ANONYMOUS";
TestLogger testLogger = new TestLogger(false);
Publisher<Object> publisher = Operators::complete;
String resolvedName = MicrometerMeterListenerConfiguration.resolveName(publisher, testLogger, defaultName);
assertThat(resolvedName).as("resolved name").isEqualTo(defaultName);
assertThat(testLogger.getErrContent()).contains("Attempting to activate metrics but the upstream is not Scannable. You might want to use `name()` (and optionally `tags()`) right before this listener");
}
@Test
void resolveTags_notSet() {
Tags defaultTags = Tags.of("common1", "commonValue1");
Flux<Integer> flux = Flux.just(1);
Tags resolvedTags = MicrometerMeterListenerConfiguration.resolveTags(flux, defaultTags);
assertThat(resolvedTags.stream().map(Object::toString))
.containsExactly("tag(common1=commonValue1)");
}
@Test
void resolveTags_setRightAbove() {
Tags defaultTags = Tags.of("common1", "commonValue1");
Flux<Integer> flux = Flux
.just(1)
.tag("k1", "v1");
Tags resolvedTags = MicrometerMeterListenerConfiguration.resolveTags(flux, defaultTags);
assertThat(resolvedTags.stream().map(Object::toString)).containsExactlyInAnyOrder(
"tag(common1=commonValue1)",
"tag(k1=v1)"
);
}
@Test
void resolveTags_setHigherAbove() {
Tags defaultTags = Tags.of("common1", "commonValue1");
Flux<Integer> flux = Flux
.just(1)
.tag("k1", "v1")
.filter(i -> i % 2 == 0)
.map(i -> i + 10);
Tags resolvedTags = MicrometerMeterListenerConfiguration.resolveTags(flux, defaultTags);
assertThat(resolvedTags.stream().map(Object::toString)).containsExactlyInAnyOrder(
"tag(common1=commonValue1)",
"tag(k1=v1)"
);
}
@Test
void resolveTags_multipleScatteredTagsSetAbove() {
Tags defaultTags = Tags.of("common1", "commonValue1");
Flux<Integer> flux = Flux.just(1)
.tag("k1", "v1")
.filter(i -> i % 2 == 0)
.tag("k2", "v2")
.map(i -> i + 10);
Tags resolvedTags = MicrometerMeterListenerConfiguration.resolveTags(flux, defaultTags);
assertThat(resolvedTags.stream().map(Object::toString)).containsExactlyInAnyOrder(
"tag(common1=commonValue1)",
"tag(k1=v1)",
"tag(k2=v2)"
);
}
@Test
void resolveTags_multipleScatteredTagsSetAboveWithDeduplication() {
Tags defaultTags = Tags.of("common1", "commonValue1");
Flux<Integer> flux = Flux.just(1)
.tag("k1", "v1")
.tag("k2", "oldV2")
.filter(i -> i % 2 == 0)
.tag("k2", "v2")
.map(i -> i + 10);
Tags resolvedTags = MicrometerMeterListenerConfiguration.resolveTags(flux, defaultTags);
assertThat(resolvedTags.stream().map(Object::toString)).containsExactly(
"tag(common1=commonValue1)",
"tag(k1=v1)",
"tag(k2=v2)"
);
}
@Test
void resolveTags_notScannable() {
Tags defaultTags = Tags.of("common1", "commonValue1");
Publisher<Object> publisher = Operators::complete;
Tags resolvedTags = MicrometerMeterListenerConfiguration.resolveTags(publisher, defaultTags);
assertThat(resolvedTags.stream().map(Object::toString)).containsExactly("tag(common1=commonValue1)");
}
}
|
MicrometerMeterListenerConfigurationTest
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/ognlstatic/OgnlStaticTest.java
|
{
"start": 1108,
"end": 2761
}
|
class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
// create a SqlSessionFactory
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/ognlstatic/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/ognlstatic/CreateDB.sql");
}
/**
* This is the log output.
* <p>
* DEBUG [main] - ooo Using Connection [org.hsqldb.jdbc.JDBCConnection@5ae1a5c7]
* <p>
* DEBUG [main] - ==> Preparing: SELECT * FROM users WHERE name IN (?) AND id = ?
* <p>
* DEBUG [main] - ==> Parameters: 1(Integer), 1(Integer)
* <p>
* There are two parameter mappings but DefaultParameterHandler maps them both to input parameter (integer)
*/
@Test // see issue #448
void shouldGetAUserStatic() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
User user = mapper.getUserStatic(1);
Assertions.assertNotNull(user);
Assertions.assertEquals("User1", user.getName());
}
}
@Tag("RequireIllegalAccess")
@Test // see issue #61 (gh)
void shouldGetAUserWithIfNode() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
User user = mapper.getUserIfNode("User1");
Assertions.assertEquals("User1", user.getName());
}
}
}
|
OgnlStaticTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/issues/MyPackageScopedBean.java
|
{
"start": 844,
"end": 1029
}
|
class ____ {
String doSomething(String body) {
return "Hello " + body;
}
private String doSomethingElse(String foo) {
return "foo";
}
}
|
MyPackageScopedBean
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/JavaTimeDefaultTimeZoneTest.java
|
{
"start": 3655,
"end": 4147
}
|
class ____ {
// BUG: Diagnostic matches: REPLACEME
LocalDateTime now = LocalDateTime.now();
LocalDateTime nowWithZone = LocalDateTime.now(systemDefault());
}
""")
.doTest();
}
@Test
public void monthDay() {
helper
.addSourceLines(
"TestClass.java",
"""
import static java.time.ZoneId.systemDefault;
import java.time.MonthDay;
public
|
TestClass
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/aot/CodeWarningsTests.java
|
{
"start": 1888,
"end": 8892
}
|
class ____ {
private static final TestCompiler TEST_COMPILER = TestCompiler.forSystem()
.withCompilerOptions("-Xlint:all", "-Werror");
private final CodeWarnings codeWarnings = new CodeWarnings();
private final TestGenerationContext generationContext = new TestGenerationContext();
@Test
void registerNoWarningDoesNotIncludeAnnotationOnMethod() {
compileWithMethod(method -> {
this.codeWarnings.suppress(method);
method.addStatement("$T bean = $S", String.class, "Hello");
}, compiled -> assertThat(compiled.getSourceFile()).doesNotContain("@SuppressWarnings"));
}
@Test
void registerNoWarningDoesNotIncludeAnnotationOnType() {
compile(type -> {
this.codeWarnings.suppress(type);
type.addField(FieldSpec.builder(String.class, "type").build());
}, compiled -> assertThat(compiled.getSourceFile()).doesNotContain("@SuppressWarnings"));
}
@Test
@SuppressWarnings("deprecation")
void registerWarningSuppressesItOnMethod() {
this.codeWarnings.register("deprecation");
compileWithMethod(method -> {
this.codeWarnings.suppress(method);
method.addStatement("$T bean = new $T()", DeprecatedBean.class, DeprecatedBean.class);
}, compiled -> assertThat(compiled.getSourceFile()).contains("@SuppressWarnings(\"deprecation\")"));
}
@Test
@SuppressWarnings("deprecation")
void registerWarningSuppressesItOnType() {
this.codeWarnings.register("deprecation");
compile(type -> {
this.codeWarnings.suppress(type);
type.addField(FieldSpec.builder(DeprecatedBean.class, "bean").build());
}, compiled -> assertThat(compiled.getSourceFile())
.contains("@SuppressWarnings(\"deprecation\")"));
}
@Test
@SuppressWarnings({ "deprecation", "removal" })
void registerSeveralWarningsSuppressesThemOnMethod() {
this.codeWarnings.register("deprecation");
this.codeWarnings.register("removal");
compileWithMethod(method -> {
this.codeWarnings.suppress(method);
method.addStatement("$T bean = new $T()", DeprecatedBean.class, DeprecatedBean.class);
method.addStatement("$T another = new $T()", DeprecatedForRemovalBean.class, DeprecatedForRemovalBean.class);
}, compiled -> assertThat(compiled.getSourceFile()).contains("@SuppressWarnings({ \"deprecation\", \"removal\" })"));
}
@Test
@SuppressWarnings({ "deprecation", "removal" })
void registerSeveralWarningsSuppressesThemOnType() {
this.codeWarnings.register("deprecation");
this.codeWarnings.register("removal");
compile(type -> {
this.codeWarnings.suppress(type);
type.addField(FieldSpec.builder(DeprecatedBean.class, "bean").build());
type.addField(FieldSpec.builder(DeprecatedForRemovalBean.class, "another").build());
}, compiled -> assertThat(compiled.getSourceFile())
.contains("@SuppressWarnings({ \"deprecation\", \"removal\" })"));
}
@Test
@SuppressWarnings("deprecation")
void detectDeprecationOnAnnotatedElementWithDeprecated() {
this.codeWarnings.detectDeprecation(DeprecatedBean.class);
assertThat(this.codeWarnings.getWarnings()).containsOnly("deprecation");
}
@Test
@SuppressWarnings("deprecation")
void detectDeprecationOnAnnotatedElementWhoseEnclosingElementIsDeprecated() {
this.codeWarnings.detectDeprecation(DeprecatedBean.Nested.class);
assertThat(this.codeWarnings.getWarnings()).containsExactly("deprecation");
}
@Test
@SuppressWarnings("removal")
void detectDeprecationOnAnnotatedElementWithDeprecatedForRemoval() {
this.codeWarnings.detectDeprecation(DeprecatedForRemovalBean.class);
assertThat(this.codeWarnings.getWarnings()).containsOnly("removal");
}
@Test
@SuppressWarnings("removal")
void detectDeprecationOnAnnotatedElementWhoseEnclosingElementIsDeprecatedForRemoval() {
this.codeWarnings.detectDeprecation(DeprecatedForRemovalBean.Nested.class);
assertThat(this.codeWarnings.getWarnings()).containsExactly("removal");
}
@ParameterizedTest
@MethodSource("resolvableTypesWithDeprecated")
void detectDeprecationOnResolvableTypeWithDeprecated(ResolvableType resolvableType) {
this.codeWarnings.detectDeprecation(resolvableType);
assertThat(this.codeWarnings.getWarnings()).containsExactly("deprecation");
}
@SuppressWarnings("deprecation")
static Stream<Arguments> resolvableTypesWithDeprecated() {
Class<?> deprecatedBean = DeprecatedBean.class;
Class<?> nested = DeprecatedBean.Nested.class;
return Stream.of(
Arguments.of(ResolvableType.forClass(deprecatedBean)),
Arguments.of(ResolvableType.forClass(nested)),
Arguments.of(ResolvableType.forClassWithGenerics(GenericBean.class, deprecatedBean)),
Arguments.of(ResolvableType.forClassWithGenerics(GenericBean.class, nested)),
Arguments.of(ResolvableType.forClassWithGenerics(GenericBean.class,
ResolvableType.forClassWithGenerics(GenericBean.class, deprecatedBean))),
Arguments.of(ResolvableType.forClassWithGenerics(GenericBean.class,
ResolvableType.forClassWithGenerics(GenericBean.class, nested)))
);
}
@ParameterizedTest
@MethodSource("resolvableTypesWithDeprecatedForRemoval")
void detectDeprecationOnResolvableTypeWithDeprecatedForRemoval(ResolvableType resolvableType) {
this.codeWarnings.detectDeprecation(resolvableType);
assertThat(this.codeWarnings.getWarnings()).containsExactly("removal");
}
@SuppressWarnings("removal")
static Stream<Arguments> resolvableTypesWithDeprecatedForRemoval() {
    // Mirror of resolvableTypesWithDeprecated, using the forRemoval variant:
    // direct use, nested type, generic argument, and doubly-nested generics.
    Class<?> deprecatedBean = DeprecatedForRemovalBean.class;
    Class<?> nested = DeprecatedForRemovalBean.Nested.class;
    ResolvableType direct = ResolvableType.forClass(deprecatedBean);
    ResolvableType directNested = ResolvableType.forClass(nested);
    ResolvableType generic = ResolvableType.forClassWithGenerics(GenericBean.class, deprecatedBean);
    ResolvableType genericNested = ResolvableType.forClassWithGenerics(GenericBean.class, nested);
    ResolvableType deepGeneric = ResolvableType.forClassWithGenerics(GenericBean.class,
            ResolvableType.forClassWithGenerics(GenericBean.class, deprecatedBean));
    ResolvableType deepGenericNested = ResolvableType.forClassWithGenerics(GenericBean.class,
            ResolvableType.forClassWithGenerics(GenericBean.class, nested));
    return Stream.of(direct, directNested, generic, genericNested, deepGeneric, deepGenericNested)
            .map(Arguments::of);
}
@Test
void toStringIncludesWarnings() {
    // Every registered warning must show up, comma-separated, inside the
    // "CodeWarnings[...]" rendering, in registration order.
    this.codeWarnings.register("deprecation");
    this.codeWarnings.register("rawtypes");
    String expected = "CodeWarnings[deprecation, rawtypes]";
    assertThat(this.codeWarnings).hasToString(expected);
}
private void compileWithMethod(Consumer<Builder> method, Consumer<Compiled> result) {
    // Wrap the caller-supplied method customizer in a public type exposing a
    // single public "apply" method, then delegate compilation to compile(...).
    compile(type -> {
        type.addModifiers(Modifier.PUBLIC);
        Builder apply = MethodSpec.methodBuilder("apply").addModifiers(Modifier.PUBLIC);
        method.accept(apply);
        type.addMethod(apply.build());
    }, result);
}
private void compile(Consumer<TypeSpec.Builder> type, Consumer<Compiled> result) {
    // Register a deferred type under the "TestCode" feature, let the caller
    // shape it, flush all generated sources, then hand the compiled output
    // to the caller's assertion callback.
    DeferredTypeBuilder typeBuilder = new DeferredTypeBuilder();
    this.generationContext.getGeneratedClasses().addForFeature("TestCode", typeBuilder);
    typeBuilder.set(type);
    // writeGeneratedContent() must run before compiling so the sources exist on disk.
    this.generationContext.writeGeneratedContent();
    TEST_COMPILER.with(this.generationContext).compile(result);
}
}
|
CodeWarningsTests
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/MonoFlatMapTest.java
|
{
"start": 1015,
"end": 3954
}
|
class ____ {

    /** flatMap over hidden (non-fuseable) Monos still maps 1 -> 2 and completes cleanly. */
    @Test
    public void normalHidden() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();
        // hide() strips fusion markers so the plain (non-optimized) operator path runs.
        Mono.just(1).hide().flatMap(v -> Mono.just(2).hide()).subscribe(ts);
        ts.assertValues(2)
                .assertComplete()
                .assertNoError();
    }

    /** Cancelling downstream must propagate cancellation up to the source publisher. */
    @Test
    public void cancel() {
        TestPublisher<String> cancelTester = TestPublisher.create();
        StepVerifier.create(cancelTester.mono()
                .flatMap(s -> Mono.just(s.length())))
                .thenCancel()
                .verify();
        cancelTester.assertCancelled();
    }

    /** The operator itself advertises a SYNC run style via Scannable. */
    @Test
    public void scanOperator(){
        MonoFlatMap<String, Integer> test = new MonoFlatMap<>(Mono.just("foo"), s -> Mono.just(1));
        assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
    }

    /**
     * Scans the main subscriber: parent/actual wiring, zero prefetch, and the
     * TERMINATED attribute flipping only after a terminal signal.
     * NOTE: the order matters — onSubscribe must precede the PARENT scan, and
     * onError must precede the final TERMINATED check.
     */
    @Test
    public void scanMain() {
        CoreSubscriber<Integer> actual = new LambdaMonoSubscriber<>(null, e -> {}, null, null);
        MonoFlatMap.FlatMapMain<String, Integer> test = new MonoFlatMap.FlatMapMain<>(
                actual, s -> Mono.just(s.length()));
        Subscription parent = Operators.emptySubscription();
        test.onSubscribe(parent);
        assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(0);
        assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
        assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
        assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
        assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
        test.onError(new IllegalStateException("boom"));
        assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
    }

    /** Same lifecycle scan assertions for the inner (FlatMapInner) subscriber. */
    @Test
    public void scanInner() {
        CoreSubscriber<Integer> actual = new LambdaMonoSubscriber<>(null, e -> {}, null, null);
        MonoFlatMap.FlatMapMain<String, Integer> main = new MonoFlatMap.FlatMapMain<>(actual, s
                -> Mono.just(s.length()));
        MonoFlatMap.FlatMapInner<Integer> test = new MonoFlatMap.FlatMapInner<>(main);
        Subscription innerSubscription = Operators.emptySubscription();
        test.onSubscribe(innerSubscription);
        assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(innerSubscription);
        assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(main);
        assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
        assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
        assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
        test.onError(new IllegalStateException("boom"));
        assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
    }

    /** A checked exception emitted via Mono.error flows through flatMap unwrapped. */
    @Test
    public void noWrappingOfCheckedExceptions() {
        Mono.just("single")
                .flatMap(x -> Mono.error(new NoSuchMethodException()))
                .as(StepVerifier::create)
                .expectError(NoSuchMethodException.class)
                .verify();
    }

    /** Same as above, exercised on the non-fuseable (hidden) path. */
    @Test
    public void noWrappingOfCheckedExceptions_hide() {
        Mono.just("single")
                .hide()
                .flatMap(x -> Mono.error(new NoSuchMethodException()))
                .as(StepVerifier::create)
                .expectError(NoSuchMethodException.class)
                .verify();
    }
}
|
MonoFlatMapTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/naturalid/NaturalIdAndAssociationTest.java
|
{
"start": 2202,
"end": 2626
}
|
class ____ {
private BigDecimal quantity;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(referencedColumnName = "isoCode", nullable = false)
private ZCurrencyEntity1 currency;
public Amount() {
}
public Amount(BigDecimal quantity, ZCurrencyEntity1 currency) {
this.quantity = quantity;
this.currency = currency;
}
}
@Entity(name = "ZCurrencyEntity")
@Table(name = "CURRENCY")
public static
|
Amount
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/support/SerializableRequestDto.java
|
{
"start": 889,
"end": 1222
}
|
/**
 * Simple immutable, serializable request DTO used by tests.
 */
class SerializableRequestDto implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Customer name carried by the request; public final for direct test access. */
    public final String customerName;

    /** No-arg constructor; falls back to the "Default" customer name. */
    public SerializableRequestDto() {
        this("Default");
    }

    public SerializableRequestDto(String customerName) {
        this.customerName = customerName;
    }
}
|
SerializableRequestDto
|
java
|
greenrobot__EventBus
|
EventBus/src/org/greenrobot/eventbus/SubscriberMethodFinder.java
|
{
"start": 1005,
"end": 1059
}
|
class ____ {
/*
* In newer
|
SubscriberMethodFinder
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/integration/MockitoSpyBeanAndSpringAopProxyIntegrationTests.java
|
{
"start": 2454,
"end": 7041
}
|
class ____ {
@MockitoSpyBean
DateService dateService;
// Runs before every test invocation, including each repetition of a @RepeatedTest.
@BeforeEach
void resetCache() {
    // We have to clear the "test" cache before each test. Otherwise, method
    // invocations on the Spring AOP proxy will never make it to the Mockito spy.
    dateService.clearCache();
}
/**
 * Stubbing and verification for a Mockito spy that is wrapped in a Spring AOP
 * proxy should always work when performed via the ultimate target of the Spring
 * AOP proxy (i.e., the actual spy instance).
 */
// We need to run this test at least twice to ensure the Mockito spy can be reused
// across test method invocations without using @DirtiesContext.
@RepeatedTest(2)
void stubAndVerifyOnUltimateTargetOfSpringAopProxy() {
    assertThat(AopUtils.isAopProxy(dateService)).as("is Spring AOP proxy").isTrue();
    DateService spy = AopTestUtils.getUltimateTargetObject(dateService);
    // NOTE(review): the label says "ultimate target" but the proxy
    // (dateService) is asserted rather than the freshly extracted spy —
    // confirm whether this should read assertIsSpy(spy, "ultimate target").
    assertIsSpy(dateService, "ultimate target");
    given(spy.getDate(false)).willReturn(1L);
    Long date = dateService.getDate(false);
    assertThat(date).isOne();
    given(spy.getDate(false)).willReturn(2L);
    date = dateService.getDate(false);
    assertThat(date).isEqualTo(1L); // 1L instead of 2L, because the AOP proxy caches the original value.
    // Each of the following verifies times(1), because the AOP proxy caches the
    // original value and does not delegate to the spy on subsequent invocations.
    verify(spy, times(1)).getDate(false);
    verify(spy, times(1)).getDate(eq(false));
    verify(spy, times(1)).getDate(anyBoolean());
}
/**
 * Verification for a Mockito spy that is wrapped in a Spring AOP proxy should
 * always work when performed via the Spring AOP proxy. However, stubbing
 * does not currently work via the Spring AOP proxy.
 *
 * <p>Consequently, this test method supplies the ultimate target of the Spring
 * AOP proxy to stubbing calls, while supplying the Spring AOP proxy to verification
 * calls.
 */
// We need to run this test at least twice to ensure the Mockito spy can be reused
// across test method invocations without using @DirtiesContext.
@RepeatedTest(2)
void stubOnUltimateTargetAndVerifyOnSpringAopProxy() {
    assertThat(AopUtils.isAopProxy(dateService)).as("is Spring AOP proxy").isTrue();
    assertIsSpy(dateService, "Spring AOP proxy");
    // Stub on the unwrapped spy: stubbing through the proxy does not work (see javadoc).
    DateService spy = AopTestUtils.getUltimateTargetObject(dateService);
    given(spy.getDate(false)).willReturn(1L);
    Long date = dateService.getDate(false);
    assertThat(date).isOne();
    given(spy.getDate(false)).willReturn(2L);
    date = dateService.getDate(false);
    assertThat(date).isEqualTo(1L); // 1L instead of 2L, because the AOP proxy caches the original value.
    // Each of the following verifies times(1), because the AOP proxy caches the
    // original value and does not delegate to the spy on subsequent invocations.
    verify(dateService, times(1)).getDate(false);
    verify(dateService, times(1)).getDate(eq(false));
    verify(dateService, times(1)).getDate(anyBoolean());
}
/**
 * Ideally, both stubbing and verification should work transparently when a Mockito
 * spy is wrapped in a Spring AOP proxy. However, Mockito currently does not provide
 * support for transparent stubbing of a proxied spy. For example, implementing a
 * custom {@link org.mockito.plugins.MockResolver} will not result in successful
 * stubbing for a proxied mock.
 */
@Disabled("Disabled until Mockito provides support for transparent stubbing of a proxied spy")
// We need to run this test at least twice to ensure the Mockito spy can be reused
// across test method invocations without using @DirtiesContext.
@RepeatedTest(2)
void stubAndVerifyDirectlyOnSpringAopProxy() throws Exception {
    assertThat(AopUtils.isCglibProxy(dateService)).as("is Spring AOP CGLIB proxy").isTrue();
    assertIsSpy(dateService);
    // doReturn(...) avoids invoking the real method while stubbing through the proxy.
    doReturn(1L).when(dateService).getDate(false);
    Long date = dateService.getDate(false);
    assertThat(date).isOne();
    doReturn(2L).when(dateService).getDate(false);
    date = dateService.getDate(false);
    assertThat(date).isEqualTo(1L); // 1L instead of 2L, because the AOP proxy caches the original value.
    // Each of the following verifies times(1), because the AOP proxy caches the
    // original value and does not delegate to the spy on subsequent invocations.
    verify(dateService, times(1)).getDate(false);
    verify(dateService, times(1)).getDate(eq(false));
    verify(dateService, times(1)).getDate(anyBoolean());
}
@Configuration(proxyBeanMethods = false)
@EnableCaching(proxyTargetClass = true)
@Import(DateService.class)
static
|
MockitoSpyBeanAndSpringAopProxyIntegrationTests
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.