language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
netty__netty
|
handler/src/test/java/io/netty/handler/ssl/OpenSslTestUtils.java
|
{
"start": 734,
"end": 950
}
|
class ____ {
private OpenSslTestUtils() {
}
static void checkShouldUseKeyManagerFactory() {
assumeTrue(OpenSsl.supportsKeyManagerFactory() && OpenSsl.useKeyManagerFactory());
}
}
|
OpenSslTestUtils
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/masterreplica/StaticMasterReplicaTopologyProvider.java
|
{
"start": 1151,
"end": 4169
}
|
class ____ implements TopologyProvider {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(StaticMasterReplicaTopologyProvider.class);
private final RedisClient redisClient;
private final Iterable<RedisURI> redisURIs;
public StaticMasterReplicaTopologyProvider(RedisClient redisClient, Iterable<RedisURI> redisURIs) {
LettuceAssert.notNull(redisClient, "RedisClient must not be null");
LettuceAssert.notNull(redisURIs, "RedisURIs must not be null");
LettuceAssert.notNull(redisURIs.iterator().hasNext(), "RedisURIs must not be empty");
this.redisClient = redisClient;
this.redisURIs = redisURIs;
}
@Override
@SuppressWarnings("rawtypes")
public List<RedisNodeDescription> getNodes() {
RedisURI next = redisURIs.iterator().next();
try {
return getNodesAsync().get(next.getTimeout().toMillis(), TimeUnit.MILLISECONDS);
} catch (Exception e) {
throw Exceptions.bubble(e);
}
}
@Override
public CompletableFuture<List<RedisNodeDescription>> getNodesAsync() {
List<StatefulRedisConnection<String, String>> connections = new CopyOnWriteArrayList<>();
Flux<RedisURI> uris = Flux.fromIterable(redisURIs);
Mono<List<RedisNodeDescription>> nodes = uris.flatMap(uri -> getNodeDescription(connections, uri)).collectList()
.flatMap((nodeDescriptions) -> {
if (nodeDescriptions.isEmpty()) {
return Mono.error(new RedisConnectionException(
String.format("Failed to connect to at least one node in %s", redisURIs)));
}
return Mono.just(nodeDescriptions);
});
return nodes.toFuture();
}
private Mono<RedisNodeDescription> getNodeDescription(List<StatefulRedisConnection<String, String>> connections,
RedisURI uri) {
return Mono.fromCompletionStage(redisClient.connectAsync(StringCodec.UTF8, uri)) //
.onErrorResume(t -> {
logger.warn("Cannot connect to {}", uri, t);
return Mono.empty();
}) //
.doOnNext(connections::add) //
.flatMap(connection -> {
Mono<RedisNodeDescription> instance = getNodeDescription(uri, connection);
return instance.flatMap(it -> ResumeAfter.close(connection).thenEmit(it)).doFinally(s -> {
connections.remove(connection);
});
});
}
private static Mono<RedisNodeDescription> getNodeDescription(RedisURI uri,
StatefulRedisConnection<String, String> connection) {
return connection.reactive().role().collectList().map(RoleParser::parse)
.map(it -> new RedisMasterReplicaNode(uri.getHost(), uri.getPort(), uri, it.getRole()));
}
}
|
StaticMasterReplicaTopologyProvider
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceFields.java
|
{
"start": 329,
"end": 862
}
|
class ____ {
public static final String LOCATION = "location";
public static final String PROJECT_ID = "project_id";
public static final String URL_SETTING_NAME = "url";
public static final String STREAMING_URL_SETTING_NAME = "streaming_url";
public static final String PROVIDER_SETTING_NAME = "provider";
/**
* According to https://cloud.google.com/vertex-ai/docs/quotas#text-embedding-limits the limit is `250`.
*/
static final int EMBEDDING_MAX_BATCH_SIZE = 250;
}
|
GoogleVertexAiServiceFields
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/bytecode/internal/bytebuddy/ByteBuddyBasicProxyFactoryTest.java
|
{
"start": 541,
"end": 2163
}
|
class ____ {
private static final BasicProxyFactoryImpl BASIC_PROXY_FACTORY = new BasicProxyFactoryImpl( Entity.class, null, new ByteBuddyState() );
@Test
@JiraKey(value = "HHH-12786")
public void testEqualsHashCode() {
Object entityProxy = BASIC_PROXY_FACTORY.getProxy();
assertTrue( entityProxy.equals( entityProxy ) );
assertNotEquals(0, entityProxy.hashCode() );
Object otherEntityProxy = BASIC_PROXY_FACTORY.getProxy();
assertFalse( entityProxy.equals( otherEntityProxy ) );
}
@Test
@JiraKey(value = "HHH-12786")
public void testToString() {
Object entityProxy = BASIC_PROXY_FACTORY.getProxy();
assertTrue( entityProxy.toString().contains( "HibernateBasicProxy" ) );
}
@Test
@JiraKey(value = "HHH-12786")
public void testGetterSetter() {
Entity entityProxy = (Entity) BASIC_PROXY_FACTORY.getProxy();
entityProxy.setBool( true );
assertTrue( entityProxy.isBool() );
entityProxy.setBool( false );
assertFalse( entityProxy.isBool() );
entityProxy.setString( "John Irving" );
assertEquals( "John Irving", entityProxy.getString() );
}
@Test
@JiraKey(value = "HHH-12786")
public void testNonGetterSetterMethod() {
Entity entityProxy = (Entity) BASIC_PROXY_FACTORY.getProxy();
assertNull( entityProxy.otherMethod() );
}
@Test
@JiraKey(value = "HHH-13915")
public void testProxiesDoNotShareState() {
Entity entityAProxy = (Entity) BASIC_PROXY_FACTORY.getProxy();
entityAProxy.setString( "John Irving" );
Entity entityBProxy = (Entity) BASIC_PROXY_FACTORY.getProxy();
assertNull( entityBProxy.getString() );
}
public static
|
ByteBuddyBasicProxyFactoryTest
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/NestedTestClassesTests.java
|
{
"start": 17658,
"end": 17732
}
|
class ____ {
@Test
void test() {
}
}
}
static
|
AbstractInnerClass
|
java
|
apache__rocketmq
|
container/src/main/java/org/apache/rocketmq/container/IBrokerContainer.java
|
{
"start": 1371,
"end": 4135
}
|
interface ____ {
/**
* Start broker container
*/
void start() throws Exception;
/**
* Shutdown broker container and all the brokers inside.
*/
void shutdown();
/**
* Add a broker to this container with specific broker config.
*
* @param configContext the specified config context
* @return the added BrokerController or null if the broker already exists
* @throws Exception when initialize broker
*/
BrokerController addBroker(ConfigContext configContext) throws Exception;
/**
* Remove the broker from this container associated with the specific broker identity
*
* @param brokerIdentity the specific broker identity
* @return the removed BrokerController or null if the broker doesn't exists
*/
BrokerController removeBroker(BrokerIdentity brokerIdentity) throws Exception;
/**
* Return the broker controller associated with the specific broker identity
*
* @param brokerIdentity the specific broker identity
* @return the associated messaging broker or null
*/
BrokerController getBroker(BrokerIdentity brokerIdentity);
/**
* Return all the master brokers belong to this container
*
* @return the master broker list
*/
Collection<InnerBrokerController> getMasterBrokers();
/**
* Return all the slave brokers belong to this container
*
* @return the slave broker list
*/
Collection<InnerSalveBrokerController> getSlaveBrokers();
/**
* Return all broker controller in this container
*
* @return all broker controller
*/
List<BrokerController> getBrokerControllers();
/**
* Return the address of broker container.
*
* @return broker container address.
*/
String getBrokerContainerAddr();
/**
* Peek the first master broker in container.
*
* @return the first master broker in container
*/
BrokerController peekMasterBroker();
/**
* Return the config of the broker container
*
* @return the broker container config
*/
BrokerContainerConfig getBrokerContainerConfig();
/**
* Get netty server config.
*
* @return netty server config
*/
NettyServerConfig getNettyServerConfig();
/**
* Get netty client config.
*
* @return netty client config
*/
NettyClientConfig getNettyClientConfig();
/**
* Return the shared BrokerOuterAPI
*
* @return the shared BrokerOuterAPI
*/
BrokerOuterAPI getBrokerOuterAPI();
/**
* Return the shared RemotingServer
*
* @return the shared RemotingServer
*/
RemotingServer getRemotingServer();
}
|
IBrokerContainer
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
|
{
"start": 12917,
"end": 33241
}
|
enum ____ {
READ,
READ_LINK,
WRITE, // disallows snapshot paths.
WRITE_LINK,
CREATE, // like write, but also blocks invalid path names.
CREATE_LINK;
};
FSDirectory(FSNamesystem ns, Configuration conf) throws IOException {
this.inodeId = new INodeId();
rootDir = createRoot(ns);
inodeMap = INodeMap.newInstance(rootDir);
this.isPermissionEnabled = conf.getBoolean(
DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY,
DFSConfigKeys.DFS_PERMISSIONS_ENABLED_DEFAULT);
this.isPermissionContentSummarySubAccess = conf.getBoolean(
DFSConfigKeys.DFS_PERMISSIONS_CONTENT_SUMMARY_SUBACCESS_KEY,
DFSConfigKeys.DFS_PERMISSIONS_CONTENT_SUMMARY_SUBACCESS_DEFAULT);
this.fsOwnerShortUserName =
UserGroupInformation.getCurrentUser().getShortUserName();
this.supergroup = conf.get(
DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_KEY,
DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT);
this.aclsEnabled = conf.getBoolean(
DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY,
DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_DEFAULT);
LOG.info("ACLs enabled? " + aclsEnabled);
this.posixAclInheritanceEnabled = conf.getBoolean(
DFSConfigKeys.DFS_NAMENODE_POSIX_ACL_INHERITANCE_ENABLED_KEY,
DFSConfigKeys.DFS_NAMENODE_POSIX_ACL_INHERITANCE_ENABLED_DEFAULT);
LOG.info("POSIX ACL inheritance enabled? " + posixAclInheritanceEnabled);
this.xattrsEnabled = conf.getBoolean(
DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY,
DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_DEFAULT);
LOG.info("XAttrs enabled? " + xattrsEnabled);
this.xattrMaxSize = (int) conf.getLongBytes(
DFSConfigKeys.DFS_NAMENODE_MAX_XATTR_SIZE_KEY,
DFSConfigKeys.DFS_NAMENODE_MAX_XATTR_SIZE_DEFAULT);
Preconditions.checkArgument(xattrMaxSize > 0,
"The maximum size of an xattr should be > 0: (%s).",
DFSConfigKeys.DFS_NAMENODE_MAX_XATTR_SIZE_KEY);
Preconditions.checkArgument(xattrMaxSize <=
DFSConfigKeys.DFS_NAMENODE_MAX_XATTR_SIZE_HARD_LIMIT,
"The maximum size of an xattr should be <= maximum size"
+ " hard limit " + DFSConfigKeys.DFS_NAMENODE_MAX_XATTR_SIZE_HARD_LIMIT
+ ": (%s).", DFSConfigKeys.DFS_NAMENODE_MAX_XATTR_SIZE_KEY);
this.accessTimePrecision = conf.getLong(
DFS_NAMENODE_ACCESSTIME_PRECISION_KEY,
DFS_NAMENODE_ACCESSTIME_PRECISION_DEFAULT);
this.quotaByStorageTypeEnabled =
conf.getBoolean(DFS_QUOTA_BY_STORAGETYPE_ENABLED_KEY,
DFS_QUOTA_BY_STORAGETYPE_ENABLED_DEFAULT);
int configuredLimit = conf.getInt(
DFSConfigKeys.DFS_LIST_LIMIT, DFSConfigKeys.DFS_LIST_LIMIT_DEFAULT);
this.lsLimit = configuredLimit>0 ?
configuredLimit : DFSConfigKeys.DFS_LIST_LIMIT_DEFAULT;
this.contentCountLimit = conf.getInt(
DFSConfigKeys.DFS_CONTENT_SUMMARY_LIMIT_KEY,
DFSConfigKeys.DFS_CONTENT_SUMMARY_LIMIT_DEFAULT);
this.contentSleepMicroSec = conf.getLong(
DFSConfigKeys.DFS_CONTENT_SUMMARY_SLEEP_MICROSEC_KEY,
DFSConfigKeys.DFS_CONTENT_SUMMARY_SLEEP_MICROSEC_DEFAULT);
// filesystem limits
this.maxComponentLength = (int) conf.getLongBytes(
DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY,
DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_DEFAULT);
this.maxDirItems = conf.getInt(
DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY,
DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_DEFAULT);
this.inodeXAttrsLimit = conf.getInt(
DFSConfigKeys.DFS_NAMENODE_MAX_XATTRS_PER_INODE_KEY,
DFSConfigKeys.DFS_NAMENODE_MAX_XATTRS_PER_INODE_DEFAULT);
this.protectedDirectories = parseProtectedDirectories(conf);
this.isProtectedSubDirectoriesEnable = conf.getBoolean(
DFS_PROTECTED_SUBDIRECTORIES_ENABLE,
DFS_PROTECTED_SUBDIRECTORIES_ENABLE_DEFAULT);
this.accessControlEnforcerReportingThresholdMs = conf.getLong(
DFS_NAMENODE_ACCESS_CONTROL_ENFORCER_REPORTING_THRESHOLD_MS_KEY,
DFS_NAMENODE_ACCESS_CONTROL_ENFORCER_REPORTING_THRESHOLD_MS_DEFAULT);
Preconditions.checkArgument(this.inodeXAttrsLimit >= 0,
"Cannot set a negative limit on the number of xattrs per inode (%s).",
DFSConfigKeys.DFS_NAMENODE_MAX_XATTRS_PER_INODE_KEY);
Preconditions.checkArgument(
maxDirItems > 0 && maxDirItems <= MAX_DIR_ITEMS, "Cannot set "
+ DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY
+ " to a value less than 1 or greater than " + MAX_DIR_ITEMS);
int threshold = conf.getInt(
DFSConfigKeys.DFS_NAMENODE_NAME_CACHE_THRESHOLD_KEY,
DFSConfigKeys.DFS_NAMENODE_NAME_CACHE_THRESHOLD_DEFAULT);
NameNode.LOG.info("Caching file names occurring more than " + threshold
+ " times");
nameCache = new NameCache<ByteArray>(threshold);
namesystem = ns;
this.editLog = ns.getEditLog();
ezManager = new EncryptionZoneManager(this, conf);
this.quotaInitThreads = conf.getInt(
DFSConfigKeys.DFS_NAMENODE_QUOTA_INIT_THREADS_KEY,
DFSConfigKeys.DFS_NAMENODE_QUOTA_INIT_THREADS_DEFAULT);
initUsersToBypassExtProvider(conf);
}
private void initUsersToBypassExtProvider(Configuration conf) {
String[] bypassUsers = conf.getTrimmedStrings(
DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_KEY,
DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_DEFAULT);
for(int i = 0; i < bypassUsers.length; i++) {
String tmp = bypassUsers[i].trim();
if (!tmp.isEmpty()) {
if (usersToBypassExtAttrProvider == null) {
usersToBypassExtAttrProvider = new HashSet<String>();
}
LOG.info("Add user " + tmp + " to the list that will bypass external"
+ " attribute provider.");
usersToBypassExtAttrProvider.add(tmp);
}
}
}
/**
* Check if a given user is configured to bypass external attribute provider.
* @param user user principal
* @return true if the user is to bypass external attribute provider
*/
private boolean isUserBypassingExtAttrProvider(final String user) {
return (usersToBypassExtAttrProvider != null) &&
usersToBypassExtAttrProvider.contains(user);
}
/**
* Return attributeProvider or null if ugi is to bypass attributeProvider.
* @param ugi
* @return configured attributeProvider or null
*/
private INodeAttributeProvider getUserFilteredAttributeProvider(
UserGroupInformation ugi) {
if (attributeProvider == null ||
(ugi != null && isUserBypassingExtAttrProvider(ugi.getUserName()))) {
return null;
}
return attributeProvider;
}
/**
* Get HdfsFileStatuses of the reserved paths: .inodes and raw.
*
* @return Array of HdfsFileStatus
*/
HdfsFileStatus[] getReservedStatuses() {
Preconditions.checkNotNull(reservedStatuses, "reservedStatuses should "
+ " not be null. It is populated when FSNamesystem loads FS image."
+ " It has to be set at this time instead of initialization time"
+ " because CTime is loaded during FSNamesystem#loadFromDisk.");
return reservedStatuses;
}
/**
* Create HdfsFileStatuses of the reserved paths: .inodes and raw.
* These statuses are solely for listing purpose. All other operations
* on the reserved dirs are disallowed.
* Operations on sub directories are resolved by
* {@link FSDirectory#resolvePath(String, FSDirectory)}
* and conducted directly, without the need to check the reserved dirs.
*
* This method should only be invoked once during namenode initialization.
*
* @param cTime CTime of the file system
* @return Array of HdfsFileStatus
*/
void createReservedStatuses(long cTime) {
HdfsFileStatus inodes = new HdfsFileStatus.Builder()
.isdir(true)
.mtime(cTime)
.atime(cTime)
.perm(new FsPermission((short) 0770))
.group(supergroup)
.path(DOT_INODES)
.build();
HdfsFileStatus raw = new HdfsFileStatus.Builder()
.isdir(true)
.mtime(cTime)
.atime(cTime)
.perm(new FsPermission((short) 0770))
.group(supergroup)
.path(RAW)
.build();
reservedStatuses = new HdfsFileStatus[] {inodes, raw};
}
FSNamesystem getFSNamesystem() {
return namesystem;
}
/**
* Indicates whether the image loading is complete or not.
* @return true if image loading is complete, false otherwise
*/
public boolean isImageLoaded() {
return namesystem.isImageLoaded();
}
/**
* Parse configuration setting dfs.namenode.protected.directories to
* retrieve the set of protected directories.
*
* @param conf
* @return a TreeSet
*/
@VisibleForTesting
static SortedSet<String> parseProtectedDirectories(Configuration conf) {
return parseProtectedDirectories(conf
.getTrimmedStringCollection(FS_PROTECTED_DIRECTORIES));
}
/**
* Parse configuration setting dfs.namenode.protected.directories to retrieve
* the set of protected directories.
*
* @param protectedDirsString
* a comma separated String representing a bunch of paths.
* @return a TreeSet
*/
@VisibleForTesting
static SortedSet<String> parseProtectedDirectories(
final String protectedDirsString) {
return parseProtectedDirectories(StringUtils
.getTrimmedStringCollection(protectedDirsString));
}
private static SortedSet<String> parseProtectedDirectories(
final Collection<String> protectedDirs) {
// Normalize each input path to guard against administrator error.
return new TreeSet<>(
normalizePaths(protectedDirs, FS_PROTECTED_DIRECTORIES));
}
public SortedSet<String> getProtectedDirectories() {
return protectedDirectories;
}
public boolean isProtectedSubDirectoriesEnable() {
return isProtectedSubDirectoriesEnable;
}
/**
* Set directories that cannot be removed unless empty, even by an
* administrator.
*
* @param protectedDirsString
* comma separated list of protected directories
*/
String setProtectedDirectories(String protectedDirsString) {
if (protectedDirsString == null) {
protectedDirectories = new TreeSet<>();
} else {
protectedDirectories = parseProtectedDirectories(protectedDirsString);
}
return Joiner.on(",").skipNulls().join(protectedDirectories);
}
public void setMaxDirItems(int newVal) {
Preconditions.checkArgument(
newVal > 0 && newVal <= MAX_DIR_ITEMS, "Cannot set "
+ DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY
+ " to a value less than 1 or greater than " + MAX_DIR_ITEMS);
maxDirItems = newVal;
}
public int getMaxDirItems() {
return maxDirItems;
}
BlockManager getBlockManager() {
return getFSNamesystem().getBlockManager();
}
KeyProviderCryptoExtension getProvider() {
return getFSNamesystem().getProvider();
}
/** @return the root directory inode. */
public INodeDirectory getRoot() {
return rootDir;
}
public BlockStoragePolicySuite getBlockStoragePolicySuite() {
return getBlockManager().getStoragePolicySuite();
}
boolean isPermissionEnabled() {
return isPermissionEnabled;
}
boolean isAclsEnabled() {
return aclsEnabled;
}
boolean isPermissionContentSummarySubAccess() {
return isPermissionContentSummarySubAccess;
}
@VisibleForTesting
public boolean isPosixAclInheritanceEnabled() {
return posixAclInheritanceEnabled;
}
@VisibleForTesting
public void setPosixAclInheritanceEnabled(
boolean posixAclInheritanceEnabled) {
this.posixAclInheritanceEnabled = posixAclInheritanceEnabled;
}
boolean isXattrsEnabled() {
return xattrsEnabled;
}
int getXattrMaxSize() { return xattrMaxSize; }
boolean isAccessTimeSupported() {
return accessTimePrecision > 0;
}
long getAccessTimePrecision() {
return accessTimePrecision;
}
boolean isQuotaByStorageTypeEnabled() {
return quotaByStorageTypeEnabled;
}
int getLsLimit() {
return lsLimit;
}
int getContentCountLimit() {
return contentCountLimit;
}
long getContentSleepMicroSec() {
return contentSleepMicroSec;
}
int getInodeXAttrsLimit() {
return inodeXAttrsLimit;
}
FSEditLog getEditLog() {
return editLog;
}
/**
* Shutdown the filestore
*/
@Override
public void close() throws IOException {}
void markNameCacheInitialized() {
writeLock();
try {
nameCache.initialized();
} finally {
writeUnlock();
}
}
boolean shouldSkipQuotaChecks() {
return skipQuotaCheck;
}
/** Enable quota verification */
void enableQuotaChecks() {
skipQuotaCheck = false;
}
/** Disable quota verification */
void disableQuotaChecks() {
skipQuotaCheck = true;
}
/**
* Resolves a given path into an INodesInPath. All ancestor inodes that
* exist are validated as traversable directories. Symlinks in the ancestry
* will generate an UnresolvedLinkException. The returned IIP will be an
* accessible path that also passed additional sanity checks based on how
* the path will be used as specified by the DirOp.
* READ: Expands reserved paths and performs permission checks
* during traversal.
* WRITE: In addition to READ checks, ensures the path is not a
* snapshot path.
* CREATE: In addition to WRITE checks, ensures path does not contain
* illegal character sequences.
*
* @param pc A permission checker for traversal checks. Pass null for
* no permission checks.
* @param src The path to resolve.
* @param dirOp The {@link DirOp} that controls additional checks.
* @return if the path indicates an inode, return path after replacing up to
* {@code <inodeid>} with the corresponding path of the inode, else
* the path in {@code src} as is. If the path refers to a path in
* the "raw" directory, return the non-raw pathname.
* @throws FileNotFoundException
* @throws AccessControlException
* @throws ParentNotDirectoryException
* @throws UnresolvedLinkException
*/
@VisibleForTesting
public INodesInPath resolvePath(FSPermissionChecker pc, String src,
DirOp dirOp) throws UnresolvedLinkException, FileNotFoundException,
AccessControlException, ParentNotDirectoryException {
boolean isCreate = (dirOp == DirOp.CREATE || dirOp == DirOp.CREATE_LINK);
// prevent creation of new invalid paths
if (isCreate && !DFSUtil.isValidName(src)) {
throw new InvalidPathException("Invalid file name: " + src);
}
byte[][] components = INode.getPathComponents(src);
boolean isRaw = isReservedRawName(components);
components = resolveComponents(components, this);
INodesInPath iip = INodesInPath.resolve(rootDir, components, isRaw);
if (isPermissionEnabled && pc != null && isRaw) {
switch(dirOp) {
case READ_LINK:
case READ:
break;
default:
pc.checkSuperuserPrivilege(iip.getPath());
break;
}
}
// verify all ancestors are dirs and traversable. note that only
// methods that create new namespace items have the signature to throw
// PNDE
try {
checkTraverse(pc, iip, dirOp);
} catch (ParentNotDirectoryException pnde) {
if (!isCreate) {
throw new AccessControlException(pnde.getMessage());
}
throw pnde;
}
return iip;
}
/**
* This method should only be used from internal paths and not those provided
* directly by a user. It resolves a given path into an INodesInPath in a
* similar way to resolvePath(...), only traversal and permissions are not
* checked.
* @param src The path to resolve.
* @return if the path indicates an inode, return path after replacing up to
* {@code <inodeid>} with the corresponding path of the inode, else
* the path in {@code src} as is. If the path refers to a path in
* the "raw" directory, return the non-raw pathname.
* @throws FileNotFoundException
*/
public INodesInPath unprotectedResolvePath(String src)
throws FileNotFoundException {
byte[][] components = INode.getPathComponents(src);
boolean isRaw = isReservedRawName(components);
components = resolveComponents(components, this);
return INodesInPath.resolve(rootDir, components, isRaw);
}
INodesInPath resolvePath(FSPermissionChecker pc, String src, long fileId)
throws UnresolvedLinkException, FileNotFoundException,
AccessControlException, ParentNotDirectoryException {
// Older clients may not have given us an inode ID to work with.
// In this case, we have to try to resolve the path and hope it
// hasn't changed or been deleted since the file was opened for write.
INodesInPath iip;
if (fileId == HdfsConstants.GRANDFATHER_INODE_ID) {
iip = resolvePath(pc, src, DirOp.WRITE);
} else {
INode inode = getInode(fileId);
if (inode == null) {
iip = INodesInPath.fromComponents(INode.getPathComponents(src));
} else {
iip = INodesInPath.fromINode(inode);
}
}
return iip;
}
// this method can be removed after IIP is used more extensively
static String resolvePath(String src,
FSDirectory fsd) throws FileNotFoundException {
byte[][] pathComponents = INode.getPathComponents(src);
pathComponents = resolveComponents(pathComponents, fsd);
return DFSUtil.byteArray2PathString(pathComponents);
}
/**
* @return true if the path is a non-empty directory; otherwise, return false.
*/
public boolean isNonEmptyDirectory(INodesInPath inodesInPath) {
readLock();
try {
final INode inode = inodesInPath.getLastINode();
if (inode == null || !inode.isDirectory()) {
//not found or not a directory
return false;
}
final int s = inodesInPath.getPathSnapshotId();
return !inode.asDirectory().getChildrenList(s).isEmpty();
} finally {
readUnlock();
}
}
/**
* Check whether the filepath could be created
* @throws SnapshotAccessControlException if path is in RO snapshot
*/
boolean isValidToCreate(String src, INodesInPath iip)
throws SnapshotAccessControlException {
String srcs = normalizePath(src);
return srcs.startsWith("/") && !srcs.endsWith("/") &&
iip.getLastINode() == null;
}
/**
* Tell the block manager to update the replication factors when delete
* happens. Deleting a file or a snapshot might decrease the replication
* factor of the blocks as the blocks are always replicated to the highest
* replication factor among all snapshots.
*/
void updateReplicationFactor(Collection<UpdatedReplicationInfo> blocks) {
BlockManager bm = getBlockManager();
for (UpdatedReplicationInfo e : blocks) {
BlockInfo b = e.block();
bm.setReplication(b.getReplication(), e.targetReplication(), b);
}
}
/**
* Update the count of each directory with quota in the namespace.
* A directory's count is defined as the total number inodes in the tree
* rooted at the directory.
*
* This is an update of existing state of the filesystem and does not
* throw QuotaExceededException.
*/
void updateCountForQuota(int initThreads) {
writeLock();
try {
int threads = (initThreads < 1) ? 1 : initThreads;
LOG.info("Initializing quota with " + threads + " thread(s)");
long start = Time.monotonicNow();
QuotaCounts counts = new QuotaCounts.Builder().build();
ForkJoinPool p = new ForkJoinPool(threads);
RecursiveAction task = new InitQuotaTask(getBlockStoragePolicySuite(),
rootDir.getStoragePolicyID(), rootDir, counts);
p.execute(task);
task.join();
p.shutdown();
LOG.info("Quota initialization completed in " + (Time.monotonicNow() - start) +
" milliseconds\n" + counts);
} finally {
writeUnlock();
}
}
void updateCountForQuota() {
updateCountForQuota(quotaInitThreads);
}
/**
* parallel initialization using fork-join.
*/
private static
|
DirOp
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeOrNativeSourcesBuild.java
|
{
"start": 446,
"end": 774
}
|
class ____ implements BooleanSupplier {
private final NativeConfig nativeConfig;
public NativeOrNativeSourcesBuild(final NativeConfig nativeConfig) {
this.nativeConfig = nativeConfig;
}
@Override
public boolean getAsBoolean() {
return nativeConfig.enabled();
}
}
|
NativeOrNativeSourcesBuild
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java
|
{
"start": 4359,
"end": 5881
}
|
class ____ extends ActionResponse implements Writeable, ToXContentObject {
private Map<String, RollableIndexCaps> jobs = Collections.emptyMap();
public Response() {
}
public Response(Map<String, RollableIndexCaps> jobs) {
this.jobs = Objects.requireNonNull(jobs);
}
public Map<String, RollableIndexCaps> getJobs() {
return jobs;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeMap(jobs, StreamOutput::writeWriteable);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
for (Map.Entry<String, RollableIndexCaps> entry : jobs.entrySet()) {
entry.getValue().toXContent(builder, params);
}
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(jobs);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Response other = (Response) obj;
return Objects.equals(jobs, other.jobs);
}
@Override
public final String toString() {
return Strings.toString(this);
}
}
}
|
Response
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestResponseBodyMethodProcessorMockTests.java
|
{
"start": 3432,
"end": 20372
}
|
class ____ {
private HttpMessageConverter<String> stringMessageConverter = mock();
private HttpMessageConverter<Resource> resourceMessageConverter = mock();
private HttpMessageConverter<Object> resourceRegionMessageConverter = mock();
private RequestResponseBodyMethodProcessor processor;
private ModelAndViewContainer mavContainer = new ModelAndViewContainer();
private MockHttpServletRequest servletRequest = new MockHttpServletRequest();
private MockHttpServletResponse servletResponse = new MockHttpServletResponse();
private NativeWebRequest webRequest = new ServletWebRequest(servletRequest, servletResponse);
private MethodParameter paramRequestBodyString;
private MethodParameter paramInt;
private MethodParameter paramValidBean;
private MethodParameter paramStringNotRequired;
private MethodParameter paramOptionalString;
private MethodParameter returnTypeString;
private MethodParameter returnTypeInt;
private MethodParameter returnTypeStringProduces;
private MethodParameter returnTypeResource;
@BeforeEach
void setup() throws Exception {
given(stringMessageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
given(stringMessageConverter.getSupportedMediaTypes(any())).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
given(resourceMessageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.ALL));
given(resourceMessageConverter.getSupportedMediaTypes(any())).willReturn(Collections.singletonList(MediaType.ALL));
given(resourceRegionMessageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.ALL));
given(resourceRegionMessageConverter.getSupportedMediaTypes(any())).willReturn(Collections.singletonList(MediaType.ALL));
processor = new RequestResponseBodyMethodProcessor(
Arrays.asList(stringMessageConverter, resourceMessageConverter, resourceRegionMessageConverter));
servletRequest.setMethod("POST");
Method methodHandle1 = getClass().getMethod("handle1", String.class, int.class);
paramRequestBodyString = new MethodParameter(methodHandle1, 0);
paramInt = new MethodParameter(methodHandle1, 1);
paramValidBean = new MethodParameter(getClass().getMethod("handle2", SimpleBean.class), 0);
paramStringNotRequired = new MethodParameter(getClass().getMethod("handle3", String.class), 0);
paramOptionalString = new MethodParameter(getClass().getMethod("handle4", Optional.class), 0);
returnTypeString = new MethodParameter(methodHandle1, -1);
returnTypeInt = new MethodParameter(getClass().getMethod("handle5"), -1);
returnTypeStringProduces = new MethodParameter(getClass().getMethod("handle6"), -1);
returnTypeResource = new MethodParameter(getClass().getMethod("handle7"), -1);
}
@Test
void supportsParameter() {
assertThat(processor.supportsParameter(paramRequestBodyString)).as("RequestBody parameter not supported").isTrue();
assertThat(processor.supportsParameter(paramInt)).as("non-RequestBody parameter supported").isFalse();
}
@Test
void supportsReturnType() {
assertThat(processor.supportsReturnType(returnTypeString)).as("ResponseBody return type not supported").isTrue();
assertThat(processor.supportsReturnType(returnTypeInt)).as("non-ResponseBody return type supported").isFalse();
}
@Test
void resolveArgument() throws Exception {
MediaType contentType = MediaType.TEXT_PLAIN;
servletRequest.addHeader("Content-Type", contentType.toString());
String body = "Foo";
servletRequest.setContent(body.getBytes(StandardCharsets.UTF_8));
given(stringMessageConverter.canRead(String.class, contentType)).willReturn(true);
given(stringMessageConverter.read(eq(String.class), isA(HttpInputMessage.class))).willReturn(body);
Object result = processor.resolveArgument(paramRequestBodyString, mavContainer,
webRequest, new ValidatingBinderFactory());
assertThat(result).as("Invalid argument").isEqualTo(body);
assertThat(mavContainer.isRequestHandled()).as("The requestHandled flag shouldn't change").isFalse();
}
@Test
void resolveArgumentNotValid() {
assertThatExceptionOfType(MethodArgumentNotValidException.class).isThrownBy(() ->
testResolveArgumentWithValidation(new SimpleBean(null)))
.satisfies(ex -> {
BindingResult bindingResult = ex.getBindingResult();
assertThat(bindingResult.getObjectName()).isEqualTo("simpleBean");
assertThat(bindingResult.getErrorCount()).isEqualTo(1);
assertThat(bindingResult.getFieldError("name")).isNotNull();
});
}
@Test
void resolveArgumentValid() throws Exception {
testResolveArgumentWithValidation(new SimpleBean("name"));
}
private void testResolveArgumentWithValidation(SimpleBean simpleBean) throws Exception {
MediaType contentType = MediaType.TEXT_PLAIN;
servletRequest.addHeader("Content-Type", contentType.toString());
servletRequest.setContent("payload".getBytes(StandardCharsets.UTF_8));
@SuppressWarnings("unchecked")
HttpMessageConverter<SimpleBean> beanConverter = mock();
given(beanConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
given(beanConverter.canRead(SimpleBean.class, contentType)).willReturn(true);
given(beanConverter.read(eq(SimpleBean.class), isA(HttpInputMessage.class))).willReturn(simpleBean);
processor = new RequestResponseBodyMethodProcessor(Collections.singletonList(beanConverter));
processor.resolveArgument(paramValidBean, mavContainer, webRequest, new ValidatingBinderFactory());
}
@Test
void resolveArgumentCannotRead() {
MediaType contentType = MediaType.TEXT_PLAIN;
servletRequest.addHeader("Content-Type", contentType.toString());
servletRequest.setContent("payload".getBytes(StandardCharsets.UTF_8));
given(stringMessageConverter.canRead(String.class, contentType)).willReturn(false);
assertThatExceptionOfType(HttpMediaTypeNotSupportedException.class).isThrownBy(() ->
processor.resolveArgument(paramRequestBodyString, mavContainer, webRequest, null));
}
@Test
void resolveArgumentNoContentType() {
servletRequest.setContent("payload".getBytes(StandardCharsets.UTF_8));
given(stringMessageConverter.canRead(String.class, MediaType.APPLICATION_OCTET_STREAM)).willReturn(false);
assertThatExceptionOfType(HttpMediaTypeNotSupportedException.class).isThrownBy(() ->
processor.resolveArgument(paramRequestBodyString, mavContainer, webRequest, null));
}
@Test
void resolveArgumentInvalidContentType() {
this.servletRequest.setContentType("bad");
servletRequest.setContent("payload".getBytes(StandardCharsets.UTF_8));
assertThatExceptionOfType(HttpMediaTypeNotSupportedException.class).isThrownBy(() ->
processor.resolveArgument(paramRequestBodyString, mavContainer, webRequest, null));
}
@Test // SPR-9942
void resolveArgumentRequiredNoContent() throws Exception {
servletRequest.setContentType(MediaType.TEXT_PLAIN_VALUE);
servletRequest.setContent(new byte[0]);
given(stringMessageConverter.canRead(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
given(stringMessageConverter.read(eq(String.class), isA(HttpInputMessage.class))).willReturn(null);
assertThatExceptionOfType(HttpMessageNotReadableException.class).isThrownBy(() ->
processor.resolveArgument(paramRequestBodyString, mavContainer,
webRequest, new ValidatingBinderFactory()));
}
@Test
void resolveArgumentNotGetRequests() throws Exception {
servletRequest.setMethod("GET");
servletRequest.setContent(new byte[0]);
given(stringMessageConverter.canRead(String.class, MediaType.APPLICATION_OCTET_STREAM)).willReturn(false);
assertThat(processor.resolveArgument(paramStringNotRequired, mavContainer,
webRequest, new ValidatingBinderFactory())).isNull();
}
@Test
void resolveArgumentNotRequiredWithContent() throws Exception {
servletRequest.setContentType("text/plain");
servletRequest.setContent("body".getBytes());
given(stringMessageConverter.canRead(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
given(stringMessageConverter.read(eq(String.class), isA(HttpInputMessage.class))).willReturn("body");
assertThat(processor.resolveArgument(paramStringNotRequired, mavContainer,
webRequest, new ValidatingBinderFactory())).isEqualTo("body");
}
@Test
void resolveArgumentNotRequiredNoContent() throws Exception {
servletRequest.setContentType("text/plain");
servletRequest.setContent(new byte[0]);
given(stringMessageConverter.canRead(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
assertThat(processor.resolveArgument(paramStringNotRequired, mavContainer,
webRequest, new ValidatingBinderFactory())).isNull();
}
@Test // SPR-13417
void resolveArgumentNotRequiredNoContentNoContentType() throws Exception {
servletRequest.setContent(new byte[0]);
given(stringMessageConverter.canRead(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
given(stringMessageConverter.canRead(String.class, MediaType.APPLICATION_OCTET_STREAM)).willReturn(false);
assertThat(processor.resolveArgument(paramStringNotRequired, mavContainer,
webRequest, new ValidatingBinderFactory())).isNull();
}
@Test
void resolveArgumentOptionalWithContent() throws Exception {
servletRequest.setContentType("text/plain");
servletRequest.setContent("body".getBytes());
given(stringMessageConverter.canRead(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
given(stringMessageConverter.read(eq(String.class), isA(HttpInputMessage.class))).willReturn("body");
assertThat(processor.resolveArgument(paramOptionalString, mavContainer,
webRequest, new ValidatingBinderFactory())).isEqualTo(Optional.of("body"));
}
@Test
void resolveArgumentOptionalNoContent() throws Exception {
servletRequest.setContentType("text/plain");
servletRequest.setContent(new byte[0]);
given(stringMessageConverter.canRead(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
assertThat(processor.resolveArgument(paramOptionalString, mavContainer, webRequest, new ValidatingBinderFactory())).isEqualTo(Optional.empty());
}
@Test
void resolveArgumentOptionalNoContentNoContentType() throws Exception {
servletRequest.setContent(new byte[0]);
given(stringMessageConverter.canRead(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
given(stringMessageConverter.canRead(String.class, MediaType.APPLICATION_OCTET_STREAM)).willReturn(false);
assertThat(processor.resolveArgument(paramOptionalString, mavContainer,
webRequest, new ValidatingBinderFactory())).isEqualTo(Optional.empty());
}
@Test
void handleReturnValue() throws Exception {
MediaType accepted = MediaType.TEXT_PLAIN;
servletRequest.addHeader("Accept", accepted.toString());
String body = "Foo";
given(stringMessageConverter.canWrite(String.class, null)).willReturn(true);
given(stringMessageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
given(stringMessageConverter.canWrite(String.class, accepted)).willReturn(true);
processor.handleReturnValue(body, returnTypeString, mavContainer, webRequest);
assertThat(mavContainer.isRequestHandled()).as("The requestHandled flag wasn't set").isTrue();
verify(stringMessageConverter).write(eq(body), eq(accepted), isA(HttpOutputMessage.class));
}
@Test
void handleReturnValueProduces() throws Exception {
String body = "Foo";
servletRequest.addHeader("Accept", "text/*");
servletRequest.setAttribute(HandlerMapping.PRODUCIBLE_MEDIA_TYPES_ATTRIBUTE,
Collections.singleton(MediaType.TEXT_HTML));
given(stringMessageConverter.canWrite(String.class, MediaType.TEXT_HTML)).willReturn(true);
processor.handleReturnValue(body, returnTypeStringProduces, mavContainer, webRequest);
assertThat(mavContainer.isRequestHandled()).isTrue();
verify(stringMessageConverter).write(eq(body), eq(MediaType.TEXT_HTML), isA(HttpOutputMessage.class));
}
@Test
void handleReturnValueNotAcceptable() {
MediaType accepted = MediaType.APPLICATION_ATOM_XML;
servletRequest.addHeader("Accept", accepted.toString());
given(stringMessageConverter.canWrite(String.class, null)).willReturn(true);
given(stringMessageConverter.getSupportedMediaTypes()).willReturn(List.of(MediaType.TEXT_PLAIN));
given(stringMessageConverter.canWrite(String.class, accepted)).willReturn(false);
assertThatExceptionOfType(HttpMediaTypeNotAcceptableException.class).isThrownBy(() ->
processor.handleReturnValue("Foo", returnTypeString, mavContainer, webRequest));
}
@Test
void handleReturnValueNotAcceptableProduces() {
MediaType accepted = MediaType.TEXT_PLAIN;
servletRequest.addHeader("Accept", accepted.toString());
given(stringMessageConverter.canWrite(String.class, null)).willReturn(true);
given(stringMessageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
given(stringMessageConverter.canWrite(String.class, accepted)).willReturn(false);
assertThatExceptionOfType(HttpMediaTypeNotAcceptableException.class).isThrownBy(() ->
processor.handleReturnValue("Foo", returnTypeStringProduces, mavContainer, webRequest));
}
@Test
void handleReturnTypeResource() throws Exception {
Resource returnValue = new ByteArrayResource("Content".getBytes(StandardCharsets.UTF_8));
given(resourceMessageConverter.canWrite(ByteArrayResource.class, null)).willReturn(true);
given(resourceMessageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.ALL));
given(resourceMessageConverter.canWrite(ByteArrayResource.class, MediaType.APPLICATION_OCTET_STREAM))
.willReturn(true);
processor.handleReturnValue(returnValue, returnTypeResource, mavContainer, webRequest);
then(resourceMessageConverter).should(times(1)).write(any(ByteArrayResource.class),
eq(MediaType.APPLICATION_OCTET_STREAM), any(HttpOutputMessage.class));
assertThat(servletResponse.getStatus()).isEqualTo(200);
}
@Test // SPR-9841
void handleReturnValueMediaTypeSuffix() throws Exception {
String body = "Foo";
MediaType accepted = MediaType.APPLICATION_XHTML_XML;
List<MediaType> supported = Collections.singletonList(MediaType.valueOf("application/*+xml"));
servletRequest.addHeader("Accept", accepted);
given(stringMessageConverter.canWrite(String.class, null)).willReturn(true);
given(stringMessageConverter.getSupportedMediaTypes(any())).willReturn(supported);
given(stringMessageConverter.canWrite(String.class, accepted)).willReturn(true);
processor.handleReturnValue(body, returnTypeStringProduces, mavContainer, webRequest);
assertThat(mavContainer.isRequestHandled()).isTrue();
verify(stringMessageConverter).write(eq(body), eq(accepted), isA(HttpOutputMessage.class));
}
@Test
void handleReturnTypeResourceByteRange() throws Exception {
Resource returnValue = new ByteArrayResource("Content".getBytes(StandardCharsets.UTF_8));
servletRequest.addHeader("Range", "bytes=0-5");
given(resourceRegionMessageConverter.canWrite(any(), eq(null))).willReturn(true);
given(resourceRegionMessageConverter.canWrite(any(), eq(MediaType.APPLICATION_OCTET_STREAM))).willReturn(true);
processor.handleReturnValue(returnValue, returnTypeResource, mavContainer, webRequest);
then(resourceRegionMessageConverter).should(times(1)).write(
anyCollection(), eq(MediaType.APPLICATION_OCTET_STREAM),
argThat(outputMessage -> "bytes".equals(outputMessage.getHeaders().getFirst(HttpHeaders.ACCEPT_RANGES))));
assertThat(servletResponse.getStatus()).isEqualTo(206);
}
@Test
void handleReturnTypeResourceIllegalByteRange() throws Exception {
Resource returnValue = new ByteArrayResource("Content".getBytes(StandardCharsets.UTF_8));
servletRequest.addHeader("Range", "illegal");
given(resourceRegionMessageConverter.canWrite(any(), eq(null))).willReturn(true);
given(resourceRegionMessageConverter.canWrite(any(), eq(MediaType.APPLICATION_OCTET_STREAM))).willReturn(true);
processor.handleReturnValue(returnValue, returnTypeResource, mavContainer, webRequest);
then(resourceRegionMessageConverter).should(never()).write(
anyCollection(), eq(MediaType.APPLICATION_OCTET_STREAM), any(HttpOutputMessage.class));
assertThat(servletResponse.getStatus()).isEqualTo(416);
}
@SuppressWarnings("unused")
@ResponseBody
public String handle1(@RequestBody String s, int i) {
return s;
}
@SuppressWarnings("unused")
public void handle2(@Valid @RequestBody SimpleBean b) {
}
@SuppressWarnings("unused")
public void handle3(@RequestBody(required = false) String s) {
}
@SuppressWarnings("unused")
public void handle4(@RequestBody Optional<String> s) {
}
@SuppressWarnings("unused")
public int handle5() {
return 42;
}
@SuppressWarnings("unused")
@ResponseBody
public String handle6() {
return null;
}
@SuppressWarnings("unused")
@ResponseBody
public Resource handle7() {
return null;
}
private final
|
RequestResponseBodyMethodProcessorMockTests
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/Signal.java
|
{
"start": 3148,
"end": 3305
}
|
class ____ extends AbstractConstant<SignalConstant> {
SignalConstant(int id, String name) {
super(id, name);
}
}
}
|
SignalConstant
|
java
|
apache__maven
|
compat/maven-compat/src/main/java/org/apache/maven/project/artifact/DefaultMavenMetadataCache.java
|
{
"start": 1749,
"end": 6762
}
|
class ____ {
private final Artifact artifact;
private final long pomHash;
private final boolean resolveManagedVersions;
private final List<ArtifactRepository> repositories = new ArrayList<>();
private final int hashCode;
public CacheKey(
Artifact artifact,
boolean resolveManagedVersions,
ArtifactRepository localRepository,
List<ArtifactRepository> remoteRepositories) {
File file = artifact.getFile();
this.artifact = ArtifactUtils.copyArtifact(artifact);
if ("pom".equals(artifact.getType()) && file != null) {
pomHash = file.getPath().hashCode() + file.lastModified();
} else {
pomHash = 0;
}
this.resolveManagedVersions = resolveManagedVersions;
this.repositories.add(localRepository);
this.repositories.addAll(remoteRepositories);
int hash = 17;
hash = hash * 31 + artifactHashCode(artifact);
hash = hash * 31 + (resolveManagedVersions ? 1 : 2);
hash = hash * 31 + repositoriesHashCode(repositories);
this.hashCode = hash;
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o instanceof CacheKey other) {
return pomHash == other.pomHash
&& artifactEquals(artifact, other.artifact)
&& resolveManagedVersions == other.resolveManagedVersions
&& repositoriesEquals(repositories, other.repositories);
} else {
return false;
}
}
}
private static int artifactHashCode(Artifact a) {
int result = 17;
result = 31 * result + a.getGroupId().hashCode();
result = 31 * result + a.getArtifactId().hashCode();
result = 31 * result + a.getType().hashCode();
if (a.getVersion() != null) {
result = 31 * result + a.getVersion().hashCode();
}
result = 31 * result + (a.getClassifier() != null ? a.getClassifier().hashCode() : 0);
result = 31 * result + (a.getScope() != null ? a.getScope().hashCode() : 0);
result = 31 * result
+ (a.getDependencyFilter() != null ? a.getDependencyFilter().hashCode() : 0);
result = 31 * result + (a.isOptional() ? 1 : 0);
return result;
}
private static boolean artifactEquals(Artifact a1, Artifact a2) {
if (a1 == a2) {
return true;
}
return Objects.equals(a1.getGroupId(), a2.getGroupId())
&& Objects.equals(a1.getArtifactId(), a2.getArtifactId())
&& Objects.equals(a1.getType(), a2.getType())
&& Objects.equals(a1.getVersion(), a2.getVersion())
&& Objects.equals(a1.getClassifier(), a2.getClassifier())
&& Objects.equals(a1.getScope(), a2.getScope())
&& Objects.equals(a1.getDependencyFilter(), a2.getDependencyFilter())
&& a1.isOptional() == a2.isOptional();
}
private static int repositoryHashCode(ArtifactRepository repository) {
int result = 17;
result = 31 * result + (repository.getId() != null ? repository.getId().hashCode() : 0);
return result;
}
private static int repositoriesHashCode(List<ArtifactRepository> repositories) {
int result = 17;
for (ArtifactRepository repository : repositories) {
result = 31 * result + repositoryHashCode(repository);
}
return result;
}
private static boolean repositoryEquals(ArtifactRepository r1, ArtifactRepository r2) {
if (r1 == r2) {
return true;
}
return Objects.equals(r1.getId(), r2.getId())
&& Objects.equals(r1.getUrl(), r2.getUrl())
&& repositoryPolicyEquals(r1.getReleases(), r2.getReleases())
&& repositoryPolicyEquals(r1.getSnapshots(), r2.getSnapshots());
}
private static boolean repositoryPolicyEquals(ArtifactRepositoryPolicy p1, ArtifactRepositoryPolicy p2) {
if (p1 == p2) {
return true;
}
return p1.isEnabled() == p2.isEnabled() && Objects.equals(p1.getUpdatePolicy(), p2.getUpdatePolicy());
}
private static boolean repositoriesEquals(List<ArtifactRepository> r1, List<ArtifactRepository> r2) {
if (r1.size() != r2.size()) {
return false;
}
for (Iterator<ArtifactRepository> it1 = r1.iterator(), it2 = r2.iterator(); it1.hasNext(); ) {
if (!repositoryEquals(it1.next(), it2.next())) {
return false;
}
}
return true;
}
/**
* CacheRecord
*/
public
|
CacheKey
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/dataflow/nullnesspropagation/NullnessPropagationTest.java
|
{
"start": 79674,
"end": 80339
}
|
class ____ {
public void test(@Nullable Object o) {
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(o);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker((@NonNull Object) o);
}
}
""")
.doTest();
}
@Test
public void autoValue() {
compilationHelper
.addSourceLines(
"AutoValueTest.java",
"""
package com.google.errorprone.dataflow.nullnesspropagation;
import static com.google.errorprone.dataflow.nullnesspropagation.NullnessPropagationTest.triggerNullnessChecker;
import com.google.auto.value.AutoValue;
import org.checkerframework.checker.nullness.qual.Nullable;
public
|
CastsTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/RoundRobinOperatorStateRepartitioner.java
|
{
"start": 21497,
"end": 22632
}
|
class ____ {
private final EnumMap<
OperatorStateHandle.Mode,
Map<
String,
List<Tuple2<StreamStateHandle, OperatorStateHandle.StateMetaInfo>>>>
byMode;
GroupByStateNameResults(
EnumMap<
OperatorStateHandle.Mode,
Map<
String,
List<
Tuple2<
StreamStateHandle,
OperatorStateHandle.StateMetaInfo>>>>
byMode) {
this.byMode = Preconditions.checkNotNull(byMode);
}
public Map<String, List<Tuple2<StreamStateHandle, OperatorStateHandle.StateMetaInfo>>>
getByMode(OperatorStateHandle.Mode mode) {
return byMode.get(mode);
}
}
@VisibleForTesting
static final
|
GroupByStateNameResults
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/OAuth2OpaqueSecurityWithConfigTestCase.java
|
{
"start": 307,
"end": 1732
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(OpenApiResource.class, ResourceBean.class)
.addAsResource(
new StringAsset("quarkus.smallrye-openapi.security-scheme=oauth2\n"
+ "quarkus.smallrye-openapi.security-scheme-name=OAuth2CompanyAuthentication\n"
+ "quarkus.smallrye-openapi.security-scheme-description=OAuth2 Authentication"),
"application.properties"));
@Test
public void testOAuth2Authentication() {
RestAssured.given().header("Accept", "application/json")
.when().get("/q/openapi")
.then().body("components.securitySchemes.OAuth2CompanyAuthentication", Matchers.hasEntry("type", "http"))
.and()
.body("components.securitySchemes.OAuth2CompanyAuthentication",
Matchers.hasEntry("description", "OAuth2 Authentication"))
.and().body("components.securitySchemes.OAuth2CompanyAuthentication", Matchers.hasEntry("scheme", "bearer"))
.and()
.body("components.securitySchemes.OAuth2CompanyAuthentication", Matchers.hasEntry("bearerFormat", "Opaque"));
}
}
|
OAuth2OpaqueSecurityWithConfigTestCase
|
java
|
elastic__elasticsearch
|
x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/patterntext/Arg.java
|
{
"start": 1242,
"end": 4094
}
|
enum ____ {
GENERIC(0);
private final int code;
private static final Type[] lookup = new Type[values().length];
static {
for (var type : values()) {
lookup[type.code] = type;
}
}
Type(int code) {
this.code = code;
}
public int toCode() {
return code;
}
public static Type fromCode(int code) {
return lookup[code];
}
}
record Info(Type type, int offsetInTemplate) {
public Info {
assert offsetInTemplate >= 0;
}
void writeTo(ByteArrayDataOutput out, int previousOffset) throws IOException {
out.writeVInt(type.toCode());
int diff = offsetInTemplate - previousOffset;
out.writeVInt(diff);
}
static Info readFrom(DataInput in, int previousOffset) throws IOException {
var type = Type.fromCode(in.readVInt());
int diffFromPrevious = in.readVInt();
int offsetInfoTemplate = previousOffset + diffFromPrevious;
return new Info(type, offsetInfoTemplate);
}
}
static boolean isArg(String text) {
for (int i = 0; i < text.length(); i++) {
if (Character.isDigit(text.charAt(i))) {
return true;
}
}
return false;
}
static String encodeInfo(List<Info> arguments) throws IOException {
int maxSize = VINT_MAX_BYTES + arguments.size() * (VINT_MAX_BYTES + VINT_MAX_BYTES);
byte[] buffer = new byte[maxSize];
var dataInput = new ByteArrayDataOutput(buffer);
dataInput.writeVInt(arguments.size());
int previousOffset = 0;
for (var arg : arguments) {
arg.writeTo(dataInput, previousOffset);
previousOffset = arg.offsetInTemplate;
}
int size = dataInput.getPosition();
byte[] data = Arrays.copyOfRange(buffer, 0, size);
return ENCODER.encodeToString(data);
}
static List<Info> decodeInfo(String encoded) throws IOException {
byte[] encodedBytes = DECODER.decode(encoded);
var input = new ByteArrayDataInput(encodedBytes);
int numArgs = input.readVInt();
int previousOffset = 0;
List<Info> arguments = new ArrayList<>(numArgs);
for (int i = 0; i < numArgs; i++) {
var argInfo = Info.readFrom(input, previousOffset);
arguments.add(argInfo);
previousOffset = argInfo.offsetInTemplate;
}
return arguments;
}
static String encodeRemainingArgs(PatternTextValueProcessor.Parts parts) {
return String.join(SPACE, parts.args());
}
static String[] decodeRemainingArgs(String mergedArgs) {
return mergedArgs.split(SPACE);
}
}
|
Type
|
java
|
grpc__grpc-java
|
interop-testing/src/test/java/io/grpc/testing/integration/Http2Test.java
|
{
"start": 1892,
"end": 6484
}
|
enum ____ {
NETTY, OKHTTP;
}
/** Parameterized test cases. */
@Parameters(name = "client={0},server={1}")
public static Iterable<Object[]> data() {
return Arrays.asList(new Object[][] {
{Transport.NETTY, Transport.NETTY},
{Transport.OKHTTP, Transport.OKHTTP},
{Transport.OKHTTP, Transport.NETTY},
{Transport.NETTY, Transport.OKHTTP},
});
}
private final Transport clientType;
private final Transport serverType;
public Http2Test(Transport clientType, Transport serverType) {
this.clientType = clientType;
this.serverType = serverType;
}
@Override
protected ServerBuilder<?> getServerBuilder() {
// Starts the server with HTTPS.
ServerCredentials serverCreds;
try {
serverCreds = TlsServerCredentials.create(
TlsTesting.loadCert("server1.pem"), TlsTesting.loadCert("server1.key"));
} catch (IOException ex) {
throw new RuntimeException(ex);
}
ServerBuilder<?> builder;
if (serverType == Transport.NETTY) {
NettyServerBuilder nettyBuilder = NettyServerBuilder.forPort(0, serverCreds)
.flowControlWindow(AbstractInteropTest.TEST_FLOW_CONTROL_WINDOW);
// Disable the default census stats tracer, use testing tracer instead.
InternalNettyServerBuilder.setStatsEnabled(nettyBuilder, false);
builder = nettyBuilder;
} else {
OkHttpServerBuilder okHttpBuilder = OkHttpServerBuilder.forPort(0, serverCreds)
.flowControlWindow(AbstractInteropTest.TEST_FLOW_CONTROL_WINDOW);
// Disable the default census stats tracer, use testing tracer instead.
InternalOkHttpServerBuilder.setStatsEnabled(okHttpBuilder, false);
builder = okHttpBuilder;
}
return builder
.maxInboundMessageSize(AbstractInteropTest.MAX_MESSAGE_SIZE)
.addStreamTracerFactory(createCustomCensusTracerFactory());
}
@Override
protected ManagedChannelBuilder<?> createChannelBuilder() {
ChannelCredentials channelCreds;
try {
channelCreds = TlsChannelCredentials.newBuilder()
.trustManager(TlsTesting.loadCert("ca.pem"))
.build();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
int port = ((InetSocketAddress) getListenAddress()).getPort();
ManagedChannelBuilder<?> builder;
if (clientType == Transport.NETTY) {
NettyChannelBuilder nettyBuilder = NettyChannelBuilder
.forAddress("localhost", port, channelCreds)
.flowControlWindow(AbstractInteropTest.TEST_FLOW_CONTROL_WINDOW);
// Disable the default census stats interceptor, use testing interceptor instead.
InternalNettyChannelBuilder.setStatsEnabled(nettyBuilder, false);
builder = nettyBuilder;
} else {
OkHttpChannelBuilder okHttpBuilder = OkHttpChannelBuilder
.forAddress("localhost", port, channelCreds)
.flowControlWindow(AbstractInteropTest.TEST_FLOW_CONTROL_WINDOW);
// Disable the default census stats interceptor, use testing interceptor instead.
InternalOkHttpChannelBuilder.setStatsEnabled(okHttpBuilder, false);
builder = okHttpBuilder;
}
return builder
.overrideAuthority(TestUtils.TEST_SERVER_HOST)
.maxInboundMessageSize(AbstractInteropTest.MAX_MESSAGE_SIZE)
.intercept(createCensusStatsClientInterceptor());
}
@Test
public void remoteAddr() {
InetSocketAddress isa = (InetSocketAddress) obtainRemoteClientAddr();
assertTrue(isa.getAddress().isLoopbackAddress());
// It should not be the same as the server
assertNotEquals(((InetSocketAddress) getListenAddress()).getPort(), isa.getPort());
}
@Test
public void localAddr() throws Exception {
InetSocketAddress isa = (InetSocketAddress) obtainLocalServerAddr();
assertTrue(isa.getAddress().isLoopbackAddress());
assertEquals(((InetSocketAddress) getListenAddress()).getPort(), isa.getPort());
}
@Test
public void contentLengthPermitted() throws Exception {
// Some third-party gRPC implementations (e.g., ServiceTalk) include Content-Length. The HTTP/2
// code starting in Netty 4.1.60.Final has special-cased handling of Content-Length, and may
// call uncommon methods on our custom headers implementation.
// https://github.com/grpc/grpc-java/issues/7953
Metadata contentLength = new Metadata();
contentLength.put(Metadata.Key.of("content-length", Metadata.ASCII_STRING_MARSHALLER), "5");
blockingStub
.withInterceptors(MetadataUtils.newAttachHeadersInterceptor(contentLength))
.emptyCall(EMPTY);
}
}
|
Transport
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/TooManyParametersTest.java
|
{
"start": 5200,
"end": 5327
}
|
interface ____ {}
""")
.addSourceLines(
"Test.java",
"""
public
|
AutoFactory
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/testing/springbootapplications/jsontests/VehicleDetails.java
|
{
"start": 707,
"end": 973
}
|
class ____ {
private final String make;
private final String model;
VehicleDetails(String make, String model) {
this.make = make;
this.model = model;
}
String getMake() {
return this.make;
}
String getModel() {
return this.model;
}
}
|
VehicleDetails
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
|
{
"start": 2853,
"end": 23216
}
|
class ____ {
private static final Path outDir = new Path(
System.getProperty("test.build.data",
System.getProperty("java.io.tmpdir")),
TestFileOutputCommitter.class.getName());
private final static String SUB_DIR = "SUB_DIR";
private final static Path OUT_SUB_DIR = new Path(outDir, SUB_DIR);
private static final Logger LOG =
LoggerFactory.getLogger(TestFileOutputCommitter.class);
// A random task attempt id for testing.
private static final String attempt = "attempt_200707121733_0001_m_000000_0";
private static final String partFile = "part-m-00000";
private static final TaskAttemptID taskID = TaskAttemptID.forName(attempt);
private static final String attempt1 = "attempt_200707121733_0001_m_000001_0";
private static final TaskAttemptID taskID1 = TaskAttemptID.forName(attempt1);
private Text key1 = new Text("key1");
private Text key2 = new Text("key2");
private Text val1 = new Text("val1");
private Text val2 = new Text("val2");
private static void cleanup() throws IOException {
Configuration conf = new Configuration();
FileSystem fs = outDir.getFileSystem(conf);
fs.delete(outDir, true);
}
@BeforeEach
public void setUp() throws IOException {
cleanup();
}
@AfterEach
public void tearDown() throws IOException {
cleanup();
}
private void writeOutput(RecordWriter theRecordWriter,
TaskAttemptContext context) throws IOException, InterruptedException {
NullWritable nullWritable = NullWritable.get();
try {
theRecordWriter.write(key1, val1);
theRecordWriter.write(null, nullWritable);
theRecordWriter.write(null, val1);
theRecordWriter.write(nullWritable, val2);
theRecordWriter.write(key2, nullWritable);
theRecordWriter.write(key1, null);
theRecordWriter.write(null, null);
theRecordWriter.write(key2, val2);
} finally {
theRecordWriter.close(context);
}
}
private void writeMapFileOutput(RecordWriter theRecordWriter,
TaskAttemptContext context) throws IOException, InterruptedException {
try {
int key = 0;
for (int i = 0 ; i < 10; ++i) {
key = i;
Text val = (i%2 == 1) ? val1 : val2;
theRecordWriter.write(new LongWritable(key),
val);
}
} finally {
theRecordWriter.close(context);
}
}
private void testRecoveryInternal(int commitVersion, int recoveryVersion)
throws Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 1);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
commitVersion);
JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
// setup
committer.setupJob(jContext);
committer.setupTask(tContext);
// write output
TextOutputFormat theOutputFormat = new TextOutputFormat();
RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
writeOutput(theRecordWriter, tContext);
// do commit
committer.commitTask(tContext);
Path jobTempDir1 = committer.getCommittedTaskPath(tContext);
File jtd = new File(jobTempDir1.toUri().getPath());
if (commitVersion == 1) {
assertTrue(jtd.exists(), "Version 1 commits to temporary dir " + jtd);
validateContent(jtd);
} else {
assertFalse(jtd.exists(), "Version 2 commits to output dir " + jtd);
}
//now while running the second app attempt,
//recover the task output from first attempt
Configuration conf2 = job.getConfiguration();
conf2.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf2.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 2);
conf2.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
recoveryVersion);
JobContext jContext2 = new JobContextImpl(conf2, taskID.getJobID());
TaskAttemptContext tContext2 = new TaskAttemptContextImpl(conf2, taskID);
FileOutputCommitter committer2 = new FileOutputCommitter(outDir, tContext2);
committer2.setupJob(tContext2);
Path jobTempDir2 = committer2.getCommittedTaskPath(tContext2);
File jtd2 = new File(jobTempDir2.toUri().getPath());
committer2.recoverTask(tContext2);
if (recoveryVersion == 1) {
assertTrue(jtd2.exists(), "Version 1 recovers to " + jtd2);
validateContent(jtd2);
} else {
assertFalse(jtd2.exists(), "Version 2 commits to output dir " + jtd2);
if (commitVersion == 1) {
assertEquals(0, jtd.list().length, "Version 2 recovery moves to output dir from " + jtd);
}
}
committer2.commitJob(jContext2);
validateContent(outDir);
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testRecoveryV1() throws Exception {
testRecoveryInternal(1, 1);
}
@Test
public void testRecoveryV2() throws Exception {
testRecoveryInternal(2, 2);
}
@Test
public void testRecoveryUpgradeV1V2() throws Exception {
testRecoveryInternal(1, 2);
}
private void validateContent(Path dir) throws IOException {
validateContent(new File(dir.toUri().getPath()));
}
private void validateContent(File dir) throws IOException {
File expectedFile = new File(dir, partFile);
assertTrue(expectedFile.exists(), "Could not find "+expectedFile);
StringBuilder expectedOutput = new StringBuilder();
expectedOutput.append(key1).append('\t').append(val1).append("\n");
expectedOutput.append(val1).append("\n");
expectedOutput.append(val2).append("\n");
expectedOutput.append(key2).append("\n");
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = slurp(expectedFile);
assertThat(output).isEqualTo(expectedOutput.toString());
}
private void validateMapFileOutputContent(
FileSystem fs, Path dir) throws IOException {
// map output is a directory with index and data files
Path expectedMapDir = new Path(dir, partFile);
assert(fs.getFileStatus(expectedMapDir).isDirectory());
FileStatus[] files = fs.listStatus(expectedMapDir);
int fileCount = 0;
boolean dataFileFound = false;
boolean indexFileFound = false;
for (FileStatus f : files) {
if (f.isFile()) {
++fileCount;
if (f.getPath().getName().equals(MapFile.INDEX_FILE_NAME)) {
indexFileFound = true;
}
else if (f.getPath().getName().equals(MapFile.DATA_FILE_NAME)) {
dataFileFound = true;
}
}
}
assert(fileCount > 0);
assert(dataFileFound && indexFileFound);
}
private void testCommitterInternal(int version, boolean taskCleanup)
throws Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(
FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
version);
conf.setBoolean(
FileOutputCommitter.FILEOUTPUTCOMMITTER_TASK_CLEANUP_ENABLED,
taskCleanup);
JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
// setup
committer.setupJob(jContext);
committer.setupTask(tContext);
// write output
TextOutputFormat theOutputFormat = new TextOutputFormat();
RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
writeOutput(theRecordWriter, tContext);
// check task and job temp directories exist
File jobOutputDir = new File(
new Path(outDir, FileOutputCommitter.PENDING_DIR_NAME).toString());
File taskOutputDir = new File(Path.getPathWithoutSchemeAndAuthority(
committer.getWorkPath()).toString());
assertTrue(jobOutputDir.exists(), "job temp dir does not exist");
assertTrue(taskOutputDir.exists(), "task temp dir does not exist");
// do commit
committer.commitTask(tContext);
assertTrue(jobOutputDir.exists(), "job temp dir does not exist");
if (version == 1 || taskCleanup) {
// Task temp dir gets renamed in v1 and deleted if taskCleanup is
// enabled in v2
assertFalse(taskOutputDir.exists(), "task temp dir still exists");
} else {
// By default, in v2 the task temp dir is only deleted during commitJob
assertTrue(taskOutputDir.exists(), "task temp dir does not exist");
}
// Entire job temp directory gets deleted, including task temp dir
committer.commitJob(jContext);
assertFalse(jobOutputDir.exists(), "job temp dir still exists");
assertFalse(taskOutputDir.exists(), "task temp dir still exists");
// validate output
validateContent(outDir);
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testCommitterV1() throws Exception {
testCommitterInternal(1, false);
}
@Test
public void testCommitterV2() throws Exception {
testCommitterInternal(2, false);
}
@Test
public void testCommitterV2TaskCleanupEnabled() throws Exception {
testCommitterInternal(2, true);
}
@Test
public void testCommitterWithDuplicatedCommitV1() throws Exception {
testCommitterWithDuplicatedCommitInternal(1);
}
@Test
public void testCommitterWithDuplicatedCommitV2() throws Exception {
testCommitterWithDuplicatedCommitInternal(2);
}
private void testCommitterWithDuplicatedCommitInternal(int version) throws
Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
version);
JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
// setup
committer.setupJob(jContext);
committer.setupTask(tContext);
// write output
TextOutputFormat theOutputFormat = new TextOutputFormat();
RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
writeOutput(theRecordWriter, tContext);
// do commit
committer.commitTask(tContext);
committer.commitJob(jContext);
// validate output
validateContent(outDir);
// commit job again on a successful commit job.
try {
committer.commitJob(jContext);
if (version == 1) {
fail("Duplicate commit success: wrong behavior for version 1.");
}
} catch (IOException e) {
if (version == 2) {
fail("Duplicate commit failed: wrong behavior for version 2.");
}
}
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testCommitterWithFailureV1() throws Exception {
testCommitterWithFailureInternal(1, 1);
testCommitterWithFailureInternal(1, 2);
}
@Test
public void testCommitterWithFailureV2() throws Exception {
testCommitterWithFailureInternal(2, 1);
testCommitterWithFailureInternal(2, 2);
}
private void testCommitterWithFailureInternal(int version, int maxAttempts)
throws Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
version);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS,
maxAttempts);
JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
FileOutputCommitter committer = new CommitterWithFailedThenSucceed(outDir,
tContext);
// setup
committer.setupJob(jContext);
committer.setupTask(tContext);
// write output
TextOutputFormat theOutputFormat = new TextOutputFormat();
RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
writeOutput(theRecordWriter, tContext);
// do commit
committer.commitTask(tContext);
try {
committer.commitJob(jContext);
// (1,1), (1,2), (2,1) shouldn't reach to here.
if (version == 1 || maxAttempts <= 1) {
fail("Commit successful: wrong behavior for version 1.");
}
} catch (IOException e) {
// (2,2) shouldn't reach to here.
if (version == 2 && maxAttempts > 2) {
fail("Commit failed: wrong behavior for version 2.");
}
}
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testProgressDuringMerge() throws Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
2);
JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
TaskAttemptContext tContext = spy(new TaskAttemptContextImpl(conf, taskID));
FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
// setup
committer.setupJob(jContext);
committer.setupTask(tContext);
// write output
MapFileOutputFormat theOutputFormat = new MapFileOutputFormat();
RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
writeMapFileOutput(theRecordWriter, tContext);
// do commit
committer.commitTask(tContext);
//make sure progress flag was set.
// The first time it is set is during commit but ensure that
// mergePaths call makes it go again.
verify(tContext, atLeast(2)).progress();
}
@Test
public void testCommitterRepeatableV1() throws Exception {
testCommitterRetryInternal(1);
}
@Test
public void testCommitterRepeatableV2() throws Exception {
testCommitterRetryInternal(2);
}
// retry committer for 2 times.
private void testCommitterRetryInternal(int version)
throws Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
version);
// only attempt for 1 time.
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS,
1);
JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
FileOutputCommitter committer = new CommitterWithFailedThenSucceed(outDir,
tContext);
// setup
committer.setupJob(jContext);
committer.setupTask(tContext);
// write output
TextOutputFormat theOutputFormat = new TextOutputFormat();
RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
writeOutput(theRecordWriter, tContext);
// do commit
committer.commitTask(tContext);
try {
committer.commitJob(jContext);
fail("Commit successful: wrong behavior for the first time " +
"commit.");
} catch (IOException e) {
// commit again.
try {
committer.commitJob(jContext);
// version 1 shouldn't reach to here.
if (version == 1) {
fail("Commit successful after retry: wrong behavior for " +
"version 1.");
}
} catch (FileNotFoundException ex) {
if (version == 2) {
fail("Commit failed after retry: wrong behavior for" +
" version 2.");
}
assertTrue(ex.getMessage().contains(committer.getJobAttemptPath(
jContext).toString() + " does not exist"));
}
}
FileUtil.fullyDelete(new File(outDir.toString()));
}
private void testMapFileOutputCommitterInternal(int version)
throws Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
version);
JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
// setup
committer.setupJob(jContext);
committer.setupTask(tContext);
// write output
MapFileOutputFormat theOutputFormat = new MapFileOutputFormat();
RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
writeMapFileOutput(theRecordWriter, tContext);
// do commit
committer.commitTask(tContext);
committer.commitJob(jContext);
// Ensure getReaders call works and also ignores
// hidden filenames (_ or . prefixes)
MapFile.Reader[] readers = {};
try {
readers = MapFileOutputFormat.getReaders(outDir, conf);
// validate output
validateMapFileOutputContent(FileSystem.get(job.getConfiguration()), outDir);
} finally {
IOUtils.cleanupWithLogger(null, readers);
FileUtil.fullyDelete(new File(outDir.toString()));
}
}
@Test
public void testMapFileOutputCommitterV1() throws Exception {
testMapFileOutputCommitterInternal(1);
}
@Test
public void testMapFileOutputCommitterV2() throws Exception {
testMapFileOutputCommitterInternal(2);
}
@Test
public void testInvalidVersionNumber() throws IOException {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION, 3);
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
try {
new FileOutputCommitter(outDir, tContext);
fail("should've thrown an exception!");
} catch (IOException e) {
//test passed
}
}
private void testAbortInternal(int version)
throws IOException, InterruptedException {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
version);
JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
// do setup
committer.setupJob(jContext);
committer.setupTask(tContext);
// write output
TextOutputFormat theOutputFormat = new TextOutputFormat();
RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
writeOutput(theRecordWriter, tContext);
// do abort
committer.abortTask(tContext);
File expectedFile = new File(new Path(committer.getWorkPath(), partFile)
.toString());
assertFalse(expectedFile.exists(), "task temp dir still exists");
committer.abortJob(jContext, JobStatus.State.FAILED);
expectedFile = new File(new Path(outDir, FileOutputCommitter.PENDING_DIR_NAME)
.toString());
assertFalse(expectedFile.exists(), "job temp dir still exists");
assertEquals(0, new File(outDir.toString())
.listFiles().length, "Output directory not empty");
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testAbortV1() throws IOException, InterruptedException {
testAbortInternal(1);
}
@Test
public void testAbortV2() throws IOException, InterruptedException {
testAbortInternal(2);
}
public static
|
TestFileOutputCommitter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/action/internal/BulkOperationCleanupAction.java
|
{
"start": 11050,
"end": 11694
}
|
class ____ implements Serializable {
private final NaturalIdDataAccess naturalIdCacheAccessStrategy;
private final SoftLock cacheLock;
public NaturalIdCleanup(
NaturalIdDataAccess naturalIdCacheAccessStrategy,
SharedSessionContractImplementor session) {
this.naturalIdCacheAccessStrategy = naturalIdCacheAccessStrategy;
this.cacheLock = naturalIdCacheAccessStrategy.lockRegion();
naturalIdCacheAccessStrategy.removeAll( session );
}
private void release() {
naturalIdCacheAccessStrategy.unlockRegion( cacheLock );
}
}
@Override
public void afterDeserialize(EventSource session) {
// nop
}
}
|
NaturalIdCleanup
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metagen/mappedsuperclass/overridden/Product2.java
|
{
"start": 394,
"end": 680
}
|
class ____ extends Product1 {
@Column(name = "overridenName"/*, insertable = false, updatable = false*/)
public String getOverridenName() {
return super.getOverridenName();
}
public void setOverridenName(String overridenName) {
super.setOverridenName(overridenName);
}
}
|
Product2
|
java
|
quarkusio__quarkus
|
core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyVersionOverridesManagedVersionTest.java
|
{
"start": 483,
"end": 2975
}
|
class ____ extends BootstrapFromOriginalJarTestBase {
@Override
protected boolean createWorkspace() {
return true;
}
@Override
protected boolean workspaceModuleParentHierarchy() {
// this is to simply make sure the workspace modules available
// through ApplicationModel.getWorkspaceModules() include parent POMs and BOMs
return true;
}
@Override
protected TsArtifact composeApplication() {
final TsQuarkusExt extA_100 = new TsQuarkusExt("ext-a", "1.0.0");
install(extA_100);
final TsQuarkusExt extB_100 = new TsQuarkusExt("ext-b", "1.0.0");
install(extB_100);
final TsArtifact extB_100_rt = extB_100.getRuntime();
addToExpectedLib(extB_100_rt);
final TsArtifact bom = TsArtifact.pom("test-bom");
bom.addManagedDependency(platformDescriptor());
bom.addManagedDependency(platformProperties());
bom.addManagedDependency(new TsDependency(extA_100.getRuntime()));
bom.addManagedDependency(new TsDependency(extB_100_rt));
install(bom);
final TsQuarkusExt extA_101 = new TsQuarkusExt("ext-a", "1.0.1");
install(extA_101);
addToExpectedLib(extA_101.getRuntime());
createWorkspace();
final TsArtifact appJar = TsArtifact.jar("app")
.addManagedDependency(new TsDependency(bom, "import"))
.addDependency(new TsDependency(new TsArtifact(extB_100_rt.getGroupId(), extB_100_rt.getArtifactId(),
extB_100_rt.getClassifier(), extB_100_rt.getType(), null)))
.addDependency(extA_101.getRuntime());
return appJar;
}
@Override
protected void assertAppModel(ApplicationModel model) {
assertThat(model.getWorkspaceModules().stream().map(WorkspaceModule::getId).collect(Collectors.toSet()))
.isEqualTo(Set.of(
WorkspaceModuleId.of(TsArtifact.DEFAULT_GROUP_ID, "app-parent", TsArtifact.DEFAULT_VERSION),
WorkspaceModuleId.of(TsArtifact.DEFAULT_GROUP_ID, "test-bom", TsArtifact.DEFAULT_VERSION),
WorkspaceModuleId.of(TsArtifact.DEFAULT_GROUP_ID, "app", TsArtifact.DEFAULT_VERSION),
WorkspaceModuleId.of(TsArtifact.DEFAULT_GROUP_ID, "ext-a", "1.0.1"),
WorkspaceModuleId.of(TsArtifact.DEFAULT_GROUP_ID, "ext-b", "1.0.0")));
}
}
|
DependencyVersionOverridesManagedVersionTest
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-orm-graphql-panache/src/main/java/io/quarkus/it/hibertnate/orm/graphql/panache/GraphQLResource.java
|
{
"start": 314,
"end": 766
}
|
class ____ {
@Inject
BookRepository bookRepository;
@Query("authors")
@Description("Retrieve the stored authors")
public List<Author> getAuthors() {
Log.info("Getting all authors");
return Author.listAll();
}
@Query("books")
@Description("Retrieve the stored books")
public List<Book> getBooks() {
Log.info("Getting all books");
return bookRepository.listAll();
}
}
|
GraphQLResource
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java
|
{
"start": 1303,
"end": 2738
}
|
class ____ extends BaseRestHandler {
public static final String DEPRECATION_WARNING = "Legacy index templates are deprecated in favor of composable templates.";
@Override
public List<Route> routes() {
return List.of(
Route.builder(POST, "/_template/{name}").deprecateAndKeep(DEPRECATION_WARNING).build(),
Route.builder(PUT, "/_template/{name}").deprecateAndKeep(DEPRECATION_WARNING).build()
);
}
@Override
public String getName() {
return "put_index_template_action";
}
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name"));
putRequest.patterns(asList(request.paramAsStringArray("index_patterns", Strings.EMPTY_ARRAY)));
putRequest.order(request.paramAsInt("order", putRequest.order()));
putRequest.masterNodeTimeout(getMasterNodeTimeout(request));
putRequest.create(request.paramAsBoolean("create", false));
putRequest.cause(request.param("cause", ""));
putRequest.source(prepareMappings(XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2()));
return channel -> client.admin().indices().putTemplate(putRequest, new RestToXContentListener<>(channel));
}
}
|
RestPutIndexTemplateAction
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/User.java
|
{
"start": 279,
"end": 818
}
|
class ____ {
private Long id;
private String username;
private Set memberships = new HashSet();
/**
* for persistence
*/
User() {
}
public User(String username) {
this.username = username;
}
public Long getId() {
return id;
}
public String getUsername() {
return username;
}
public Iterator getMemberships() {
return memberships.iterator();
}
public Membership addMembership(Group group) {
Membership membership = new Membership( this, group );
memberships.add( membership );
return membership;
}
}
|
User
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/scripting/bsh/BshScriptUtils.java
|
{
"start": 4564,
"end": 6216
}
|
class ____ return an actual instance of
* the scripted object (in which case the Class of the object will be returned).
* In any other case, the returned Class will be {@code null}.
* @param scriptSource the script source text
* @param classLoader the ClassLoader to use for evaluating the script
* @return the scripted Java class, or {@code null} if none could be determined
* @throws EvalError in case of BeanShell parsing failure
*/
static @Nullable Class<?> determineBshObjectType(String scriptSource, @Nullable ClassLoader classLoader) throws EvalError {
Assert.hasText(scriptSource, "Script source must not be empty");
Interpreter interpreter = new Interpreter();
if (classLoader != null) {
interpreter.setClassLoader(classLoader);
}
Object result = interpreter.eval(scriptSource);
if (result instanceof Class<?> clazz) {
return clazz;
}
else if (result != null) {
return result.getClass();
}
else {
return null;
}
}
/**
* Evaluate the specified BeanShell script based on the given script source,
* keeping a returned script Class or script Object as-is.
* <p>The script may either be a simple script that needs a corresponding proxy
* generated (implementing the specified interfaces), or declare a full class
* or return an actual instance of the scripted object (in which case the
* specified interfaces, if any, need to be implemented by that class/instance).
* @param scriptSource the script source text
* @param scriptInterfaces the interfaces that the scripted Java object is
* supposed to implement (may be {@code null} or empty if the script itself
* declares a full
|
or
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/bigquery/BigQueryDataTypeTest.java
|
{
"start": 304,
"end": 618
}
|
class ____ {
@Test
public void testDataType() {
String sql = "ANY TYPE";
SQLExprParser exprParser = SQLParserUtils.createExprParser(sql, DbType.bigquery);
SQLDataType dataType = exprParser.parseDataType(false);
assertEquals(sql, dataType.getName());
}
}
|
BigQueryDataTypeTest
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/errors/TopicExistsException.java
|
{
"start": 847,
"end": 1140
}
|
class ____ extends ApiException {
private static final long serialVersionUID = 1L;
public TopicExistsException(String message) {
super(message);
}
public TopicExistsException(String message, Throwable cause) {
super(message, cause);
}
}
|
TopicExistsException
|
java
|
apache__dubbo
|
dubbo-compatible/src/main/java/com/alibaba/dubbo/rpc/cluster/Cluster.java
|
{
"start": 988,
"end": 1357
}
|
interface ____ extends org.apache.dubbo.rpc.cluster.Cluster {
<T> com.alibaba.dubbo.rpc.Invoker<T> join(com.alibaba.dubbo.rpc.cluster.Directory<T> directory)
throws com.alibaba.dubbo.rpc.RpcException;
@Override
default <T> Invoker<T> join(Directory<T> directory, boolean buildFilterChain) throws RpcException {
return null;
}
}
|
Cluster
|
java
|
spring-projects__spring-security
|
test/src/main/java/org/springframework/security/test/web/servlet/response/SecurityMockMvcResultHandlers.java
|
{
"start": 1199,
"end": 1742
}
|
class ____ {
private SecurityMockMvcResultHandlers() {
}
/**
* Exports the {@link SecurityContext} from {@link TestSecurityContextHolder} to
* {@link SecurityContextHolder}
*/
public static ResultHandler exportTestSecurityContext() {
return new ExportTestSecurityContextHandler();
}
/**
* A {@link ResultHandler} that copies the {@link SecurityContext} from
* {@link TestSecurityContextHolder} to {@link SecurityContextHolder}
*
* @author Marcus da Coregio
* @since 5.6
*/
private static
|
SecurityMockMvcResultHandlers
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/ExecutionGraphInfoTest.java
|
{
"start": 1479,
"end": 2937
}
|
class ____ {
@Test
void testExecutionGraphHistoryBeingDerivedFromFailedExecutionGraph() {
final ArchivedExecutionGraph executionGraph =
ArchivedExecutionGraph.createSparseArchivedExecutionGraph(
new JobID(),
"test job name",
JobStatus.FAILED,
JobType.STREAMING,
new RuntimeException("Expected RuntimeException"),
null,
System.currentTimeMillis());
final ExecutionGraphInfo executionGraphInfo = new ExecutionGraphInfo(executionGraph);
assertThat(executionGraphInfo.getArchivedExecutionGraph().getJobType())
.isEqualTo(JobType.STREAMING);
final ErrorInfo failureInfo =
executionGraphInfo.getArchivedExecutionGraph().getFailureInfo();
final RootExceptionHistoryEntry actualEntry =
Iterables.getOnlyElement(executionGraphInfo.getExceptionHistory());
assertThat(failureInfo).isNotNull();
assertThat(failureInfo.getException()).isEqualTo(actualEntry.getException());
assertThat(failureInfo.getTimestamp()).isEqualTo(actualEntry.getTimestamp());
assertThat(actualEntry.isGlobal()).isTrue();
assertThat(actualEntry.getFailingTaskName()).isNull();
assertThat(actualEntry.getTaskManagerLocation()).isNull();
}
}
|
ExecutionGraphInfoTest
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/DisplayNameGenerator.java
|
{
"start": 6770,
"end": 7128
}
|
class ____ by
* {@code testMethod.getDeclaringClass()} — for example, when a test
* method is inherited from a superclass.
*
* @param enclosingInstanceTypes the runtime types of the enclosing
* instances for the test class, ordered from outermost to innermost,
* excluding {@code testClass}; never {@code null}
* @param testClass the
|
returned
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/operators/CoGroupTaskTest.java
|
{
"start": 2078,
"end": 13301
}
|
class ____ extends DriverTestBase<CoGroupFunction<Record, Record, Record>> {
private static final long SORT_MEM = 3 * 1024 * 1024;
@SuppressWarnings("unchecked")
private final RecordComparator comparator1 =
new RecordComparator(
new int[] {0}, (Class<? extends Value>[]) new Class[] {IntValue.class});
@SuppressWarnings("unchecked")
private final RecordComparator comparator2 =
new RecordComparator(
new int[] {0}, (Class<? extends Value>[]) new Class[] {IntValue.class});
private final CountingOutputCollector output = new CountingOutputCollector();
CoGroupTaskTest(ExecutionConfig config) {
super(config, 0, 2, SORT_MEM);
}
@TestTemplate
void testSortBoth1CoGroupTask() throws Exception {
int keyCnt1 = 100;
int valCnt1 = 2;
int keyCnt2 = 200;
int valCnt2 = 1;
final int expCnt =
valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2)
+ (keyCnt1 > keyCnt2
? (keyCnt1 - keyCnt2) * valCnt1
: (keyCnt2 - keyCnt1) * valCnt2);
setOutput(this.output);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
addInputSorted(
new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
addInputSorted(
new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
testDriver(testTask, MockCoGroupStub.class);
assertThat(this.output.getNumberOfRecords())
.withFailMessage("Wrong result set size.")
.isEqualTo(expCnt);
}
@TestTemplate
void testSortBoth2CoGroupTask() throws Exception {
int keyCnt1 = 200;
int valCnt1 = 2;
int keyCnt2 = 200;
int valCnt2 = 4;
final int expCnt =
valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2)
+ (keyCnt1 > keyCnt2
? (keyCnt1 - keyCnt2) * valCnt1
: (keyCnt2 - keyCnt1) * valCnt2);
setOutput(this.output);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
addInputSorted(
new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
addInputSorted(
new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
testDriver(testTask, MockCoGroupStub.class);
assertThat(this.output.getNumberOfRecords())
.withFailMessage("Wrong result set size.")
.isEqualTo(expCnt);
}
@TestTemplate
void testSortFirstCoGroupTask() throws Exception {
int keyCnt1 = 200;
int valCnt1 = 2;
int keyCnt2 = 200;
int valCnt2 = 4;
final int expCnt =
valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2)
+ (keyCnt1 > keyCnt2
? (keyCnt1 - keyCnt2) * valCnt1
: (keyCnt2 - keyCnt1) * valCnt2);
setOutput(this.output);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
addInputSorted(
new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, true));
testDriver(testTask, MockCoGroupStub.class);
assertThat(this.output.getNumberOfRecords())
.withFailMessage("Wrong result set size.")
.isEqualTo(expCnt);
}
@TestTemplate
void testSortSecondCoGroupTask() throws Exception {
int keyCnt1 = 200;
int valCnt1 = 2;
int keyCnt2 = 200;
int valCnt2 = 4;
final int expCnt =
valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2)
+ (keyCnt1 > keyCnt2
? (keyCnt1 - keyCnt2) * valCnt1
: (keyCnt2 - keyCnt1) * valCnt2);
setOutput(this.output);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, true));
addInputSorted(
new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
testDriver(testTask, MockCoGroupStub.class);
assertThat(this.output.getNumberOfRecords())
.withFailMessage("Wrong result set size.")
.isEqualTo(expCnt);
}
@TestTemplate
void testMergeCoGroupTask() throws Exception {
int keyCnt1 = 200;
int valCnt1 = 2;
int keyCnt2 = 200;
int valCnt2 = 4;
final int expCnt =
valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2)
+ (keyCnt1 > keyCnt2
? (keyCnt1 - keyCnt2) * valCnt1
: (keyCnt2 - keyCnt1) * valCnt2);
setOutput(this.output);
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, true));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, true));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
testDriver(testTask, MockCoGroupStub.class);
assertThat(this.output.getNumberOfRecords())
.withFailMessage("Wrong result set size.")
.isEqualTo(expCnt);
}
@TestTemplate
void testFailingSortCoGroupTask() {
int keyCnt1 = 100;
int valCnt1 = 2;
int keyCnt2 = 200;
int valCnt2 = 1;
setOutput(this.output);
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, true));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, true));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
assertThatThrownBy(() -> testDriver(testTask, MockFailingCoGroupStub.class))
.isInstanceOf(ExpectedTestException.class);
}
@TestTemplate
void testCancelCoGroupTaskWhileSorting1() throws Exception {
int keyCnt = 10;
int valCnt = 2;
setOutput(this.output);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
addInputSorted(new DelayingInfinitiveInputIterator(1000), this.comparator1.duplicate());
addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
CheckedThread taskRunner =
new CheckedThread() {
@Override
public void go() throws Exception {
testDriver(testTask, MockCoGroupStub.class);
}
};
taskRunner.start();
TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
tct.start();
tct.join();
taskRunner.sync();
}
@TestTemplate
void testCancelCoGroupTaskWhileSorting2() throws Exception {
int keyCnt = 10;
int valCnt = 2;
setOutput(this.output);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
addInputSorted(new DelayingInfinitiveInputIterator(1000), this.comparator2.duplicate());
CheckedThread taskRunner =
new CheckedThread() {
@Override
public void go() throws Exception {
testDriver(testTask, MockCoGroupStub.class);
}
};
taskRunner.start();
TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
tct.start();
tct.join();
taskRunner.sync();
}
@TestTemplate
void testCancelCoGroupTaskWhileCoGrouping() throws Exception {
int keyCnt = 100;
int valCnt = 5;
setOutput(this.output);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);
final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
final OneShotLatch delayCoGroupProcessingLatch = new OneShotLatch();
CheckedThread taskRunner =
new CheckedThread() {
@Override
public void go() throws Exception {
testDriver(
testTask, new MockDelayingCoGroupStub(delayCoGroupProcessingLatch));
}
};
taskRunner.start();
TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
tct.start();
tct.join();
delayCoGroupProcessingLatch.trigger();
taskRunner.sync();
}
public static
|
CoGroupTaskTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NullOptionalTest.java
|
{
"start": 1690,
"end": 2115
}
|
class ____ {
void a(Optional<Object> o) {}
void test() {
a(Optional.empty());
}
}
""")
.doTest();
}
@Test
public void annotatedWithNullable_noMatch() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.Optional;
import javax.annotation.Nullable;
|
Test
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/testing/springbootapplications/autoconfiguredwebservices/server/ExampleEndpoint.java
|
{
"start": 1050,
"end": 1244
}
|
class ____ {
@PayloadRoot(localPart = "ExampleRequest")
@ResponsePayload
public Source handleRequest() {
return new StringSource("<ExampleResponse>42</ExampleResponse>");
}
}
|
ExampleEndpoint
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/jdk8/SingleFlattenStreamAsFlowable.java
|
{
"start": 1142,
"end": 1657
}
|
class ____<T, R> extends Flowable<R> {
final Single<T> source;
final Function<? super T, ? extends Stream<? extends R>> mapper;
public SingleFlattenStreamAsFlowable(Single<T> source, Function<? super T, ? extends Stream<? extends R>> mapper) {
this.source = source;
this.mapper = mapper;
}
@Override
protected void subscribeActual(@NonNull Subscriber<? super R> s) {
source.subscribe(new FlattenStreamMultiObserver<>(s, mapper));
}
}
|
SingleFlattenStreamAsFlowable
|
java
|
quarkusio__quarkus
|
extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/ValidatorForEarlyInitializedBeanTest.java
|
{
"start": 1370,
"end": 1720
}
|
class ____ {
static boolean initInvoked = false;
// App scoped is activated very early (compared to observing Startup event)
void startUp(@Observes @Initialized(ApplicationScoped.class) final Object event) {
initInvoked = true;
}
void call(@NotNull final Object o) {
}
}
}
|
EagerInitBean
|
java
|
quarkusio__quarkus
|
devtools/cli-common/src/main/java/io/quarkus/cli/common/build/ExecuteUtil.java
|
{
"start": 384,
"end": 2555
}
|
class ____ {
private static ExecSupport withOutput(OutputOptionMixin output) {
return new ExecSupport(output.out(), output.err(), output.isVerbose(), output.isCliTest());
}
public static File findExecutableFile(String base) {
return Executable.findExecutableFile(base);
}
private static String findExecutable(String exec) {
return Executable.findExecutable(exec);
}
public static File findExecutable(String name, String errorMessage, OutputOptionMixin output) {
return Executable.findExecutable(name, errorMessage, output);
}
public static int executeProcess(OutputOptionMixin output, String[] args, File parentDir)
throws IOException, InterruptedException {
if (output.isVerbose()) {
output.out().println(String.join(" ", args));
output.out().println();
}
var holder = new Object() {
int exitCode;
};
io.smallrye.common.process.ProcessBuilder.Input<Void> pb = io.smallrye.common.process.ProcessBuilder.newBuilder(args[0])
.arguments(List.of(args).subList(1, args.length))
.directory(parentDir.toPath())
.exitCodeChecker(ec -> {
holder.exitCode = ec;
return true;
})
.softExitTimeout(null)
.hardExitTimeout(null)
.input().inherited();
if (output.isCliTest()) {
// We have to capture IO differently in tests..
pb.output().consumeWith(br -> br.lines().forEach(output.out()::println))
.error().consumeWith(br -> br.lines().forEach(output.err()::println))
.run();
} else {
pb.output().inherited()
.error().inherited()
.run();
}
if (holder.exitCode != 0) {
return SOFTWARE;
} else {
return OK;
}
}
public static File findWrapper(Path projectRoot, String[] windows, String other) {
return Executable.findWrapper(projectRoot, windows, other);
}
}
|
ExecuteUtil
|
java
|
quarkusio__quarkus
|
extensions/liquibase/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbBuildTimeConfig.java
|
{
"start": 677,
"end": 1019
}
|
interface ____ {
/**
* The liquibase configuration config by client name.
*/
@ConfigDocMapKey("client-name")
@ConfigDocSection
@WithParentName
@WithUnnamedKey(MongoConfig.DEFAULT_CLIENT_NAME)
@WithDefaults
Map<String, LiquibaseMongodbBuildTimeClientConfig> clientConfigs();
}
|
LiquibaseMongodbBuildTimeConfig
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/events/EventEngineContributionsTests.java
|
{
"start": 3944,
"end": 4739
}
|
class ____ implements EventEngineContributor {
/**
* Singleton access
*/
public static final TheContributor INSTANCE = new TheContributor();
private EventType<SexyRxySaveListener> saveEventType;
private EventType<SexyRxyPersistListener> persistEventType;
@Override
public void contribute(EventEngineContributions target) {
saveEventType = target.contributeEventType(
SexyRxySaveListener.EVENT_NAME,
SexyRxySaveListener.class,
SexyRxySaveListener.INSTANCE
);
persistEventType = target.contributeEventType(
SexyRxyPersistListener.EVENT_NAME,
SexyRxyPersistListener.class
);
target.configureListeners(
persistEventType,
(group) -> group.appendListener( SexyRxyPersistListener.INSTANCE )
);
}
}
public static
|
TheContributor
|
java
|
hibernate__hibernate-orm
|
hibernate-jcache/src/test/java/org/hibernate/orm/test/jcache/RefreshUpdatedDataTest.java
|
{
"start": 8730,
"end": 9516
}
|
class ____ {
@Id
@GeneratedValue(generator = "increment")
private Long id;
private String name;
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
@ElementCollection
private List<String> tags = new ArrayList<>();
public ReadWriteCacheableItem() {
}
public ReadWriteCacheableItem(String name) {
this.name = name;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<String> getTags() {
return tags;
}
}
@Entity(name = "RwVersionedItem")
@Table(name = "RW_VERSIONED_ITEM")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region = "item")
public static
|
ReadWriteCacheableItem
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/support/ActionFilters.java
|
{
"start": 739,
"end": 1178
}
|
class ____ {
private final ActionFilter[] filters;
public ActionFilters(Set<ActionFilter> actionFilters) {
this.filters = actionFilters.toArray(new ActionFilter[actionFilters.size()]);
Arrays.sort(filters, Comparator.comparingInt(ActionFilter::order));
}
/**
* Returns the action filters that have been injected
*/
public ActionFilter[] filters() {
return filters;
}
}
|
ActionFilters
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregator.java
|
{
"start": 2061,
"end": 4090
}
|
class ____ {
/**
* An empty tree of {@link PipelineAggregator}s.
*/
public static final PipelineTree EMPTY = new PipelineTree(emptyMap(), emptyList());
private final Map<String, PipelineTree> subTrees;
private final List<PipelineAggregator> aggregators;
public PipelineTree(Map<String, PipelineTree> subTrees, List<PipelineAggregator> aggregators) {
this.subTrees = subTrees;
this.aggregators = aggregators;
}
/**
* The {@link PipelineAggregator}s for the aggregation at this
* position in the tree.
*/
public List<PipelineAggregator> aggregators() {
return aggregators;
}
/**
* Get the sub-tree at for the named sub-aggregation or {@link #EMPTY}
* if there are no pipeline aggragations for that sub-aggregator.
*/
public PipelineTree subTree(String name) {
return subTrees.getOrDefault(name, EMPTY);
}
/**
* Return {@code true} if this node in the tree has any subtrees.
*/
public boolean hasSubTrees() {
return false == subTrees.isEmpty();
}
@Override
public String toString() {
return "PipelineTree[" + aggregators + "," + subTrees + "]";
}
}
private final String name;
private final String[] bucketsPaths;
private final Map<String, Object> metadata;
protected PipelineAggregator(String name, String[] bucketsPaths, Map<String, Object> metadata) {
this.name = name;
this.bucketsPaths = bucketsPaths;
this.metadata = metadata;
}
public String name() {
return name;
}
public String[] bucketsPaths() {
return bucketsPaths;
}
public Map<String, Object> metadata() {
return metadata;
}
public abstract InternalAggregation reduce(InternalAggregation aggregation, AggregationReduceContext reduceContext);
}
|
PipelineTree
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java
|
{
"start": 5740,
"end": 6630
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory str;
private final Pattern regex;
private final EvalOperator.ExpressionEvaluator.Factory newStr;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, Pattern regex,
EvalOperator.ExpressionEvaluator.Factory newStr) {
this.source = source;
this.str = str;
this.regex = regex;
this.newStr = newStr;
}
@Override
public ReplaceConstantEvaluator get(DriverContext context) {
return new ReplaceConstantEvaluator(source, str.get(context), regex, newStr.get(context), context);
}
@Override
public String toString() {
return "ReplaceConstantEvaluator[" + "str=" + str + ", regex=" + regex + ", newStr=" + newStr + "]";
}
}
}
|
Factory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/fst/Outputs.java
|
{
"start": 1360,
"end": 3727
}
|
class ____<T> {
// TODO: maybe change this API to allow for re-use of the
// output instances -- this is an insane amount of garbage
// (new object per byte/char/int) if eg used during
// analysis
/** Eg common("foobar", "food") -> "foo" */
public abstract T common(T output1, T output2);
/** Eg subtract("foobar", "foo") -> "bar" */
public abstract T subtract(T output, T inc);
/** Eg add("foo", "bar") -> "foobar" */
public abstract T add(T prefix, T output);
/** Encode an output value into a {@link DataOutput}. */
public abstract void write(T output, DataOutput out) throws IOException;
/**
* Encode an final node output value into a {@link DataOutput}. By default this just calls {@link
* #write(Object, DataOutput)}.
*/
public void writeFinalOutput(T output, DataOutput out) throws IOException {
write(output, out);
}
/** Decode an output value previously written with {@link #write(Object, DataOutput)}. */
public abstract T read(DataInput in) throws IOException;
/** Skip the output; defaults to just calling {@link #read} and discarding the result. */
public void skipOutput(DataInput in) throws IOException {
read(in);
}
/**
* Decode an output value previously written with {@link #writeFinalOutput(Object, DataOutput)}.
* By default this just calls {@link #read(DataInput)}.
*/
public T readFinalOutput(DataInput in) throws IOException {
return read(in);
}
/**
* Skip the output previously written with {@link #writeFinalOutput}; defaults to just calling
* {@link #readFinalOutput} and discarding the result.
*/
public void skipFinalOutput(DataInput in) throws IOException {
skipOutput(in);
}
/**
* NOTE: this output is compared with == so you must ensure that all methods return the single
* object if it's really no output
*/
public abstract T getNoOutput();
public abstract String outputToString(T output);
// TODO: maybe make valid(T output) public...? for asserts
public T merge(T first, T second) {
throw new UnsupportedOperationException();
}
/**
* Return memory usage for the provided output.
*
* @see Accountable
*/
public abstract long ramBytesUsed(T output);
}
|
Outputs
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/properties/XPathPropertyPlaceholderTest.java
|
{
"start": 1116,
"end": 2963
}
|
class ____ extends ContextTestSupport {
@Test
public void testFilter() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:output-filter");
mock.expectedMessageCount(1);
template.sendBody("direct:filter", "<greeting><text>Hello, world!</text></greeting>");
assertMockEndpointsSatisfied();
}
@Test
public void testChoice() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:output-choice");
mock.expectedMessageCount(1);
template.sendBody("direct:choice", "<greeting><text>Bye, world!</text></greeting>");
assertMockEndpointsSatisfied();
}
@Test
public void testChoice2() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:output-choice2");
mock.expectedMessageCount(1);
template.sendBody("direct:choice2", "<greeting><text>Bye, world!</text></greeting>");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
Properties prop = new Properties();
prop.put("foo", "//greeting/text = 'Hello, world!'");
prop.put("bar", "//greeting/text = 'Bye, world!'");
PropertiesComponent pc = context.getPropertiesComponent();
pc.setInitialProperties(prop);
from("direct:filter").filter().xpath("{{foo}}").log("Passed filter!").to("mock:output-filter");
from("direct:choice").choice().when(xpath("{{bar}}")).log("Passed choice!").to("mock:output-choice");
from("direct:choice2").choice().when().xpath("{{bar}}").log("Passed choice2!").to("mock:output-choice2");
}
};
}
}
|
XPathPropertyPlaceholderTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/support/jsse/SSLContextParametersTest.java
|
{
"start": 1370,
"end": 41384
}
|
class ____ extends AbstractJsseParametersTest {
@Test
public void testFilter() {
SSLContextParameters parameters = new SSLContextParameters();
Collection<String> result;
result = parameters.filter(null, Arrays.asList("SSLv3", "TLSv1", "TLSv1.1"), List.of(Pattern.compile("TLS.*")),
List.of());
assertEquals(2, result.size());
assertStartsWith(result, "TLS");
result = parameters.filter(null, Arrays.asList("SSLv3", "TLSv1", "TLSv1.1"), List.of(Pattern.compile(".*")),
List.of(Pattern.compile("SSL.*")));
assertEquals(2, result.size());
assertStartsWith(result, "TLS");
AssertionError error
= assertThrows(AssertionError.class, () -> assertStartsWith((String[]) null, "TLS"),
"We should got an exception here!");
assertTrue(error.getMessage().contains("The values should not be null"), "Get a wrong message");
}
@Test
public void testPropertyPlaceholders() throws Exception {
CamelContext camelContext = this.createPropertiesPlaceholderAwareContext();
KeyStoreParameters ksp = new KeyStoreParameters();
ksp.setCamelContext(camelContext);
ksp.setType("{{keyStoreParameters.type}}");
ksp.setProvider("{{keyStoreParameters.provider}}");
ksp.setResource("{{keyStoreParameters.resource}}");
ksp.setPassword("{{keyStoreParameters.password}}");
KeyManagersParameters kmp = new KeyManagersParameters();
kmp.setCamelContext(camelContext);
kmp.setKeyStore(ksp);
kmp.setKeyPassword("{{keyManagersParameters.keyPassword}}");
kmp.setAlgorithm("{{keyManagersParameters.algorithm}}");
kmp.setProvider("{{keyManagersParameters.provider}}");
TrustManagersParameters tmp = new TrustManagersParameters();
tmp.setCamelContext(camelContext);
tmp.setKeyStore(ksp);
tmp.setAlgorithm("{{trustManagersParameters.algorithm}}");
tmp.setProvider("{{trustManagersParameters.provider}}");
CipherSuitesParameters csp = new CipherSuitesParameters();
csp.setCipherSuite(Collections.singletonList("{{cipherSuite.0}}"));
SecureSocketProtocolsParameters sspp = new SecureSocketProtocolsParameters();
sspp.setSecureSocketProtocol(Collections.singletonList("{{secureSocketProtocol.0}}"));
SSLContextServerParameters scsp = new SSLContextServerParameters();
scsp.setCamelContext(camelContext);
scsp.setClientAuthentication("{{sslContextServerParameters.clientAuthentication}}");
SSLContextParameters scp = new SSLContextParameters();
scp.setCamelContext(camelContext);
scp.setKeyManagers(kmp);
scp.setTrustManagers(tmp);
scp.setServerParameters(scsp);
scp.setProvider("{{sslContextParameters.provider}}");
scp.setSecureSocketProtocol("{{sslContextParameters.protocol}}");
scp.setSessionTimeout("{{sslContextParameters.sessionTimeout}}");
scp.setCipherSuites(csp);
scp.setSecureSocketProtocols(sspp);
SSLContext context = scp.createSSLContext(null);
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertTrue(serverSocket.getNeedClientAuth());
context.getSocketFactory().createSocket();
context.createSSLEngine();
}
@Test
public void testServerParametersClientAuthentication() throws Exception {
SSLContext controlContext = SSLContext.getInstance("TLS");
controlContext.init(null, null, null);
SSLEngine controlEngine = controlContext.createSSLEngine();
SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();
SSLContextParameters scp = new SSLContextParameters();
SSLContextServerParameters scsp = new SSLContextServerParameters();
scp.setServerParameters(scsp);
SSLContext context = scp.createSSLContext(null);
SSLEngine engine = context.createSSLEngine();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(controlServerSocket.getWantClientAuth(), serverSocket.getWantClientAuth());
assertEquals(controlServerSocket.getNeedClientAuth(), serverSocket.getNeedClientAuth());
assertEquals(controlEngine.getWantClientAuth(), engine.getWantClientAuth());
assertEquals(controlEngine.getNeedClientAuth(), engine.getNeedClientAuth());
// ClientAuthentication - NONE
scsp.setClientAuthentication(ClientAuthentication.NONE.name());
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertFalse(serverSocket.getWantClientAuth());
assertFalse(serverSocket.getNeedClientAuth());
assertFalse(engine.getWantClientAuth());
assertFalse(engine.getNeedClientAuth());
// ClientAuthentication - WANT
scsp.setClientAuthentication(ClientAuthentication.WANT.name());
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertTrue(serverSocket.getWantClientAuth());
assertFalse(serverSocket.getNeedClientAuth());
assertTrue(engine.getWantClientAuth());
assertFalse(engine.getNeedClientAuth());
// ClientAuthentication - REQUIRE
scsp.setClientAuthentication(ClientAuthentication.REQUIRE.name());
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertFalse(serverSocket.getWantClientAuth());
assertTrue(serverSocket.getNeedClientAuth());
assertFalse(engine.getWantClientAuth());
assertTrue(engine.getNeedClientAuth());
}
@Test
public void testServerParameters() throws Exception {
SSLContext controlContext = SSLContext.getInstance("TLSv1.3");
controlContext.init(null, null, null);
SSLEngine controlEngine = controlContext.createSSLEngine();
SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket();
SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();
SSLContextParameters scp = new SSLContextParameters();
SSLContextServerParameters scsp = new SSLContextServerParameters();
scp.setServerParameters(scsp);
SSLContext context = scp.createSSLContext(null);
SSLEngine engine = context.createSSLEngine();
SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertArrayEquals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites());
assertArrayEquals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()),
serverSocket.getEnabledCipherSuites());
assertEquals(controlServerSocket.getWantClientAuth(), serverSocket.getWantClientAuth());
assertEquals(controlServerSocket.getNeedClientAuth(), serverSocket.getNeedClientAuth());
// No csp or filter on server params passes through shared config
scp.setCipherSuites(new CipherSuitesParameters());
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, engine.getEnabledCipherSuites().length);
assertEquals(0, socket.getEnabledCipherSuites().length);
assertEquals(0, serverSocket.getEnabledCipherSuites().length);
// Csp on server params
scp.setCipherSuites(null);
CipherSuitesParameters csp = new CipherSuitesParameters();
scsp.setCipherSuites(csp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertArrayEquals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites());
assertEquals(0, serverSocket.getEnabledCipherSuites().length);
// Cipher suites filter on server params
FilterParameters filter = new FilterParameters();
filter.getExclude().add(".*");
scsp.setCipherSuites(null);
scsp.setCipherSuitesFilter(filter);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertArrayEquals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites());
assertEquals(0, serverSocket.getEnabledCipherSuites().length);
// Csp on server overrides cipher suites filter on server
filter.getInclude().add(".*");
filter.getExclude().clear();
scsp.setCipherSuites(csp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertArrayEquals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites());
assertEquals(0, serverSocket.getEnabledCipherSuites().length);
// Sspp on server params
SecureSocketProtocolsParameters sspp = new SecureSocketProtocolsParameters();
scsp.setSecureSocketProtocols(sspp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertEquals(0, serverSocket.getEnabledProtocols().length);
// Secure socket protocols filter on client params
filter = new FilterParameters();
filter.getExclude().add(".*");
scsp.setSecureSocketProtocols(null);
scsp.setSecureSocketProtocolsFilter(filter);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertEquals(0, serverSocket.getEnabledProtocols().length);
// Sspp on client params overrides secure socket protocols filter on
// client
filter.getInclude().add(".*");
filter.getExclude().clear();
scsp.setSecureSocketProtocols(sspp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertEquals(0, serverSocket.getEnabledProtocols().length);
// Server session timeout only affects server session configuration
scsp.setSessionTimeout("12345");
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(controlContext.getClientSessionContext().getSessionTimeout(),
context.getClientSessionContext().getSessionTimeout());
assertEquals(12345, context.getServerSessionContext().getSessionTimeout());
}
private void checkProtocols(String[] control, String[] configured) {
// With the IBM JDK, an "default" unconfigured control socket is more
// restricted than with the Sun JDK. For example, with
// SSLContext.getInstance("TLS"), on Sun, you get
// TLSv1, SSLv3, SSLv2Hello
// but with IBM, you only get:
// TLSv1
// We'll check to make sure the "default" protocols are amongst the list
// that are in after configuration.
assertTrue(Arrays.asList(configured).containsAll(Arrays.asList(control)));
}
@Test
public void testClientParameters() throws Exception {
SSLContext controlContext = SSLContext.getInstance("TLSv1.3");
controlContext.init(null, null, null);
SSLEngine controlEngine = controlContext.createSSLEngine();
SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket();
SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();
SSLContextParameters scp = new SSLContextParameters();
SSLContextClientParameters sccp = new SSLContextClientParameters();
scp.setClientParameters(sccp);
SSLContext context = scp.createSSLContext(null);
SSLEngine engine = context.createSSLEngine();
SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertArrayEquals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites());
assertArrayEquals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()),
serverSocket.getEnabledCipherSuites());
// No csp or filter on client params passes through shared config
scp.setCipherSuites(new CipherSuitesParameters());
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, socket.getEnabledCipherSuites().length);
// Csp on client params
scp.setCipherSuites(null);
CipherSuitesParameters csp = new CipherSuitesParameters();
sccp.setCipherSuites(csp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertEquals(0, socket.getEnabledCipherSuites().length);
assertArrayEquals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()),
serverSocket.getEnabledCipherSuites());
// Cipher suites filter on client params
FilterParameters filter = new FilterParameters();
filter.getExclude().add(".*");
sccp.setCipherSuites(null);
sccp.setCipherSuitesFilter(filter);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertEquals(0, socket.getEnabledCipherSuites().length);
assertArrayEquals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()),
serverSocket.getEnabledCipherSuites());
// Csp on client overrides cipher suites filter on client
filter.getInclude().add(".*");
filter.getExclude().clear();
sccp.setCipherSuites(csp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertEquals(0, socket.getEnabledCipherSuites().length);
assertArrayEquals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()),
serverSocket.getEnabledCipherSuites());
// Sspp on client params
SecureSocketProtocolsParameters sspp = new SecureSocketProtocolsParameters();
sccp.setSecureSocketProtocols(sspp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertEquals(0, socket.getEnabledProtocols().length);
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
// Secure socket protocols filter on client params
filter = new FilterParameters();
filter.getExclude().add(".*");
sccp.setSecureSocketProtocols(null);
sccp.setSecureSocketProtocolsFilter(filter);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertEquals(0, socket.getEnabledProtocols().length);
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
// Sspp on client params overrides secure socket protocols filter on
// client
filter.getInclude().add(".*");
filter.getExclude().clear();
sccp.setSecureSocketProtocols(sspp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertEquals(0, socket.getEnabledProtocols().length);
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
// Client session timeout only affects client session configuration
sccp.setSessionTimeout("12345");
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(controlContext.getServerSessionContext().getSessionTimeout(),
context.getServerSessionContext().getSessionTimeout());
assertEquals(12345, context.getClientSessionContext().getSessionTimeout());
}
@Test
public void testCipherSuites() throws Exception {
SSLContext controlContext = SSLContext.getInstance("TLSv1.3");
controlContext.init(null, null, null);
SSLEngine controlEngine = controlContext.createSSLEngine();
SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket();
SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();
// default
SSLContextParameters scp = new SSLContextParameters();
SSLContext context = scp.createSSLContext(null);
SSLEngine engine = context.createSSLEngine();
SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertArrayEquals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites());
assertArrayEquals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()),
serverSocket.getEnabledCipherSuites());
// empty csp
CipherSuitesParameters csp = new CipherSuitesParameters();
scp.setCipherSuites(csp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, engine.getEnabledCipherSuites().length);
assertEquals(0, socket.getEnabledCipherSuites().length);
assertEquals(0, serverSocket.getEnabledCipherSuites().length);
// explicit csp
csp.setCipherSuite(Collections.singletonList(controlEngine.getEnabledCipherSuites()[0]));
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(1, engine.getEnabledCipherSuites().length);
assertEquals(controlEngine.getEnabledCipherSuites()[0], engine.getEnabledCipherSuites()[0]);
assertEquals(1, socket.getEnabledCipherSuites().length);
assertEquals(controlEngine.getEnabledCipherSuites()[0], socket.getEnabledCipherSuites()[0]);
assertEquals(1, serverSocket.getEnabledCipherSuites().length);
assertEquals(controlEngine.getEnabledCipherSuites()[0], serverSocket.getEnabledCipherSuites()[0]);
// explicit csp overrides filter
FilterParameters filter = new FilterParameters();
filter.getInclude().add(".*");
scp.setCipherSuitesFilter(filter);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(1, engine.getEnabledCipherSuites().length);
assertEquals(controlEngine.getEnabledCipherSuites()[0], engine.getEnabledCipherSuites()[0]);
assertEquals(1, socket.getEnabledCipherSuites().length);
assertEquals(controlEngine.getEnabledCipherSuites()[0], socket.getEnabledCipherSuites()[0]);
assertEquals(1, socket.getEnabledCipherSuites().length);
assertEquals(controlEngine.getEnabledCipherSuites()[0], serverSocket.getEnabledCipherSuites()[0]);
}
@Test
public void testCipherSuitesFilter() throws Exception {
SSLContext controlContext = SSLContext.getInstance("TLSv1.3");
controlContext.init(null, null, null);
SSLEngine controlEngine = controlContext.createSSLEngine();
SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket();
SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();
// default
SSLContextParameters scp = new SSLContextParameters();
SSLContext context = scp.createSSLContext(null);
CipherSuitesParameters csp = new CipherSuitesParameters();
scp.setCipherSuites(csp);
SSLEngine engine = context.createSSLEngine();
SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites());
assertArrayEquals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites());
assertArrayEquals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()),
serverSocket.getEnabledCipherSuites());
// empty filter
FilterParameters filter = new FilterParameters();
scp.setCipherSuitesFilter(filter);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, engine.getEnabledCipherSuites().length);
assertEquals(0, socket.getEnabledCipherSuites().length);
assertEquals(0, serverSocket.getEnabledCipherSuites().length);
// explicit filter
filter.getInclude().add(".*");
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, engine.getEnabledCipherSuites().length);
assertEquals(0, socket.getEnabledCipherSuites().length);
assertEquals(0, serverSocket.getEnabledCipherSuites().length);
// explicit filter with excludes (excludes overrides)
filter.getExclude().add(".*");
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, engine.getEnabledCipherSuites().length);
assertEquals(0, socket.getEnabledCipherSuites().length);
assertEquals(0, serverSocket.getEnabledCipherSuites().length);
// explicit filter single include
filter.getInclude().clear();
filter.getExclude().clear();
csp.setCipherSuite(Collections.singletonList("TLS_RSA_WITH_AES_128_CBC_SHA"));
filter.getInclude().add("TLS.*");
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
// not all platforms/JDKs have these cipher suites
if (!isPlatform("aix")) {
assertTrue(engine.getEnabledCipherSuites().length >= 1);
assertStartsWith(engine.getEnabledCipherSuites(), "TLS");
assertTrue(socket.getEnabledCipherSuites().length >= 1);
assertStartsWith(socket.getEnabledCipherSuites(), "TLS");
assertTrue(serverSocket.getEnabledCipherSuites().length >= 1);
assertStartsWith(serverSocket.getEnabledCipherSuites(), "TLS");
}
}
@Test
public void testSecureSocketProtocols() throws Exception {
SSLContext controlContext = SSLContext.getInstance("TLS");
controlContext.init(null, null, null);
// default
SSLContextParameters scp = new SSLContextParameters();
SSLContext context = scp.createSSLContext(null);
SSLEngine engine = context.createSSLEngine();
SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
// default disable the SSL* protocols
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
// checkProtocols(controlServerSocket.getEnabledProtocols(),
// serverSocket.getEnabledProtocols());
// empty sspp
SecureSocketProtocolsParameters sspp = new SecureSocketProtocolsParameters();
scp.setSecureSocketProtocols(sspp);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, engine.getEnabledProtocols().length);
assertEquals(0, socket.getEnabledProtocols().length);
assertEquals(0, serverSocket.getEnabledProtocols().length);
// explicit sspp
sspp.setSecureSocketProtocol(Collections.singletonList("TLSv1"));
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(1, engine.getEnabledProtocols().length);
assertEquals("TLSv1", engine.getEnabledProtocols()[0]);
assertEquals(1, socket.getEnabledProtocols().length);
assertEquals("TLSv1", socket.getEnabledProtocols()[0]);
assertEquals(1, serverSocket.getEnabledProtocols().length);
assertEquals("TLSv1", serverSocket.getEnabledProtocols()[0]);
// explicit sspp overrides filter
FilterParameters filter = new FilterParameters();
filter.getInclude().add(".*");
scp.setSecureSocketProtocolsFilter(filter);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
// not all platforms/JDKs have these cipher suites
if (!isPlatform("aix")) {
assertEquals(1, engine.getEnabledProtocols().length);
assertEquals("TLSv1", engine.getEnabledProtocols()[0]);
assertEquals(1, socket.getEnabledProtocols().length);
assertEquals("TLSv1", socket.getEnabledProtocols()[0]);
assertEquals(1, socket.getEnabledProtocols().length);
assertEquals("TLSv1", serverSocket.getEnabledProtocols()[0]);
}
}
@Test
public void testSecureSocketProtocolsFilter() throws Exception {
SSLContext controlContext = SSLContext.getInstance("TLSv1.3");
controlContext.init(null, null, null);
SSLEngine controlEngine = controlContext.createSSLEngine();
SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket();
SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();
// default
SSLContextParameters scp = new SSLContextParameters();
SSLContext context = scp.createSSLContext(null);
SSLEngine engine = context.createSSLEngine();
SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
// default disable the SSL* protocols
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
// empty filter
FilterParameters filter = new FilterParameters();
scp.setSecureSocketProtocolsFilter(filter);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, engine.getEnabledProtocols().length);
assertEquals(0, socket.getEnabledProtocols().length);
assertEquals(0, serverSocket.getEnabledProtocols().length);
// explicit filter
filter.getInclude().add(".*");
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertArrayEquals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols());
assertArrayEquals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols());
checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols());
// explicit filter with excludes (excludes overrides)
filter.getExclude().add(".*");
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(0, engine.getEnabledProtocols().length);
assertEquals(0, socket.getEnabledProtocols().length);
assertEquals(0, serverSocket.getEnabledProtocols().length);
// explicit filter single include
filter.getInclude().clear();
filter.getExclude().clear();
filter.getInclude().add("TLS.*");
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
// not all platforms/JDKs have these cipher suites
if (!isPlatform("aix")) {
assertTrue(engine.getEnabledProtocols().length >= 1);
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertTrue(socket.getEnabledProtocols().length >= 1);
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertTrue(socket.getEnabledProtocols().length >= 1);
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
}
}
@Test
public void testSessionTimeout() throws Exception {
SSLContextParameters scp = new SSLContextParameters();
scp.setSessionTimeout("60");
SSLContext context = scp.createSSLContext(null);
assertEquals(60, context.getClientSessionContext().getSessionTimeout());
assertEquals(60, context.getServerSessionContext().getSessionTimeout());
scp.setSessionTimeout("0");
context = scp.createSSLContext(null);
assertEquals(0, context.getClientSessionContext().getSessionTimeout());
assertEquals(0, context.getServerSessionContext().getSessionTimeout());
}
@Test
public void testDefaultSecureSocketProtocol() throws Exception {
SSLContextParameters scp = new SSLContextParameters();
SSLContext context = scp.createSSLContext(null);
assertEquals("TLSv1.3", context.getProtocol());
SSLEngine engine = context.createSSLEngine();
SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
// default disable the SSL* protocols
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
}
@Test
public void testSecureSocketProtocol() throws Exception {
SSLContextParameters scp = new SSLContextParameters();
scp.setSecureSocketProtocol("SSLv3");
SSLContext context = scp.createSSLContext(null);
assertEquals("SSLv3", context.getProtocol());
SSLEngine engine = context.createSSLEngine();
SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
// default disable the SSL* protocols
assertStartsWith(engine.getEnabledProtocols(), "TLS");
assertStartsWith(socket.getEnabledProtocols(), "TLS");
assertStartsWith(serverSocket.getEnabledProtocols(), "TLS");
// allow SSL* protocols by explicitly asking for them
final SecureSocketProtocolsParameters protocols = new SecureSocketProtocolsParameters();
protocols.setSecureSocketProtocol(Collections.singletonList("SSLv3"));
scp.setSecureSocketProtocols(protocols);
context = scp.createSSLContext(null);
engine = context.createSSLEngine();
socket = (SSLSocket) context.getSocketFactory().createSocket();
serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
assertEquals(1, engine.getEnabledProtocols().length);
assertEquals("SSLv3", engine.getEnabledProtocols()[0]);
assertEquals(1, socket.getEnabledProtocols().length);
assertEquals("SSLv3", socket.getEnabledProtocols()[0]);
assertEquals(1, serverSocket.getEnabledProtocols().length);
assertEquals("SSLv3", serverSocket.getEnabledProtocols()[0]);
}
@Test
public void testProvider() throws Exception {
SSLContextParameters scp = new SSLContextParameters();
scp.createSSLContext(null);
SSLContext context = scp.createSSLContext(null);
SSLContext defaultContext = SSLContext.getDefault();
assertEquals(defaultContext.getProvider().getName(), context.getProvider().getName());
}
protected String[] getDefaultCipherSuiteIncludes(String[] availableCipherSuites) {
List<String> enabled = new LinkedList<>();
for (String string : availableCipherSuites) {
if (!string.contains("_anon_") && !string.contains("_NULL_") && !string.contains("_EXPORT_")
&& !string.contains("_DES_")) {
enabled.add(string);
}
}
return enabled.toArray(new String[0]);
}
protected void assertStartsWith(String[] values, String prefix) {
assertNotNull(values, "The values should not be null");
for (String value : values) {
assertTrue(value.startsWith(prefix), value + " does not start with the prefix " + prefix);
}
}
protected void assertStartsWith(Collection<String> values, String prefix) {
assertNotNull(values, "The values should not be null");
for (String value : values) {
assertTrue(value.startsWith(prefix), value + " does not start with the prefix " + prefix);
}
}
}
|
SSLContextParametersTest
|
java
|
google__guava
|
guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java
|
{
"start": 46851,
"end": 47636
}
|
class ____ {
@Keep
public FailOnOneOfTwoConstructors(String s) {}
@Keep
public FailOnOneOfTwoConstructors(Object o) {
checkNotNull(o);
}
}
public void testConstructor_ignored_shouldPass() throws Exception {
new NullPointerTester()
.ignore(FailOnOneOfTwoConstructors.class.getDeclaredConstructor(String.class))
.testAllPublicConstructors(FailOnOneOfTwoConstructors.class);
}
public void testConstructor_shouldFail() throws Exception {
try {
new NullPointerTester().testAllPublicConstructors(FailOnOneOfTwoConstructors.class);
} catch (AssertionError expected) {
return;
}
fail("Should detect problem in " + FailOnOneOfTwoConstructors.class.getSimpleName());
}
public static
|
FailOnOneOfTwoConstructors
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/hql/SelectNewEmbeddedIdTest.java
|
{
"start": 3301,
"end": 3482
}
|
class ____ {
private Simple simple;
public Wrapper() {
}
public Wrapper(Simple simple) {
this.simple = simple;
}
public Wrapper(SimpleId simpleId) {
}
}
}
|
Wrapper
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractBeanDefinition.java
|
{
"start": 17018,
"end": 17393
}
|
class ____ [" + beanClassObject + "] has not been resolved into an actual Class");
}
return clazz;
}
/**
* Return whether this definition specifies a bean class.
* @see #getBeanClass()
* @see #setBeanClass(Class)
* @see #resolveBeanClass(ClassLoader)
*/
public boolean hasBeanClass() {
return (this.beanClass instanceof Class);
}
/**
* Determine the
|
name
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/ast/spi/AbstractSqlAstTranslator.java
|
{
"start": 282310,
"end": 299153
}
|
interface ____<X extends Expression> {
void renderComparison(final List<SqlSelection> lhsExpressions, X rhsExpression, ComparisonOperator operator);
}
/**
* An optimized emulation for relational tuple sub-query comparisons.
* The idea of this method is to use limit 1 to select the max or min tuple and only compare against that.
*/
protected void emulateQuantifiedTupleSubQueryPredicate(
Predicate predicate,
SelectStatement selectStatement,
SqlTuple lhsTuple,
ComparisonOperator tupleComparisonOperator) {
final QueryPart queryPart = selectStatement.getQueryPart();
final QuerySpec subQuery;
if ( queryPart instanceof QuerySpec querySpec
&& queryPart.getFetchClauseExpression() == null
&& queryPart.getOffsetClauseExpression() == null ) {
subQuery = querySpec;
// We can only emulate the tuple subquery predicate comparing against the top element when there are no limit/offsets
lhsTuple.accept( this );
appendSql( tupleComparisonOperator.sqlText() );
final QueryPart queryPartForRowNumbering = this.queryPartForRowNumbering;
final int queryPartForRowNumberingClauseDepth = this.queryPartForRowNumberingClauseDepth;
final boolean needsSelectAliases = this.needsSelectAliases;
try {
this.queryPartForRowNumbering = null;
this.queryPartForRowNumberingClauseDepth = -1;
this.needsSelectAliases = false;
queryPartStack.push( subQuery );
appendSql( OPEN_PARENTHESIS );
visitSelectClause( subQuery.getSelectClause() );
visitFromClause( subQuery.getFromClause() );
visitWhereClause( subQuery.getWhereClauseRestrictions() );
visitGroupByClause( subQuery, dialect.getGroupBySelectItemReferenceStrategy() );
visitHavingClause( subQuery );
appendSql( " order by " );
final List<SqlSelection> sqlSelections = subQuery.getSelectClause().getSqlSelections();
final String order;
if ( tupleComparisonOperator == ComparisonOperator.LESS_THAN
|| tupleComparisonOperator == ComparisonOperator.LESS_THAN_OR_EQUAL ) {
// Default order is asc so we don't need to specify the order explicitly
order = "";
}
else {
order = " desc";
}
appendSql( '1' );
appendSql( order );
for ( int i = 1; i < sqlSelections.size(); i++ ) {
appendSql( COMMA_SEPARATOR_CHAR );
appendSql( i + 1 );
appendSql( order );
}
renderFetch(
new QueryLiteral<>( 1, getIntegerType() ),
null,
FetchClauseType.ROWS_ONLY
);
appendSql( CLOSE_PARENTHESIS );
}
finally {
queryPartStack.pop();
this.queryPartForRowNumbering = queryPartForRowNumbering;
this.queryPartForRowNumberingClauseDepth = queryPartForRowNumberingClauseDepth;
this.needsSelectAliases = needsSelectAliases;
}
}
else {
// TODO: We could use nested queries and use row numbers to emulate this
throw new IllegalArgumentException( "Can't emulate in predicate with tuples and limit/offset or set operations: " + predicate );
}
}
@Override
public void visitExistsPredicate(ExistsPredicate existsPredicate) {
if ( existsPredicate.isNegated() ) {
appendSql( "not " );
}
appendSql( "exists" );
existsPredicate.getExpression().accept( this );
}
@Override
public void visitJunction(Junction junction) {
if ( junction.isEmpty() ) {
return;
}
final Junction.Nature nature = junction.getNature();
final String separator = nature == Junction.Nature.CONJUNCTION ? " and " : " or ";
final List<Predicate> predicates = junction.getPredicates();
visitJunctionPredicate( nature, predicates.get( 0 ) );
for ( int i = 1; i < predicates.size(); i++ ) {
appendSql( separator );
visitJunctionPredicate( nature, predicates.get( i ) );
}
}
private void visitJunctionPredicate(Junction.Nature nature, Predicate p) {
if ( p instanceof Junction junction ) {
// If we have the same nature, or if this is a disjunction and the operand is a conjunction,
// then we don't need parenthesis, because the AND operator binds stronger
if ( nature == junction.getNature() || nature == Junction.Nature.DISJUNCTION ) {
p.accept( this );
}
else {
appendSql( OPEN_PARENTHESIS );
p.accept( this );
appendSql( CLOSE_PARENTHESIS );
}
}
else {
p.accept( this );
}
}
@Override
public void visitLikePredicate(LikePredicate likePredicate) {
if ( likePredicate.isCaseSensitive() ) {
likePredicate.getMatchExpression().accept( this );
if ( likePredicate.isNegated() ) {
appendSql( " not" );
}
appendSql( " like " );
renderLikePredicate( likePredicate );
}
else {
if ( dialect.supportsCaseInsensitiveLike() ) {
likePredicate.getMatchExpression().accept( this );
if ( likePredicate.isNegated() ) {
appendSql( " not" );
}
appendSql( WHITESPACE );
appendSql( dialect.getCaseInsensitiveLike() );
appendSql( WHITESPACE );
renderLikePredicate( likePredicate );
}
else {
renderCaseInsensitiveLikeEmulation(likePredicate.getMatchExpression(), likePredicate.getPattern(), likePredicate.getEscapeCharacter(), likePredicate.isNegated());
}
}
}
protected void renderLikePredicate(LikePredicate likePredicate) {
likePredicate.getPattern().accept( this );
if ( likePredicate.getEscapeCharacter() != null ) {
appendSql( " escape " );
likePredicate.getEscapeCharacter().accept( this );
}
}
protected void renderCaseInsensitiveLikeEmulation(Expression lhs, Expression rhs, Expression escapeCharacter, boolean negated) {
//LOWER(lhs) operator LOWER(rhs)
appendSql( dialect.getLowercaseFunction() );
appendSql( OPEN_PARENTHESIS );
lhs.accept( this );
appendSql( CLOSE_PARENTHESIS );
if ( negated ) {
appendSql( " not" );
}
appendSql( " like " );
appendSql( dialect.getLowercaseFunction() );
appendSql( OPEN_PARENTHESIS );
rhs.accept( this );
appendSql( CLOSE_PARENTHESIS );
if ( escapeCharacter != null ) {
appendSql( " escape " );
escapeCharacter.accept( this );
}
}
protected void renderBackslashEscapedLikePattern(
Expression pattern,
Expression escapeCharacter,
boolean noBackslashEscapes) {
// Check if escapeCharacter was explicitly set and do nothing in that case
// Note: this does not cover cases where it's set via parameter binding
boolean isExplicitEscape = false;
if ( escapeCharacter instanceof Literal literal ) {
final Object literalValue = literal.getLiteralValue();
isExplicitEscape = literalValue != null && !literalValue.toString().equals( "" );
}
if ( isExplicitEscape ) {
pattern.accept( this );
}
else {
// Since escape with empty or null character is ignored we need
// four backslashes to render a single one in a like pattern
if ( pattern instanceof Literal literal ) {
final Object literalValue = literal.getLiteralValue();
if ( literalValue == null ) {
pattern.accept( this );
}
else {
appendBackslashEscapedLikeLiteral( this, literalValue.toString(), noBackslashEscapes );
}
}
else {
// replace(<pattern>,'\\','\\\\')
appendSql( "replace" );
appendSql( OPEN_PARENTHESIS );
pattern.accept( this );
if ( noBackslashEscapes ) {
appendSql( ",'\\','\\\\'" );
}
else {
appendSql( ",'\\\\','\\\\\\\\'" );
}
appendSql( CLOSE_PARENTHESIS );
}
}
}
protected void appendBackslashEscapedLikeLiteral(SqlAppender appender, String literal, boolean noBackslashEscapes) {
appender.appendSql( '\'' );
for ( int i = 0; i < literal.length(); i++ ) {
final char c = literal.charAt( i );
switch ( c ) {
case '\'':
appender.appendSql( '\'' );
break;
case '\\':
if ( noBackslashEscapes ) {
appender.appendSql( '\\' );
}
else {
appender.appendSql( "\\\\\\" );
}
break;
}
appender.appendSql( c );
}
appender.appendSql( '\'' );
}
@Override
public void visitNegatedPredicate(NegatedPredicate negatedPredicate) {
if ( !negatedPredicate.isEmpty() ) {
appendSql( "not(" );
negatedPredicate.getPredicate().accept( this );
appendSql( CLOSE_PARENTHESIS );
}
}
@Override
public void visitNullnessPredicate(NullnessPredicate nullnessPredicate) {
final Expression expression = nullnessPredicate.getExpression();
final String predicateValue = nullnessPredicate.isNegated() ? " is not null" : " is null";
final SqlTuple tuple;
if ( ( tuple = getSqlTuple( expression ) ) != null ) {
String separator = NO_SEPARATOR;
// HQL has different semantics for the not null check on embedded attribute mappings
// as the embeddable is not considered as null, if at least one sub-part is not null
if ( nullnessPredicate.isNegated()
&& expression.getExpressionType() instanceof AttributeMapping ) {
appendSql( '(' );
for ( Expression exp : tuple.getExpressions() ) {
appendSql( separator );
exp.accept( this );
appendSql( predicateValue );
separator = " or ";
}
appendSql( ')' );
}
// For the is null check, and also for tuples in SQL in general,
// the semantics is that all sub-parts must match the predicate
else {
for ( Expression exp : tuple.getExpressions() ) {
appendSql( separator );
exp.accept( this );
appendSql( predicateValue );
separator = " and ";
}
}
return;
}
expression.accept( this );
appendSql( predicateValue );
}
@Override
public void visitThruthnessPredicate(ThruthnessPredicate thruthnessPredicate) {
if ( dialect.supportsIsTrue() ) {
thruthnessPredicate.getExpression().accept( this );
appendSql(" is ");
if ( thruthnessPredicate.isNegated() ) {
appendSql("not ");
}
appendSql( thruthnessPredicate.getBooleanValue() );
}
else {
String literalTrue = dialect.toBooleanValueString(true);
String literalFalse = dialect.toBooleanValueString(false);
appendSql("(case ");
thruthnessPredicate.getExpression().accept(this);
appendSql(" when ");
appendSql(thruthnessPredicate.getBooleanValue() ? literalTrue : literalFalse);
appendSql(" then ");
appendSql(thruthnessPredicate.isNegated()? literalFalse : literalTrue);
appendSql(" when ");
appendSql(thruthnessPredicate.getBooleanValue() ? literalFalse : literalTrue);
appendSql(" then ");
appendSql(thruthnessPredicate.isNegated()? literalTrue : literalFalse);
appendSql(" else ");
appendSql(thruthnessPredicate.isNegated()? literalTrue : literalFalse);
appendSql(" end = ");
appendSql(literalTrue);
appendSql(")");
}
}
@Override
public void visitRelationalPredicate(ComparisonPredicate comparisonPredicate) {
// todo (6.0) : do we want to allow multi-valued parameters in a relational predicate?
// yes means we'd have to support dynamically converting this predicate into
// an IN predicate or an OR predicate
//
// NOTE: JPA does not define support for multi-valued parameters here.
//
// If we decide to support that ^^ we should validate that *both* sides of the
// predicate are multi-valued parameters. because...
// well... its stupid :)
final SqlTuple lhsTuple;
final SqlTuple rhsTuple;
if ( ( lhsTuple = getSqlTuple( comparisonPredicate.getLeftHandExpression() ) ) != null ) {
final Expression rhsExpression = comparisonPredicate.getRightHandExpression();
final boolean all;
final SelectStatement subquery;
// Handle emulation of quantified comparison
if ( rhsExpression instanceof SelectStatement selectStatement ) {
subquery = selectStatement;
all = true;
}
else if ( rhsExpression instanceof Every every ) {
subquery = every.getSubquery();
all = true;
}
else if ( rhsExpression instanceof Any any ) {
subquery = any.getSubquery();
all = false;
}
else {
subquery = null;
all = false;
}
final ComparisonOperator operator = comparisonPredicate.getOperator();
if ( lhsTuple.getExpressions().size() == 1 ) {
// Special case for tuples with arity 1 as any DBMS supports scalar IN predicates
if ( subquery == null && (rhsTuple = getSqlTuple(
comparisonPredicate.getRightHandExpression() )) != null ) {
renderComparison(
lhsTuple.getExpressions().get( 0 ),
operator,
rhsTuple.getExpressions().get( 0 )
);
}
else {
renderComparison( lhsTuple.getExpressions().get( 0 ), operator, rhsExpression );
}
}
else if ( subquery != null && !dialect.supportsRowValueConstructorSyntaxInQuantifiedPredicates() ) {
// For quantified relational comparisons, we can do an optimized emulation
if ( !needsTupleComparisonEmulation( operator ) && all ) {
switch ( operator ) {
case LESS_THAN:
case LESS_THAN_OR_EQUAL:
case GREATER_THAN:
case GREATER_THAN_OR_EQUAL: {
emulateQuantifiedTupleSubQueryPredicate(
comparisonPredicate,
subquery,
lhsTuple,
operator
);
return;
}
case NOT_EQUAL:
case EQUAL:
case DISTINCT_FROM:
case NOT_DISTINCT_FROM: {
// For this special case, we can rely on scalar subquery handling,
// given that the subquery fetches only one row
if ( isFetchFirstRowOnly( subquery.getQueryPart() ) ) {
renderComparison( lhsTuple, operator, subquery );
return;
}
}
}
}
emulateSubQueryRelationalRestrictionPredicate(
comparisonPredicate,
all,
subquery,
lhsTuple,
this::renderSelectTupleComparison,
all ? operator.negated() : operator
);
}
else if ( needsTupleComparisonEmulation( operator ) ) {
rhsTuple = getSqlTuple( rhsExpression );
assert rhsTuple != null;
// If the DB supports tuples in the IN list predicate, use that syntax as it's more concise
if ( ( operator == ComparisonOperator.EQUAL || operator == ComparisonOperator.NOT_EQUAL )
&& dialect.supportsRowValueConstructorSyntaxInInList() ) {
comparisonPredicate.getLeftHandExpression().accept( this );
if ( operator == ComparisonOperator.NOT_EQUAL ) {
appendSql( " not" );
}
appendSql( " in (" );
rhsTuple.accept( this );
appendSql( CLOSE_PARENTHESIS );
}
else {
emulateTupleComparison(
lhsTuple.getExpressions(),
rhsTuple.getExpressions(),
operator,
true
);
}
}
else {
renderComparison( comparisonPredicate.getLeftHandExpression(), operator, rhsExpression );
}
}
else if ( ( rhsTuple = getSqlTuple( comparisonPredicate.getRightHandExpression() ) ) != null ) {
final Expression lhsExpression = comparisonPredicate.getLeftHandExpression();
if ( lhsExpression instanceof SqlTupleContainer
|| lhsExpression instanceof SelectStatement selectStatement
&& selectStatement.getQueryPart() instanceof QueryGroup ) {
if ( rhsTuple.getExpressions().size() == 1 ) {
// Special case for tuples with arity 1 as any DBMS supports scalar IN predicates
renderComparison(
lhsExpression,
comparisonPredicate.getOperator(),
rhsTuple.getExpressions().get( 0 )
);
}
else if ( !needsTupleComparisonEmulation( comparisonPredicate.getOperator() ) ) {
renderComparison(
lhsExpression,
comparisonPredicate.getOperator(),
comparisonPredicate.getRightHandExpression()
);
}
else {
emulateSubQueryRelationalRestrictionPredicate(
comparisonPredicate,
false,
(SelectStatement) lhsExpression,
rhsTuple,
this::renderSelectTupleComparison,
// Since we switch the order of operands, we have to invert the operator
comparisonPredicate.getOperator().invert()
);
}
}
else {
throw new IllegalStateException(
"Unsupported tuple comparison combination. LHS is neither a tuple nor a tuple subquery but RHS is a tuple: " + comparisonPredicate );
}
}
else {
renderComparison(
comparisonPredicate.getLeftHandExpression(),
comparisonPredicate.getOperator(),
comparisonPredicate.getRightHandExpression()
);
}
}
private boolean needsTupleComparisonEmulation(ComparisonOperator operator) {
if ( !dialect.supportsRowValueConstructorSyntax() ) {
return true;
}
return switch (operator) {
case LESS_THAN, LESS_THAN_OR_EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL ->
!dialect.supportsRowValueConstructorGtLtSyntax();
case DISTINCT_FROM, NOT_DISTINCT_FROM ->
!dialect.supportsRowValueConstructorDistinctFromSyntax();
default -> false;
};
}
/**
* Returns a table expression that has one row.
*
* @return the SQL equivalent to Oracle's {@code dual}.
*/
protected String getDual() {
return dialect.getDual();
}
protected String getFromDualForSelectOnly() {
return dialect.getFromDualForSelectOnly();
}
protected
|
SubQueryRelationalRestrictionEmulationRenderer
|
java
|
quarkusio__quarkus
|
extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/handlers/JacksonUtil.java
|
{
"start": 108,
"end": 433
}
|
class ____ {
public static String getText(String name, JsonNode node) {
JsonNode e = node.get(name);
return e == null ? null : e.asText();
}
public static Long getLong(String name, JsonNode node) {
JsonNode e = node.get(name);
return e == null ? null : e.asLong();
}
}
|
JacksonUtil
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/integration/S3UploadInputStreamMultipartNoStreamCacheIT.java
|
{
"start": 1503,
"end": 3525
}
|
class ____ extends Aws2S3Base {
@EndpointInject("mock:result")
private MockEndpoint result;
@Test
public void sendInputStream() throws Exception {
result.expectedMessageCount(1);
Exchange out = template.send("direct:stream1", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(AWS2S3Constants.KEY, "empty-big.bin");
exchange.getIn().setBody(new FileInputStream("src/test/resources/empty-big.bin"));
}
});
Assertions.assertFalse(out.isFailed());
MockEndpoint.assertIsSatisfied(context);
Exchange ex = template.request("direct:listObjects", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.S3_OPERATION, AWS2S3Operations.listObjects);
}
});
List<S3Object> resp = ex.getMessage().getBody(List.class);
assertEquals(1, resp.size());
assertEquals(1 * Files.size(Paths.get("src/test/resources/empty-big.bin")),
resp.stream().mapToLong(S3Object::size).sum());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.setStreamCaching(false);
String awsEndpoint1
= String.format(
"aws2-s3://%s?autoCreateBucket=true&keyName=fileTest.txt&multiPartUpload=true&partSize=6000000",
name.get());
from("direct:stream1").to(awsEndpoint1).to("mock:result");
String awsEndpoint = String.format("aws2-s3://%s?autoCreateBucket=true",
name.get());
from("direct:listObjects").to(awsEndpoint);
}
};
}
}
|
S3UploadInputStreamMultipartNoStreamCacheIT
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionServiceTests.java
|
{
"start": 17221,
"end": 19714
}
|
class ____ extends NoOpClient {
private TriFunction<ActionType<?>, ActionRequest, ActionListener<?>, ActionResponse> verifier = (a, r, l) -> {
fail("verifier not set");
return null;
};
VerifyingClient(ThreadPool threadPool) {
super(threadPool);
}
@Override
@SuppressWarnings("unchecked")
protected <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
ActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
try {
listener.onResponse((Response) verifier.apply(action, request, listener));
} catch (Exception e) {
listener.onFailure(e);
}
}
public void setVerifier(TriFunction<ActionType<?>, ActionRequest, ActionListener<?>, ActionResponse> verifier) {
this.verifier = verifier;
}
}
private List<AnalyticsCollection> awaitGetAnalyticsCollections(
AnalyticsCollectionService analyticsCollectionService,
ClusterState clusterState,
String... collectionName
) throws Exception {
GetAnalyticsCollectionAction.Request request = new GetAnalyticsCollectionAction.Request(TEST_REQUEST_TIMEOUT, collectionName);
return new Executor<>(clusterState, analyticsCollectionService::getAnalyticsCollection).execute(request).getAnalyticsCollections();
}
private PutAnalyticsCollectionAction.Response awaitPutAnalyticsCollection(
AnalyticsCollectionService analyticsCollectionService,
ClusterState clusterState,
String collectionName
) throws Exception {
PutAnalyticsCollectionAction.Request request = new PutAnalyticsCollectionAction.Request(TEST_REQUEST_TIMEOUT, collectionName);
return new Executor<>(clusterState, analyticsCollectionService::putAnalyticsCollection).execute(request);
}
private AcknowledgedResponse awaitDeleteAnalyticsCollection(
AnalyticsCollectionService analyticsCollectionService,
ClusterState clusterState,
String collectionName
) throws Exception {
DeleteAnalyticsCollectionAction.Request request = new DeleteAnalyticsCollectionAction.Request(TEST_REQUEST_TIMEOUT, collectionName);
return new Executor<>(clusterState, analyticsCollectionService::deleteAnalyticsCollection).execute(request);
}
private static
|
VerifyingClient
|
java
|
apache__maven
|
compat/maven-model-builder/src/main/java/org/apache/maven/model/interpolation/StringVisitorModelInterpolator.java
|
{
"start": 3518,
"end": 3607
}
|
class ____ extends AbstractStringBasedModelInterpolator {
|
StringVisitorModelInterpolator
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/FormLoginConfigurerTests.java
|
{
"start": 30603,
"end": 30778
}
|
class ____ {
@GetMapping("/profile")
@PreAuthorize("@authz.hasAuthority('profile:read')")
String profile() {
return "profile";
}
}
public static
|
BasicMfaController
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/cache/CacheOnClassAndMethodsImplementationTest.java
|
{
"start": 2069,
"end": 2335
}
|
interface ____ {
@Path("with")
@GET
String with();
@Path("without")
@GET
String without();
}
@Cache(sMaxAge = 100, noTransform = true, proxyRevalidate = true, noCache = true)
public static
|
IResourceWithCache
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/SafeCopyProperty.java
|
{
"start": 979,
"end": 1587
}
|
interface ____ be set as key value pair on exchange object via
* {@link ExchangeExtension#setSafeCopyProperty(String, SafeCopyProperty)}.
*
* When exchange object is copied it will invoke {@link SafeCopyProperty#safeCopy()} method on properties set using
* {@link ExchangeExtension#setSafeCopyProperty(String, SafeCopyProperty)}. This allows the property value object to
* return a copy object to be set on the target exchange object instead of the original value object. This protects the
* properties from unintended mutation when using parallelProcessing in Multicast or RecipientList EIP
*/
public
|
can
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/DaggerSuperficialValidationTest.java
|
{
"start": 23111,
"end": 23200
}
|
class ____ {",
" Child getChild() { return null; }",
" static
|
Outer
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/EventHubsEndpointBuilderFactory.java
|
{
"start": 1461,
"end": 1607
}
|
interface ____ {
/**
* Builder for endpoint consumers for the Azure Event Hubs component.
*/
public
|
EventHubsEndpointBuilderFactory
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/TestWithGenerics.java
|
{
"start": 1841,
"end": 1974
}
|
class ____<T>{
public T value;
public MyParam() { }
public MyParam(T v) { value = v; }
}
static
|
MyParam
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/support/logging/NoLoggingImpl.java
|
{
"start": 667,
"end": 2711
}
|
class ____ implements Log {
private int infoCount;
private int errorCount;
private int warnCount;
private int debugCount;
private String loggerName;
private boolean debugEnable;
private boolean infoEnable = true;
private boolean warnEnable = true;
private boolean errorEnable = true;
public NoLoggingImpl(String loggerName) {
this.loggerName = loggerName;
}
public String getLoggerName() {
return this.loggerName;
}
public boolean isDebugEnabled() {
return debugEnable;
}
public void error(String s, Throwable e) {
if (!errorEnable) {
return;
}
error(s);
if (e != null) {
e.printStackTrace();
}
}
public void error(String s) {
errorCount++;
if (s != null) {
System.err.println(loggerName + " : " + s);
}
}
public void debug(String s) {
debugCount++;
}
public void debug(String s, Throwable e) {
debugCount++;
}
public void warn(String s) {
warnCount++;
}
@Override
public void warn(String s, Throwable e) {
warnCount++;
}
public int getErrorCount() {
return errorCount;
}
@Override
public int getWarnCount() {
return warnCount;
}
@Override
public void resetStat() {
errorCount = 0;
warnCount = 0;
infoCount = 0;
debugCount = 0;
}
@Override
public boolean isInfoEnabled() {
return infoEnable;
}
@Override
public void info(String s) {
infoCount++;
}
@Override
public boolean isWarnEnabled() {
return warnEnable;
}
public int getInfoCount() {
return infoCount;
}
public int getDebugCount() {
return debugCount;
}
public boolean isErrorEnabled() {
return errorEnable;
}
public void setErrorEnabled(boolean value) {
this.errorEnable = value;
}
}
|
NoLoggingImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/beanvalidation/MergeNotNullCollectionUsingIdentityTest.java
|
{
"start": 1802,
"end": 2614
}
|
class ____ {
@Test
@FailureExpected(jiraKey = "HHH-9979")
void testOneToManyNotNullCollection(SessionFactoryScope scope) {
Parent parent = new Parent();
Child child = new Child();
List<Child> children = new ArrayList<>();
children.add( child );
child.setParent( parent );
parent.setChildren( children );
Parent p = scope.fromTransaction( s -> s.merge( parent ) );
scope.inTransaction( s -> s.remove( p ) );
}
@Test
void testOneToManyNullCollection(SessionFactoryScope scope) {
Parent parent = new Parent();
Child child = new Child();
child.setParent( parent );
assertThatThrownBy( () -> scope.fromTransaction( s -> s.merge( parent ) ) )
.isInstanceOf( ConstraintViolationException.class );
}
@Entity
@Table(name = "PARENT")
static
|
MergeNotNullCollectionUsingIdentityTest
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/web/server/DefaultServerOAuth2AuthorizationRequestResolverTests.java
|
{
"start": 2465,
"end": 13602
}
|
class ____ {
@Mock
private ReactiveClientRegistrationRepository clientRegistrationRepository;
private DefaultServerOAuth2AuthorizationRequestResolver resolver;
private ClientRegistration registration = TestClientRegistrations.clientRegistration().build();
@BeforeEach
public void setup() {
this.resolver = new DefaultServerOAuth2AuthorizationRequestResolver(this.clientRegistrationRepository);
}
@Test
public void setAuthorizationRequestCustomizerWhenNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.resolver.setAuthorizationRequestCustomizer(null));
}
@Test
public void resolveWhenNotMatchThenNull() {
assertThat(resolve("/")).isNull();
}
@Test
public void resolveWhenClientRegistrationNotFoundMatchThenBadRequest() {
given(this.clientRegistrationRepository.findByRegistrationId(any())).willReturn(Mono.empty());
assertThatExceptionOfType(ResponseStatusException.class)
.isThrownBy(() -> resolve("/oauth2/authorization/not-found-id"))
.satisfies((ex) -> assertThat(ex.getStatusCode()).isEqualTo(HttpStatus.BAD_REQUEST));
}
@Test
public void resolveWhenClientRegistrationFoundThenWorks() {
given(this.clientRegistrationRepository.findByRegistrationId(any())).willReturn(Mono.just(this.registration));
OAuth2AuthorizationRequest request = resolve("/oauth2/authorization/not-found-id");
assertThat(request.getAuthorizationRequestUri())
.matches("https://example.com/login/oauth/authorize\\?" + "response_type=code&client_id=client-id&"
+ "scope=read:user&state=.*?&" + "redirect_uri=/login/oauth2/code/registration-id"
+ "&code_challenge=([a-zA-Z0-9\\-\\.\\_\\~]){43}&code_challenge_method=S256");
}
@Test
public void resolveWhenForwardedHeadersClientRegistrationFoundThenWorks() {
given(this.clientRegistrationRepository.findByRegistrationId(any())).willReturn(Mono.just(this.registration));
// @formatter:off
MockServerHttpRequest.BaseBuilder<?> httpRequest = MockServerHttpRequest
.get("/oauth2/authorization/id")
.header("X-Forwarded-Host", "evil.com");
// @formatter:on
ServerWebExchange exchange = MockServerWebExchange.from(httpRequest);
OAuth2AuthorizationRequest request = this.resolver.resolve(exchange).block();
assertThat(request.getAuthorizationRequestUri())
.matches("https://example.com/login/oauth/authorize\\?" + "response_type=code&client_id=client-id&"
+ "scope=read:user&state=.*?&" + "redirect_uri=/login/oauth2/code/registration-id"
+ "&code_challenge=([a-zA-Z0-9\\-\\.\\_\\~]){43}&code_challenge_method=S256");
}
@Test
public void resolveWhenAuthorizationRequestWithValidPublicClientThenResolves() {
given(this.clientRegistrationRepository.findByRegistrationId(any()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration()
.clientAuthenticationMethod(ClientAuthenticationMethod.NONE)
.clientSecret(null)
.build()));
OAuth2AuthorizationRequest request = resolve("/oauth2/authorization/registration-id");
assertThat((String) request.getAttribute(PkceParameterNames.CODE_VERIFIER))
.matches("^([a-zA-Z0-9\\-\\.\\_\\~]){128}$");
assertThat(request.getAuthorizationRequestUri())
.matches("https://example.com/login/oauth/authorize\\?" + "response_type=code&client_id=client-id&"
+ "scope=read:user&state=.*?&" + "redirect_uri=/login/oauth2/code/registration-id&"
+ "code_challenge=([a-zA-Z0-9\\-\\.\\_\\~]){43}&" + "code_challenge_method=S256");
}
// gh-6548
@Test
public void resolveWhenAuthorizationRequestApplyPkceToConfidentialClientsThenApplied() {
ClientRegistration registration1 = TestClientRegistrations.clientRegistration().build();
given(this.clientRegistrationRepository.findByRegistrationId(eq(registration1.getRegistrationId())))
.willReturn(Mono.just(registration1));
ClientRegistration registration2 = TestClientRegistrations.clientRegistration2().build();
given(this.clientRegistrationRepository.findByRegistrationId(eq(registration2.getRegistrationId())))
.willReturn(Mono.just(registration2));
OAuth2AuthorizationRequest request = resolve("/oauth2/authorization/" + registration1.getRegistrationId());
assertPkceApplied(request, registration1);
request = resolve("/oauth2/authorization/" + registration2.getRegistrationId());
assertPkceApplied(request, registration2);
}
@Test
void resolveWhenRequireProofKeyTrueThenPkceEnabled() {
ClientRegistration.ClientSettings pkceEnabled = ClientRegistration.ClientSettings.builder()
.requireProofKey(true)
.build();
ClientRegistration clientWithPkceEnabled = TestClientRegistrations.clientRegistration()
.clientSettings(pkceEnabled)
.build();
given(this.clientRegistrationRepository.findByRegistrationId(any()))
.willReturn(Mono.just(clientWithPkceEnabled));
OAuth2AuthorizationRequest request = resolve(
"/oauth2/authorization/" + clientWithPkceEnabled.getRegistrationId());
assertPkceApplied(request, clientWithPkceEnabled);
}
private void assertPkceApplied(OAuth2AuthorizationRequest authorizationRequest,
ClientRegistration clientRegistration) {
assertThat(authorizationRequest.getAdditionalParameters()).containsKey(PkceParameterNames.CODE_CHALLENGE);
assertThat(authorizationRequest.getAdditionalParameters())
.contains(entry(PkceParameterNames.CODE_CHALLENGE_METHOD, "S256"));
assertThat(authorizationRequest.getAttributes()).containsKey(PkceParameterNames.CODE_VERIFIER);
assertThat((String) authorizationRequest.getAttribute(PkceParameterNames.CODE_VERIFIER))
.matches("^([a-zA-Z0-9\\-\\.\\_\\~]){128}$");
assertThat(authorizationRequest.getAuthorizationRequestUri())
.matches("https://example.com/login/oauth/authorize\\?" + "response_type=code&" + "client_id="
+ clientRegistration.getClientId() + "&" + "scope=read:user&" + "state=.{15,}&"
+ "redirect_uri=/login/oauth2/code/" + clientRegistration.getRegistrationId() + "&"
+ "code_challenge=([a-zA-Z0-9\\-\\.\\_\\~]){43}&" + "code_challenge_method=S256");
}
private void assertPkceNotApplied(OAuth2AuthorizationRequest authorizationRequest,
ClientRegistration clientRegistration) {
assertThat(authorizationRequest.getAdditionalParameters()).doesNotContainKey(PkceParameterNames.CODE_CHALLENGE);
assertThat(authorizationRequest.getAdditionalParameters())
.doesNotContainKey(PkceParameterNames.CODE_CHALLENGE_METHOD);
assertThat(authorizationRequest.getAttributes()).doesNotContainKey(PkceParameterNames.CODE_VERIFIER);
assertThat(authorizationRequest.getAuthorizationRequestUri())
.matches("https://example.com/login/oauth/authorize\\?" + "response_type=code&" + "client_id="
+ clientRegistration.getClientId() + "&" + "scope=read:user&" + "state=.{15,}&"
+ "redirect_uri=/login/oauth2/code/" + clientRegistration.getRegistrationId());
}
@Test
public void resolveWhenAuthenticationRequestWithValidOidcClientThenResolves() {
given(this.clientRegistrationRepository.findByRegistrationId(any()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration().scope(OidcScopes.OPENID).build()));
OAuth2AuthorizationRequest request = resolve("/oauth2/authorization/registration-id");
assertThat((String) request.getAttribute(OidcParameterNames.NONCE)).matches("^([a-zA-Z0-9\\-\\.\\_\\~]){128}$");
assertThat(request.getAuthorizationRequestUri()).matches("https://example.com/login/oauth/authorize\\?"
+ "response_type=code&client_id=client-id&" + "scope=openid&state=.*?&"
+ "redirect_uri=/login/oauth2/code/registration-id&" + "nonce=([a-zA-Z0-9\\-\\.\\_\\~]){43}&"
+ "code_challenge=([a-zA-Z0-9\\-\\.\\_\\~]){43}&" + "code_challenge_method=S256");
}
// gh-7696
@Test
public void resolveWhenAuthorizationRequestCustomizerRemovesNonceThenQueryExcludesNonce() {
given(this.clientRegistrationRepository.findByRegistrationId(any()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration().scope(OidcScopes.OPENID).build()));
this.resolver.setAuthorizationRequestCustomizer(
(builder) -> builder.additionalParameters((params) -> params.remove(OidcParameterNames.NONCE))
.attributes((attrs) -> attrs.remove(OidcParameterNames.NONCE)));
OAuth2AuthorizationRequest authorizationRequest = resolve("/oauth2/authorization/registration-id");
assertThat(authorizationRequest.getAdditionalParameters()).doesNotContainKey(OidcParameterNames.NONCE);
assertThat(authorizationRequest.getAttributes()).doesNotContainKey(OidcParameterNames.NONCE);
assertThat(authorizationRequest.getAttributes()).containsKey(OAuth2ParameterNames.REGISTRATION_ID);
assertThat(authorizationRequest.getAuthorizationRequestUri())
.matches("https://example.com/login/oauth/authorize\\?" + "response_type=code&client_id=client-id&"
+ "scope=openid&state=.{15,}&" + "redirect_uri=/login/oauth2/code/registration-id&"
+ "code_challenge=([a-zA-Z0-9\\-\\.\\_\\~]){43}&" + "code_challenge_method=S256");
}
@Test
public void resolveWhenAuthorizationRequestCustomizerAddsParameterThenQueryIncludesParameter() {
given(this.clientRegistrationRepository.findByRegistrationId(any()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration().scope(OidcScopes.OPENID).build()));
this.resolver.setAuthorizationRequestCustomizer((builder) -> builder.authorizationRequestUri((uriBuilder) -> {
uriBuilder.queryParam("param1", "value1");
return uriBuilder.build();
}));
OAuth2AuthorizationRequest authorizationRequest = resolve("/oauth2/authorization/registration-id");
assertThat(authorizationRequest.getAuthorizationRequestUri())
.matches("https://example.com/login/oauth/authorize\\?" + "response_type=code&client_id=client-id&"
+ "scope=openid&state=.{15,}&" + "redirect_uri=/login/oauth2/code/registration-id&"
+ "nonce=([a-zA-Z0-9\\-\\.\\_\\~]){43}&" + "code_challenge=([a-zA-Z0-9\\-\\.\\_\\~]){43}&"
+ "code_challenge_method=S256&" + "param1=value1");
}
@Test
public void resolveWhenAuthorizationRequestCustomizerOverridesParameterThenQueryIncludesParameter() {
given(this.clientRegistrationRepository.findByRegistrationId(any()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration().scope(OidcScopes.OPENID).build()));
this.resolver.setAuthorizationRequestCustomizer((builder) -> builder.parameters((params) -> {
params.put("appid", params.get("client_id"));
params.remove("client_id");
}));
OAuth2AuthorizationRequest authorizationRequest = resolve("/oauth2/authorization/registration-id");
assertThat(authorizationRequest.getAuthorizationRequestUri())
.matches("https://example.com/login/oauth/authorize\\?" + "response_type=code&"
+ "scope=openid&state=.{15,}&" + "redirect_uri=/login/oauth2/code/registration-id&"
+ "nonce=([a-zA-Z0-9\\-\\.\\_\\~]){43}&" + "code_challenge=([a-zA-Z0-9\\-\\.\\_\\~]){43}&"
+ "code_challenge_method=S256&" + "appid=client-id");
}
private OAuth2AuthorizationRequest resolve(String path) {
ServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get(path));
return this.resolver.resolve(exchange).block();
}
}
|
DefaultServerOAuth2AuthorizationRequestResolverTests
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/schema/DubboNamespaceHandlerTest.java
|
{
"start": 3237,
"end": 13763
}
|
class ____ {}
@Test
void testProviderXmlOnConfigurationClass() {
AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
applicationContext.register(XmlConfiguration.class);
applicationContext.refresh();
testProviderXml(applicationContext);
}
@Test
void testProviderXml() {
ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(
resourcePath + "/demo-provider.xml", resourcePath + "/demo-provider-properties.xml");
ctx.start();
testProviderXml(ctx);
}
private void testProviderXml(ApplicationContext context) {
String appName = "demo-provider";
String configId = ApplicationConfig.class.getName() + "#" + appName + "#0";
Map<String, ApplicationConfig> applicationConfigMap = context.getBeansOfType(ApplicationConfig.class);
ApplicationConfig providerAppConfig = context.getBean(configId, ApplicationConfig.class);
assertNotNull(providerAppConfig);
assertEquals(appName, providerAppConfig.getName());
// assertEquals(configId, providerAppConfig.getId());
ProtocolConfig protocolConfig = context.getBean(ProtocolConfig.class);
assertThat(protocolConfig, not(nullValue()));
assertThat(protocolConfig.getName(), is("dubbo"));
assertThat(protocolConfig.getPort(), is(20813));
ApplicationConfig applicationConfig = context.getBean(ApplicationConfig.class);
assertThat(applicationConfig, not(nullValue()));
assertThat(applicationConfig.getName(), is("demo-provider"));
RegistryConfig registryConfig = context.getBean(RegistryConfig.class);
assertThat(registryConfig, not(nullValue()));
assertThat(registryConfig.getAddress(), is("N/A"));
DemoService service = context.getBean(DemoService.class);
assertThat(service, not(nullValue()));
}
@Test
void testMultiProtocol() {
ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(resourcePath + "/multi-protocol.xml");
ctx.start();
Map<String, ProtocolConfig> protocolConfigMap = ctx.getBeansOfType(ProtocolConfig.class);
assertThat(protocolConfigMap.size(), is(2));
ConfigManager configManager = ApplicationModel.defaultModel().getApplicationConfigManager();
Collection<ProtocolConfig> protocolConfigs = configManager.getProtocols();
assertThat(protocolConfigs.size(), is(2));
ProtocolConfig rmiProtocolConfig = configManager.getProtocol("rmi").get();
assertThat(rmiProtocolConfig.getPort(), is(10991));
ProtocolConfig dubboProtocolConfig = configManager.getProtocol("dubbo").get();
assertThat(dubboProtocolConfig.getPort(), is(20881));
}
@Test
void testDefaultProtocol() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(resourcePath + "/override-protocol.xml");
ctx.start();
ProtocolConfig protocolConfig = ctx.getBean(ProtocolConfig.class);
protocolConfig.refresh();
assertThat(protocolConfig.getName(), is("dubbo"));
}
@Test
void testCustomParameter() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(resourcePath + "/customize-parameter.xml");
ctx.start();
ProtocolConfig protocolConfig = ctx.getBean(ProtocolConfig.class);
assertThat(protocolConfig.getParameters().size(), is(1));
assertThat(protocolConfig.getParameters().get("protocol-paramA"), is("protocol-paramA"));
ServiceBean serviceBean = ctx.getBean(ServiceBean.class);
assertThat(serviceBean.getParameters().size(), is(1));
assertThat(serviceBean.getParameters().get("service-paramA"), is("service-paramA"));
}
@Test
void testDelayFixedTime() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext("classpath:/" + resourcePath + "/delay-fixed-time.xml");
ctx.start();
assertThat(ctx.getBean(ServiceBean.class).getDelay(), is(300));
}
@Test
void testTimeoutConfig() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(resourcePath + "/provider-nested-service.xml");
ctx.start();
ModuleConfigManager configManager =
ApplicationModel.defaultModel().getDefaultModule().getConfigManager();
Collection<ProviderConfig> providerConfigs = configManager.getProviders();
Assertions.assertEquals(2, providerConfigs.size());
ProviderConfig defaultProvider = configManager.getDefaultProvider().get();
assertThat(defaultProvider.getTimeout(), is(2000));
ProviderConfig provider2 = configManager.getProvider("provider2").get();
ServiceConfigBase<Object> serviceConfig2 = configManager.getService("serviceConfig2");
Assertions.assertEquals(1000, provider2.getTimeout());
Assertions.assertEquals(provider2.getTimeout(), serviceConfig2.getTimeout());
}
@Test
void testMonitor() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(resourcePath + "/provider-with-monitor.xml");
ctx.start();
assertThat(ctx.getBean(MonitorConfig.class), not(nullValue()));
}
// @Test
// public void testMultiMonitor() {
// Assertions.assertThrows(BeanCreationException.class, () -> {
// ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(resourcePath +
// "/multi-monitor.xml");
// ctx.start();
// });
// }
//
// @Test
// public void testMultiProviderConfig() {
// Assertions.assertThrows(BeanCreationException.class, () -> {
// ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(resourcePath +
// "/provider-multi.xml");
// ctx.start();
// });
// }
@Test
void testModuleInfo() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(resourcePath + "/provider-with-module.xml");
ctx.start();
ModuleConfig moduleConfig = ctx.getBean(ModuleConfig.class);
assertThat(moduleConfig.getName(), is("test-module"));
}
@Test
void testNotificationWithWrongBean() {
Assertions.assertThrows(BeanCreationException.class, () -> {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(resourcePath + "/consumer-notification.xml");
ctx.start();
});
}
@Test
void testProperty() {
ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(resourcePath + "/service-class.xml");
ctx.start();
ServiceBean serviceBean = ctx.getBean(ServiceBean.class);
String prefix = ((DemoServiceImpl) serviceBean.getRef()).getPrefix();
assertThat(prefix, is("welcome:"));
}
@Test
void testMetricsAggregation() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(resourcePath + "/metrics-aggregation.xml");
ctx.start();
ConfigManager configManager = ApplicationModel.defaultModel().getApplicationConfigManager();
MetricsConfig metricsBean = ctx.getBean(MetricsConfig.class);
MetricsConfig metrics = configManager.getMetrics().get();
assertTrue(metrics.getEnableJvm());
assertEquals(metrics.getAggregation().getEnabled(), true);
assertEquals(metrics.getAggregation().getBucketNum(), 5);
assertEquals(metrics.getAggregation().getTimeWindowSeconds(), 120);
assertEquals(
metrics.getAggregation().getEnabled(),
metricsBean.getAggregation().getEnabled());
assertEquals(
metrics.getAggregation().getBucketNum(),
metricsBean.getAggregation().getBucketNum());
assertEquals(
metrics.getAggregation().getTimeWindowSeconds(),
metricsBean.getAggregation().getTimeWindowSeconds());
}
@Test
void testMetricsPrometheus() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(resourcePath + "/metrics-prometheus.xml");
ctx.start();
ConfigManager configManager = ApplicationModel.defaultModel().getApplicationConfigManager();
MetricsConfig metricsBean = ctx.getBean(MetricsConfig.class);
MetricsConfig metrics = configManager.getMetrics().get();
assertEquals(metrics.getProtocol(), PROTOCOL_PROMETHEUS);
assertEquals(metrics.getPrometheus().getExporter().getEnabled(), true);
assertEquals(metrics.getPrometheus().getExporter().getEnableHttpServiceDiscovery(), true);
assertEquals(metrics.getPrometheus().getExporter().getHttpServiceDiscoveryUrl(), "localhost:8080");
assertEquals(metrics.getPrometheus().getPushgateway().getEnabled(), true);
assertEquals(metrics.getPrometheus().getPushgateway().getBaseUrl(), "localhost:9091");
assertEquals(metrics.getPrometheus().getPushgateway().getPushInterval(), 30);
assertEquals(metrics.getPrometheus().getPushgateway().getUsername(), "username");
assertEquals(metrics.getPrometheus().getPushgateway().getPassword(), "password");
assertEquals(metrics.getPrometheus().getPushgateway().getJob(), "job");
assertEquals(metricsBean.getProtocol(), PROTOCOL_PROMETHEUS);
assertEquals(metricsBean.getPrometheus().getExporter().getEnabled(), true);
assertEquals(metricsBean.getPrometheus().getExporter().getEnableHttpServiceDiscovery(), true);
assertEquals(metricsBean.getPrometheus().getExporter().getHttpServiceDiscoveryUrl(), "localhost:8080");
assertEquals(metricsBean.getPrometheus().getPushgateway().getEnabled(), true);
assertEquals(metricsBean.getPrometheus().getPushgateway().getBaseUrl(), "localhost:9091");
assertEquals(metricsBean.getPrometheus().getPushgateway().getPushInterval(), 30);
assertEquals(metricsBean.getPrometheus().getPushgateway().getUsername(), "username");
assertEquals(metricsBean.getPrometheus().getPushgateway().getPassword(), "password");
assertEquals(metricsBean.getPrometheus().getPushgateway().getJob(), "job");
}
}
|
XmlConfiguration
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/clients/producer/internals/KafkaProducerMetricsTest.java
|
{
"start": 1060,
"end": 4160
}
|
class ____ {
private static final long METRIC_VALUE = 123L;
private static final String FLUSH_TIME_TOTAL = "flush-time-ns-total";
private static final String TXN_INIT_TIME_TOTAL = "txn-init-time-ns-total";
private static final String TXN_BEGIN_TIME_TOTAL = "txn-begin-time-ns-total";
private static final String TXN_COMMIT_TIME_TOTAL = "txn-commit-time-ns-total";
private static final String TXN_ABORT_TIME_TOTAL = "txn-abort-time-ns-total";
private static final String TXN_SEND_OFFSETS_TIME_TOTAL = "txn-send-offsets-time-ns-total";
private static final String METADATA_WAIT_TIME_TOTAL = "metadata-wait-time-ns-total";
private final Metrics metrics = new Metrics();
private final KafkaProducerMetrics producerMetrics = new KafkaProducerMetrics(metrics);
@Test
public void shouldRecordFlushTime() {
// When:
producerMetrics.recordFlush(METRIC_VALUE);
// Then:
assertMetricValue(FLUSH_TIME_TOTAL);
}
@Test
public void shouldRecordInitTime() {
// When:
producerMetrics.recordInit(METRIC_VALUE);
// Then:
assertMetricValue(TXN_INIT_TIME_TOTAL);
}
@Test
public void shouldRecordTxBeginTime() {
// When:
producerMetrics.recordBeginTxn(METRIC_VALUE);
// Then:
assertMetricValue(TXN_BEGIN_TIME_TOTAL);
}
@Test
public void shouldRecordTxCommitTime() {
// When:
producerMetrics.recordCommitTxn(METRIC_VALUE);
// Then:
assertMetricValue(TXN_COMMIT_TIME_TOTAL);
}
@Test
public void shouldRecordTxAbortTime() {
// When:
producerMetrics.recordAbortTxn(METRIC_VALUE);
// Then:
assertMetricValue(TXN_ABORT_TIME_TOTAL);
}
@Test
public void shouldRecordSendOffsetsTime() {
// When:
producerMetrics.recordSendOffsets(METRIC_VALUE);
// Then:
assertMetricValue(TXN_SEND_OFFSETS_TIME_TOTAL);
}
@Test
public void shouldRecordMetadataWaitTime() {
// When:
producerMetrics.recordMetadataWait(METRIC_VALUE);
// Then:
assertMetricValue(METADATA_WAIT_TIME_TOTAL);
}
@Test
public void shouldRemoveMetricsOnClose() {
// When:
producerMetrics.close();
// Then:
assertMetricRemoved(FLUSH_TIME_TOTAL);
assertMetricRemoved(TXN_INIT_TIME_TOTAL);
assertMetricRemoved(TXN_BEGIN_TIME_TOTAL);
assertMetricRemoved(TXN_COMMIT_TIME_TOTAL);
assertMetricRemoved(TXN_ABORT_TIME_TOTAL);
assertMetricRemoved(TXN_SEND_OFFSETS_TIME_TOTAL);
assertMetricRemoved(METADATA_WAIT_TIME_TOTAL);
}
private void assertMetricRemoved(final String name) {
assertNull(metrics.metric(metrics.metricName(name, KafkaProducerMetrics.GROUP)));
}
private void assertMetricValue(final String name) {
assertEquals(
(double) METRIC_VALUE,
metrics.metric(metrics.metricName(name, KafkaProducerMetrics.GROUP)).metricValue()
);
}
}
|
KafkaProducerMetricsTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/Spr10744Tests.java
|
{
"start": 2778,
"end": 2977
}
|
class ____ extends MyConfiguration {
@Bean
@Scope(value = "myTestScope", proxyMode = ScopedProxyMode.TARGET_CLASS)
@Override
public Foo foo() {
return new Foo();
}
}
}
|
MyTestConfiguration
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/client/StatusHandler.java
|
{
"start": 6384,
"end": 6485
}
|
interface ____ {
boolean test(ClientHttpResponse response) throws IOException;
}
}
|
ResponsePredicate
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/CancelDelegationTokenRequest.java
|
{
"start": 1234,
"end": 1821
}
|
class ____ {
@Private
@Unstable
public static CancelDelegationTokenRequest newInstance(Token dToken) {
CancelDelegationTokenRequest request =
Records.newRecord(CancelDelegationTokenRequest.class);
request.setDelegationToken(dToken);
return request;
}
/**
* Get the delegation token requested to be cancelled.
* @return the delegation token requested to be cancelled.
*/
@Private
@Unstable
public abstract Token getDelegationToken();
@Private
@Unstable
public abstract void setDelegationToken(Token dToken);
}
|
CancelDelegationTokenRequest
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/TestTaskStateManagerBuilder.java
|
{
"start": 1710,
"end": 4818
}
|
class ____ {
private JobID jobID = new JobID();
private ExecutionAttemptID executionAttemptID = createExecutionAttemptId();
private CheckpointResponder checkpointResponder = new TestCheckpointResponder();
private LocalRecoveryConfig localRecoveryConfig = TestLocalRecoveryConfig.disabled();
@Nullable
private StateChangelogStorage<?> stateChangelogStorage = new InMemoryStateChangelogStorage();
private final Map<Long, TaskStateSnapshot> jobManagerTaskStateSnapshotsByCheckpointId =
new HashMap<>();
private long reportedCheckpointId = -1L;
private OneShotLatch waitForReportLatch = new OneShotLatch();
public TestTaskStateManagerBuilder setJobID(JobID jobID) {
this.jobID = checkNotNull(jobID);
return this;
}
public TestTaskStateManagerBuilder setExecutionAttemptID(
ExecutionAttemptID executionAttemptID) {
this.executionAttemptID = checkNotNull(executionAttemptID);
return this;
}
public TestTaskStateManagerBuilder setCheckpointResponder(
CheckpointResponder checkpointResponder) {
this.checkpointResponder = checkNotNull(checkpointResponder);
return this;
}
public TestTaskStateManagerBuilder setLocalRecoveryConfig(
LocalRecoveryConfig localRecoveryConfig) {
this.localRecoveryConfig = checkNotNull(localRecoveryConfig);
return this;
}
public TestTaskStateManagerBuilder setStateChangelogStorage(
StateChangelogStorage<?> stateChangelogStorage) {
Preconditions.checkState(
this.stateChangelogStorage == null
|| this.stateChangelogStorage instanceof InMemoryStateChangelogStorage,
"StateChangelogStorage was already initialized to " + this.stateChangelogStorage);
this.stateChangelogStorage = stateChangelogStorage;
return this;
}
public TestTaskStateManagerBuilder setJobManagerTaskStateSnapshotsByCheckpointId(
Map<Long, TaskStateSnapshot> jobManagerTaskStateSnapshotsByCheckpointId) {
this.jobManagerTaskStateSnapshotsByCheckpointId.clear();
this.jobManagerTaskStateSnapshotsByCheckpointId.putAll(
jobManagerTaskStateSnapshotsByCheckpointId);
return this;
}
public TestTaskStateManagerBuilder setReportedCheckpointId(long reportedCheckpointId) {
this.reportedCheckpointId = reportedCheckpointId;
return this;
}
public TestTaskStateManagerBuilder setWaitForReportLatch(OneShotLatch waitForReportLatch) {
this.waitForReportLatch = checkNotNull(waitForReportLatch);
return this;
}
public TestTaskStateManager build() {
return new TestTaskStateManager(
jobID,
executionAttemptID,
checkpointResponder,
localRecoveryConfig,
stateChangelogStorage,
jobManagerTaskStateSnapshotsByCheckpointId,
reportedCheckpointId,
waitForReportLatch);
}
}
|
TestTaskStateManagerBuilder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
|
{
"start": 2186,
"end": 4301
}
|
enum ____.
*
* @param columnFamily
* that this column is stored in.
* @param columnPrefix
* for this column.
*/
private FlowActivityColumnPrefix(
ColumnFamily<FlowActivityTable> columnFamily, String columnPrefix,
AggregationOperation aggOp) {
this(columnFamily, columnPrefix, aggOp, false);
}
private FlowActivityColumnPrefix(
ColumnFamily<FlowActivityTable> columnFamily, String columnPrefix,
AggregationOperation aggOp, boolean compoundColQual) {
this.valueConverter = GenericConverter.getInstance();
this.columnFamily = columnFamily;
this.columnPrefix = columnPrefix;
if (columnPrefix == null) {
this.columnPrefixBytes = null;
} else {
// Future-proof by ensuring the right column prefix hygiene.
this.columnPrefixBytes = Bytes.toBytes(Separator.SPACE
.encode(columnPrefix));
}
this.aggOp = aggOp;
}
/**
* @return the column name value
*/
public String getColumnPrefix() {
return columnPrefix;
}
@Override
public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) {
return ColumnHelper.getColumnQualifier(
this.columnPrefixBytes, qualifierPrefix);
}
@Override
public byte[] getColumnPrefixBytes(String qualifierPrefix) {
return ColumnHelper.getColumnQualifier(
this.columnPrefixBytes, qualifierPrefix);
}
public byte[] getColumnPrefixBytes() {
return columnPrefixBytes.clone();
}
@Override
public byte[] getColumnFamilyBytes() {
return columnFamily.getBytes();
}
@Override
public byte[] getColumnPrefixInBytes() {
return columnPrefixBytes != null ? columnPrefixBytes.clone() : null;
}
@Override
public ValueConverter getValueConverter() {
return valueConverter;
}
@Override
public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) {
return HBaseTimelineSchemaUtils.combineAttributes(attributes, aggOp);
}
@Override
public boolean supplementCellTimeStamp() {
return false;
}
public AggregationOperation getAttribute() {
return aggOp;
}
}
|
definition
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/PreconditionsCheckNotNullRepeatedTest.java
|
{
"start": 1683,
"end": 2175
}
|
class ____ {
public void error() {
Object someObject = new Object();
Preconditions.checkNotNull(someObject, someObject);
checkNotNull(someObject, someObject);
}
}
""")
.addOutputLines(
"out/Test.java",
"""
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.Preconditions;
public
|
Test
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/AbstractIterator.java
|
{
"start": 2490,
"end": 2706
}
|
class ____<T extends @Nullable Object> extends UnmodifiableIterator<T> {
private State state = State.NOT_READY;
/** Constructor for use by subclasses. */
protected AbstractIterator() {}
private
|
AbstractIterator
|
java
|
apache__flink
|
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dql/SqlShowCurrentCatalog.java
|
{
"start": 1251,
"end": 1841
}
|
class ____ extends SqlCall {
public static final SqlSpecialOperator OPERATOR =
new SqlSpecialOperator("SHOW CURRENT CATALOG", SqlKind.OTHER);
public SqlShowCurrentCatalog(SqlParserPos pos) {
super(pos);
}
@Override
public SqlOperator getOperator() {
return OPERATOR;
}
@Override
public List<SqlNode> getOperandList() {
return Collections.emptyList();
}
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("SHOW CURRENT CATALOG");
}
}
|
SqlShowCurrentCatalog
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java
|
{
"start": 4358,
"end": 4654
}
|
class ____ {",
" @Provides String provideString() { return \"\"; }",
" @Provides A provideA2(String s) { return new A() {}; }",
" }",
"",
" @Module(includes = { Module1.class, Module2.class})",
" abstract static
|
Module2
|
java
|
elastic__elasticsearch
|
modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java
|
{
"start": 1173,
"end": 2861
}
|
class ____ extends AbstractBulkByQueryRestHandler<UpdateByQueryRequest, UpdateByQueryAction> {
private final Predicate<NodeFeature> clusterSupportsFeature;
public RestUpdateByQueryAction(Predicate<NodeFeature> clusterSupportsFeature) {
super(UpdateByQueryAction.INSTANCE);
this.clusterSupportsFeature = clusterSupportsFeature;
}
@Override
public List<Route> routes() {
return List.of(new Route(POST, "/{index}/_update_by_query"));
}
@Override
public String getName() {
return "update_by_query_action";
}
@Override
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
return doPrepareRequest(request, client, false, true);
}
@Override
protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOException {
/*
* Passing the search request through UpdateByQueryRequest first allows
* it to set its own defaults which differ from SearchRequest's
* defaults. Then the parse can override them.
*/
UpdateByQueryRequest internal = new UpdateByQueryRequest();
Map<String, Consumer<Object>> consumers = new HashMap<>();
consumers.put("conflicts", o -> internal.setConflicts((String) o));
consumers.put("script", o -> internal.setScript(Script.parse(o)));
consumers.put("max_docs", s -> setMaxDocsValidateIdentical(internal, ((Number) s).intValue()));
parseInternalRequest(internal, request, clusterSupportsFeature, consumers);
internal.setPipeline(request.param("pipeline"));
return internal;
}
}
|
RestUpdateByQueryAction
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/ShardOperationFailedExceptionTests.java
|
{
"start": 2520,
"end": 3011
}
|
class ____ extends ShardOperationFailedException {
Failure(@Nullable String index, int shardId, String reason, RestStatus status, Throwable cause) {
super(index, shardId, reason, status, cause);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return null;
}
}
}
|
Failure
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NonFinalStaticFieldTest.java
|
{
"start": 2153,
"end": 2482
}
|
class ____ {
// BUG: Diagnostic contains:
public static String FOO = "";
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void negative() {
compilationTestHelper
.addSourceLines(
"Test.java",
"""
public
|
Test
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
|
{
"start": 2933,
"end": 11980
}
|
class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(DiskBalancerCluster.class);
private static final ObjectReader READER =
new ObjectMapper().readerFor(DiskBalancerCluster.class);
private final Set<String> exclusionList;
private final Set<String> inclusionList;
private ClusterConnector clusterConnector;
private List<DiskBalancerDataNode> nodes;
private String outputpath;
@JsonIgnore
private List<DiskBalancerDataNode> nodesToProcess;
@JsonIgnore
private final Map<String, DiskBalancerDataNode> ipList;
@JsonIgnore
private final Map<String, DiskBalancerDataNode> hostNames;
@JsonIgnore
private final Map<String, DiskBalancerDataNode> hostUUID;
private float threshold;
/**
* Empty Constructor needed by Jackson.
*/
public DiskBalancerCluster() {
nodes = new LinkedList<>();
exclusionList = new TreeSet<>();
inclusionList = new TreeSet<>();
ipList = new HashMap<>();
hostNames = new HashMap<>();
hostUUID = new HashMap<>();
}
/**
* Constructs a DiskBalancerCluster.
*
* @param connector - ClusterConnector
* @throws IOException
*/
public DiskBalancerCluster(ClusterConnector connector) throws IOException {
this();
Preconditions.checkNotNull(connector);
clusterConnector = connector;
}
/**
* Parses a Json string and converts to DiskBalancerCluster.
*
* @param json - Json String
* @return DiskBalancerCluster
* @throws IOException
*/
public static DiskBalancerCluster parseJson(String json) throws IOException {
return READER.readValue(json);
}
/**
* readClusterInfo connects to the cluster and reads the node's data. This
* data is used as basis of rest of computation in DiskBalancerCluster
*/
public void readClusterInfo() throws Exception {
Preconditions.checkNotNull(clusterConnector);
LOG.debug("Using connector : {}" , clusterConnector.getConnectorInfo());
nodes = clusterConnector.getNodes();
for(DiskBalancerDataNode node : nodes) {
if(node.getDataNodeIP()!= null && !node.getDataNodeIP().isEmpty()) {
ipList.put(node.getDataNodeIP(), node);
}
if(node.getDataNodeName() != null && !node.getDataNodeName().isEmpty()) {
// TODO : should we support Internationalized Domain Names ?
// Disk balancer assumes that host names are ascii. If not
// end user can always balance the node via IP address or DataNode UUID.
hostNames.put(node.getDataNodeName().toLowerCase(Locale.US), node);
}
if(node.getDataNodeUUID() != null && !node.getDataNodeUUID().isEmpty()) {
hostUUID.put(node.getDataNodeUUID(), node);
}
}
}
/**
* Gets all DataNodes in the Cluster.
*
* @return Array of DisKBalancerDataNodes
*/
public List<DiskBalancerDataNode> getNodes() {
return nodes;
}
/**
* Sets the list of nodes of this cluster.
*
* @param clusterNodes List of Nodes
*/
public void setNodes(List<DiskBalancerDataNode> clusterNodes) {
this.nodes = clusterNodes;
}
/**
* Returns the current ExclusionList.
*
* @return List of Nodes that are excluded from diskBalancer right now.
*/
public Set<String> getExclusionList() {
return exclusionList;
}
/**
* sets the list of nodes to exclude from process of diskBalancer.
*
* @param excludedNodes - exclusionList of nodes.
*/
public void setExclusionList(Set<String> excludedNodes) {
this.exclusionList.addAll(excludedNodes);
}
/**
* Returns the threshold value. This is used for indicating how much skew is
* acceptable, This is expressed as a percentage. For example to say 20% skew
* between volumes is acceptable set this value to 20.
*
* @return float
*/
public float getThreshold() {
return threshold;
}
/**
* Sets the threshold value.
*
* @param thresholdPercent - float - in percentage
*/
public void setThreshold(float thresholdPercent) {
Preconditions.checkState((thresholdPercent >= 0.0f) &&
(thresholdPercent <= 100.0f), "A percentage value expected.");
this.threshold = thresholdPercent;
}
/**
* Gets the Inclusion list.
*
* @return List of machine to be processed by diskBalancer.
*/
public Set<String> getInclusionList() {
return inclusionList;
}
/**
* Sets the inclusionList.
*
* @param includeNodes - set of machines to be processed by diskBalancer.
*/
public void setInclusionList(Set<String> includeNodes) {
this.inclusionList.addAll(includeNodes);
}
/**
* returns a serialized json string.
*
* @return String - json
* @throws IOException
*/
public String toJson() throws IOException {
return JsonUtil.toJsonString(this);
}
/**
* Returns the Nodes to Process which is the real list of nodes processed by
* diskBalancer.
*
* @return List of DiskBalancerDataNodes
*/
@JsonIgnore
public List<DiskBalancerDataNode> getNodesToProcess() {
return nodesToProcess;
}
/**
* Sets the nodes to process.
*
* @param dnNodesToProcess - List of DataNodes to process
*/
@JsonIgnore
public void setNodesToProcess(List<DiskBalancerDataNode> dnNodesToProcess) {
this.nodesToProcess = dnNodesToProcess;
}
/**
* Returns th output path for this cluster.
*/
public String getOutput() {
return outputpath;
}
/**
* Sets the output path for this run.
*
* @param output - Path
*/
public void setOutput(String output) {
this.outputpath = output;
}
/**
* Writes a snapshot of the cluster to the specified directory.
*
* @param snapShotName - name of the snapshot
*/
public void createSnapshot(String snapShotName) throws IOException {
String json = this.toJson();
File outFile = new File(getOutput() + "/" + snapShotName);
FileUtils.writeStringToFile(outFile, json, StandardCharsets.UTF_8);
}
/**
* Compute plan takes a node and constructs a planner that creates a plan that
* we would like to follow.
* <p>
* This function creates a thread pool and executes a planner on each node
* that we are supposed to plan for. Each of these planners return a NodePlan
* that we can persist or schedule for execution with a diskBalancer
* Executor.
*
* @param thresholdPercent - in percentage
* @return list of NodePlans
*/
public List<NodePlan> computePlan(double thresholdPercent) {
List<NodePlan> planList = new LinkedList<>();
if (nodesToProcess == null) {
LOG.warn("Nodes to process is null. No nodes processed.");
return planList;
}
int poolSize = computePoolSize(nodesToProcess.size());
ExecutorService executorService = Executors.newFixedThreadPool(poolSize);
List<Future<NodePlan>> futureList = new LinkedList<>();
for (int x = 0; x < nodesToProcess.size(); x++) {
final DiskBalancerDataNode node = nodesToProcess.get(x);
final Planner planner = PlannerFactory
.getPlanner(PlannerFactory.GREEDY_PLANNER, node,
thresholdPercent);
futureList.add(executorService.submit(new Callable<NodePlan>() {
@Override
public NodePlan call() throws Exception {
assert planner != null;
return planner.plan(node);
}
}));
}
for (Future<NodePlan> f : futureList) {
try {
planList.add(f.get());
} catch (InterruptedException e) {
LOG.error("Compute Node plan was cancelled or interrupted : ", e);
} catch (ExecutionException e) {
LOG.error("Unable to compute plan : ", e);
}
}
return planList;
}
/**
* Return the number of threads we should launch for this cluster.
* <p/>
* Here is the heuristic we are using.
* <p/>
* 1 thread per 100 nodes that we want to process. Minimum nodesToProcess
* threads in the pool. Maximum 100 threads in the pool.
* <p/>
* Generally return a rounded up multiple of 10.
*
* @return number
*/
private int computePoolSize(int nodeCount) {
if (nodeCount < 10) {
return nodeCount;
}
int threadRatio = nodeCount / 100;
int modValue = threadRatio % 10;
if (((10 - modValue) + threadRatio) > 100) {
return 100;
} else {
return (10 - modValue) + threadRatio;
}
}
/**
* Returns a node by UUID.
* @param uuid - Node's UUID
* @return DiskBalancerDataNode.
*/
public DiskBalancerDataNode getNodeByUUID(String uuid) {
return hostUUID.get(uuid);
}
/**
* Returns a node by IP Address.
* @param ipAddresss - IP address String.
* @return DiskBalancerDataNode.
*/
public DiskBalancerDataNode getNodeByIPAddress(String ipAddresss) {
return ipList.get(ipAddresss);
}
/**
* Returns a node by hostName.
* @param hostName - HostName.
* @return DiskBalancerDataNode.
*/
public DiskBalancerDataNode getNodeByName(String hostName) {
return hostNames.get(hostName.toLowerCase(Locale.US));
}
}
|
DiskBalancerCluster
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/cache/spi/support/DomainDataStorageAccess.java
|
{
"start": 315,
"end": 728
}
|
interface ____ extends StorageAccess {
/**
* Specialized form of putting something into the cache
* in cases where the put is coming from a load (read) from
* the database
*
* @implNote the method default is to call {@link #putIntoCache}
*/
default void putFromLoad(Object key, Object value, SharedSessionContractImplementor session) {
putIntoCache( key, value, session );
}
}
|
DomainDataStorageAccess
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/AbstractServerHttpChannelObserver.java
|
{
"start": 1486,
"end": 12250
}
|
class ____<H extends HttpChannel> implements ServerHttpChannelObserver<H> {
private static final ErrorTypeAwareLogger LOGGER = getErrorTypeAwareLogger(AbstractServerHttpChannelObserver.class);
private final H httpChannel;
private List<BiConsumer<HttpHeaders, Throwable>> headersCustomizers;
private List<BiConsumer<HttpHeaders, Throwable>> trailersCustomizers;
private Function<Throwable, ?> exceptionCustomizer;
private HttpMessageEncoder responseEncoder;
private boolean headerSent;
private boolean completed;
private boolean closed;
protected AbstractServerHttpChannelObserver(H httpChannel) {
this.httpChannel = httpChannel;
}
@Override
public H getHttpChannel() {
return httpChannel;
}
@Override
public void addHeadersCustomizer(BiConsumer<HttpHeaders, Throwable> headersCustomizer) {
if (headersCustomizers == null) {
headersCustomizers = new ArrayList<>();
}
headersCustomizers.add(headersCustomizer);
}
@Override
public void addTrailersCustomizer(BiConsumer<HttpHeaders, Throwable> trailersCustomizer) {
if (trailersCustomizers == null) {
trailersCustomizers = new ArrayList<>();
}
trailersCustomizers.add(trailersCustomizer);
}
@Override
public void setExceptionCustomizer(Function<Throwable, ?> exceptionCustomizer) {
this.exceptionCustomizer = exceptionCustomizer;
}
public HttpMessageEncoder getResponseEncoder() {
return responseEncoder;
}
public void setResponseEncoder(HttpMessageEncoder responseEncoder) {
this.responseEncoder = responseEncoder;
}
@Override
public final void onNext(Object data) {
if (closed) {
return;
}
try {
doOnNext(data);
} catch (Throwable t) {
LOGGER.warn(INTERNAL_ERROR, "", "", "Error while doOnNext", t);
Throwable throwable = t;
try {
doOnError(throwable);
} catch (Throwable t1) {
LOGGER.warn(INTERNAL_ERROR, "", "", "Error while doOnError, original error: " + throwable, t1);
throwable = t1;
}
onCompleted(throwable);
}
}
@Override
public final void onError(Throwable throwable) {
if (closed) {
return;
}
try {
throwable = customizeError(throwable);
if (throwable == null) {
return;
}
} catch (Throwable t) {
LOGGER.warn(INTERNAL_ERROR, "", "", "Error while handleError, original error: " + throwable, t);
throwable = t;
}
try {
doOnError(throwable);
} catch (Throwable t) {
LOGGER.warn(INTERNAL_ERROR, "", "", "Error while doOnError, original error: " + throwable, t);
throwable = t;
}
onCompleted(throwable);
}
@Override
public final void onCompleted() {
if (closed) {
return;
}
onCompleted(null);
}
protected void doOnNext(Object data) throws Throwable {
int statusCode = resolveStatusCode(data);
if (!headerSent) {
sendMetadata(buildMetadata(statusCode, data, null, HttpOutputMessage.EMPTY_MESSAGE));
}
sendMessage(buildMessage(statusCode, data));
}
protected final int resolveStatusCode(Object data) {
if (data instanceof HttpResult) {
int status = ((HttpResult<?>) data).getStatus();
if (status >= 100) {
return status;
}
}
return HttpStatus.OK.getCode();
}
protected final HttpMetadata buildMetadata(
int statusCode, Object data, Throwable throwable, HttpOutputMessage message) {
HttpResponse response = RpcContext.getServiceContext().getResponse(HttpResponse.class);
HttpMetadata metadata = encodeHttpMetadata(message == null);
HttpHeaders headers = metadata.headers();
if (response != null && response.headers() != null) {
headers.set(response.headers());
}
headers.set(HttpHeaderNames.STATUS.getKey(), HttpUtils.toStatusString(statusCode));
if (message != null) {
headers.set(HttpHeaderNames.CONTENT_TYPE.getKey(), responseEncoder.contentType());
}
customizeHeaders(headers, throwable, message);
if (data instanceof HttpResult) {
HttpResult<?> result = (HttpResult<?>) data;
if (result.getHeaders() != null) {
headers.set(result.getHeaders());
}
}
return metadata;
}
protected abstract HttpMetadata encodeHttpMetadata(boolean endStream);
protected void customizeHeaders(HttpHeaders headers, Throwable throwable, HttpOutputMessage message) {
List<BiConsumer<HttpHeaders, Throwable>> headersCustomizers = this.headersCustomizers;
if (headersCustomizers != null) {
for (int i = 0, size = headersCustomizers.size(); i < size; i++) {
headersCustomizers.get(i).accept(headers, throwable);
}
}
}
protected final void sendMetadata(HttpMetadata metadata) {
if (headerSent) {
return;
}
getHttpChannel().writeHeader(metadata);
headerSent = true;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Http response headers sent: " + metadata.headers());
}
}
protected HttpOutputMessage buildMessage(int statusCode, Object data) throws Throwable {
if (statusCode < 200 || statusCode == 204 || statusCode == 304) {
return null;
}
if (data instanceof HttpResult) {
data = ((HttpResult<?>) data).getBody();
}
if (data == null && statusCode != 200) {
return null;
}
if (LOGGER.isDebugEnabled()) {
try {
String text;
if (data instanceof byte[]) {
text = new String((byte[]) data, StandardCharsets.UTF_8);
} else {
text = JsonUtils.toJson(data);
}
LOGGER.debug("Http response body sent: '{}' by [{}]", text, httpChannel);
} catch (Throwable ignored) {
}
}
HttpOutputMessage message = encodeHttpOutputMessage(data);
try {
preOutputMessage(message);
responseEncoder.encode(message.getBody(), data);
} catch (Throwable t) {
message.close();
throw t;
}
return message;
}
protected HttpOutputMessage encodeHttpOutputMessage(Object data) {
return getHttpChannel().newOutputMessage();
}
protected final void sendMessage(HttpOutputMessage message) throws Throwable {
if (message == null) {
return;
}
getHttpChannel().writeMessage(message);
postOutputMessage(message);
}
protected void preOutputMessage(HttpOutputMessage message) throws Throwable {}
protected void postOutputMessage(HttpOutputMessage message) throws Throwable {}
protected Throwable customizeError(Throwable throwable) {
if (exceptionCustomizer == null) {
return throwable;
}
Object result = exceptionCustomizer.apply(throwable);
if (result == null) {
return throwable;
}
if (result instanceof Throwable) {
return (Throwable) result;
}
onNext(result);
return null;
}
protected void doOnError(Throwable throwable) throws Throwable {
int statusCode = resolveErrorStatusCode(throwable);
Object data = buildErrorResponse(statusCode, throwable);
if (!headerSent) {
sendMetadata(buildMetadata(statusCode, data, throwable, HttpOutputMessage.EMPTY_MESSAGE));
}
sendMessage(buildMessage(statusCode, data));
}
protected final int resolveErrorStatusCode(Throwable throwable) {
if (throwable == null) {
return HttpStatus.OK.getCode();
}
if (throwable instanceof HttpStatusException) {
return ((HttpStatusException) throwable).getStatusCode();
}
return HttpStatus.INTERNAL_SERVER_ERROR.getCode();
}
protected final ErrorResponse buildErrorResponse(int statusCode, Throwable throwable) {
ErrorResponse errorResponse = new ErrorResponse();
errorResponse.setStatus(HttpUtils.toStatusString(statusCode));
if (throwable instanceof HttpStatusException) {
errorResponse.setMessage(((HttpStatusException) throwable).getDisplayMessage());
} else {
errorResponse.setMessage(getDisplayMessage(throwable));
}
return errorResponse;
}
protected String getDisplayMessage(Throwable throwable) {
return "Internal Server Error";
}
protected void onCompleted(Throwable throwable) {
if (completed) {
return;
}
doOnCompleted(throwable);
completed = true;
}
protected void doOnCompleted(Throwable throwable) {
HttpMetadata trailerMetadata = encodeTrailers(throwable);
if (trailerMetadata == null) {
return;
}
HttpHeaders headers = trailerMetadata.headers();
if (!headerSent) {
headers.set(HttpHeaderNames.STATUS.getKey(), HttpUtils.toStatusString(resolveErrorStatusCode(throwable)));
headers.set(HttpHeaderNames.CONTENT_TYPE.getKey(), getContentType());
}
customizeTrailers(headers, throwable);
getHttpChannel().writeHeader(trailerMetadata);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Http response trailers sent: " + headers);
}
}
protected HttpMetadata encodeTrailers(Throwable throwable) {
return null;
}
protected String getContentType() {
return responseEncoder.contentType();
}
protected boolean isHeaderSent() {
return headerSent;
}
protected void customizeTrailers(HttpHeaders headers, Throwable throwable) {
List<BiConsumer<HttpHeaders, Throwable>> trailersCustomizers = this.trailersCustomizers;
if (trailersCustomizers != null) {
for (int i = 0, size = trailersCustomizers.size(); i < size; i++) {
trailersCustomizers.get(i).accept(headers, throwable);
}
}
}
@Override
public void close() {
closed();
}
protected final void closed() {
closed = true;
}
}
|
AbstractServerHttpChannelObserver
|
java
|
apache__hadoop
|
hadoop-cloud-storage-project/hadoop-huaweicloud/src/test/java/org/apache/hadoop/fs/obs/TestOBSContractAppend.java
|
{
"start": 1127,
"end": 1419
}
|
class ____ extends AbstractContractAppendTest {
@Override
protected AbstractFSContract createContract(final Configuration conf) {
return new OBSContract(conf);
}
@Override
public void testRenameFileBeingAppended() {
assumeTrue(false, "unsupport.");
}
}
|
TestOBSContractAppend
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/collect/testing/google/SortedSetMultimapAsMapTester.java
|
{
"start": 1480,
"end": 2468
}
|
class ____<K, V>
extends AbstractMultimapTester<K, V, SortedSetMultimap<K, V>> {
public void testAsMapValuesImplementSortedSet() {
for (Collection<V> valueCollection : multimap().asMap().values()) {
SortedSet<V> valueSet = (SortedSet<V>) valueCollection;
assertEquals(multimap().valueComparator(), valueSet.comparator());
}
}
public void testAsMapGetImplementsSortedSet() {
for (K key : multimap().keySet()) {
SortedSet<V> valueSet = (SortedSet<V>) multimap().asMap().get(key);
assertEquals(multimap().valueComparator(), valueSet.comparator());
}
}
@MapFeature.Require(SUPPORTS_REMOVE)
public void testAsMapRemoveImplementsSortedSet() {
List<K> keys = new ArrayList<>(multimap().keySet());
for (K key : keys) {
resetCollection();
SortedSet<V> valueSet = (SortedSet<V>) multimap().asMap().remove(key);
assertEquals(multimap().valueComparator(), valueSet.comparator());
}
}
}
|
SortedSetMultimapAsMapTester
|
java
|
quarkusio__quarkus
|
core/runtime/src/main/java/io/quarkus/runtime/graal/Target_javax_net_ssl_SSLContext.java
|
{
"start": 447,
"end": 3462
}
|
class ____ {
@Alias
private static SSLContext defaultContext;
@Alias
protected Target_javax_net_ssl_SSLContext(SSLContextSpi contextSpi, Provider provider, String protocol) {
}
@Substitute
public static synchronized SSLContext getDefault()
throws NoSuchAlgorithmException {
if (defaultContext == null) {
if (SslContextConfiguration.isSslNativeEnabled()) {
defaultContext = SSLContext.getInstance("Default");
} else {
defaultContext = new DisabledSSLContext();
}
}
return defaultContext;
}
// TODO sun.security.jca.GetInstance is not accessible in JDK 11. We cannot add an export
// as we still compile with a JDK 8 target. So for now, we will have to leave with this
// and only override getDefault().
// @Substitute
// public static Target_javax_net_ssl_SSLContext getInstance(String protocol)
// throws NoSuchAlgorithmException {
// Objects.requireNonNull(protocol, "null protocol name");
//
// if (!SslContextConfiguration.isSslNativeEnabled()) {
// return (Target_javax_net_ssl_SSLContext) (Object) getDefault();
// }
//
// GetInstance.Instance instance = GetInstance.getInstance("SSLContext", SSLContextSpi.class, protocol);
// return new Target_javax_net_ssl_SSLContext((SSLContextSpi) instance.impl, instance.provider,
// protocol);
// }
//
// @Substitute
// public static Target_javax_net_ssl_SSLContext getInstance(String protocol, String provider)
// throws NoSuchAlgorithmException, NoSuchProviderException {
// Objects.requireNonNull(protocol, "null protocol name");
//
// if (!SslContextConfiguration.isSslNativeEnabled()) {
// return (Target_javax_net_ssl_SSLContext) (Object) getDefault();
// }
//
// GetInstance.Instance instance = GetInstance.getInstance("SSLContext", SSLContextSpi.class, protocol, provider);
// return new Target_javax_net_ssl_SSLContext((SSLContextSpi) instance.impl, instance.provider,
// protocol);
// }
//
// @Substitute
// public static Target_javax_net_ssl_SSLContext getInstance(String protocol, Provider provider)
// throws NoSuchAlgorithmException {
// Objects.requireNonNull(protocol, "null protocol name");
//
// if (!SslContextConfiguration.isSslNativeEnabled()) {
// return (Target_javax_net_ssl_SSLContext) (Object) getDefault();
// }
//
// GetInstance.Instance instance = GetInstance.getInstance("SSLContext", SSLContextSpi.class, protocol, provider);
// return new Target_javax_net_ssl_SSLContext((SSLContextSpi) instance.impl, instance.provider,
// protocol);
// }
}
|
Target_javax_net_ssl_SSLContext
|
java
|
apache__flink
|
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/AbstractInput.java
|
{
"start": 1763,
"end": 1879
}
|
interface ____ to be used when extending {@link
* AbstractStreamOperatorV2}.
*/
@Experimental
public abstract
|
intended
|
java
|
quarkusio__quarkus
|
integration-tests/spring-boot-properties/src/main/java/io/quarkus/it/spring/boot/BeanPropertiesResource.java
|
{
"start": 142,
"end": 722
}
|
class ____ {
@Inject
BeanProperties properties;
@Path("/value")
@GET
public int getValue() {
return properties.getValue();
}
@Path("/finalValue")
@GET
public String getFinalValue() {
return properties.getFinalValue();
}
@Path("/packagePrivateValue")
@GET
public int getPackagePrivateValue() {
return properties.packagePrivateValue;
}
@Path("/innerClass/value")
@GET
public String getInnerClassValue() {
return properties.getInnerClass().getValue();
}
}
|
BeanPropertiesResource
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/BindToRegistryBeanLazyMethodTest.java
|
{
"start": 1184,
"end": 2319
}
|
class ____ extends ContextTestSupport {
private String hello = "Hello World";
@BindToRegistry(lazy = false)
public FooService myEager() {
return new FooService(hello);
}
@BindToRegistry(lazy = true)
public FooService myLazy() {
return new FooService(hello);
}
@Test
public void testLazy() throws Exception {
// bean post processing dont run on ContextTestSupport
CamelBeanPostProcessor cbpp = PluginHelper.getBeanPostProcessor(context);
cbpp.postProcessBeforeInitialization(this, "this");
cbpp.postProcessAfterInitialization(this, "this");
// change message which should only affect lazy
hello = "Bye World";
FooService eager = context.getRegistry().lookupByNameAndType("myEager", FooService.class);
assertNotNull(eager);
assertEquals("Hello World", eager.getMessage());
FooService lazy = context.getRegistry().lookupByNameAndType("myLazy", FooService.class);
assertNotNull(lazy);
assertEquals("Bye World", lazy.getMessage());
}
public static
|
BindToRegistryBeanLazyMethodTest
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/executor/RedissonExecutorFuture.java
|
{
"start": 857,
"end": 1336
}
|
class ____<V> extends CompletableFutureWrapper<V> implements RExecutorFuture<V> {
private final String taskId;
public RedissonExecutorFuture(RemotePromise<V> promise) {
this(promise, promise.getRequestId());
}
public RedissonExecutorFuture(CompletableFuture<V> promise, String taskId) {
super(promise);
this.taskId = taskId;
}
@Override
public String getTaskId() {
return taskId;
}
}
|
RedissonExecutorFuture
|
java
|
apache__rocketmq
|
test/src/main/java/org/apache/rocketmq/test/clientinterface/AbstractMQProducer.java
|
{
"start": 1236,
"end": 4615
}
|
class ____ extends MQCollector implements MQProducer {
protected String topic = null;
protected ResultWrapper sendResult = new ResultWrapper();
protected boolean startSuccess = false;
protected String producerGroupName = null;
protected String producerInstanceName = null;
protected boolean isDebug = false;
public AbstractMQProducer(String topic) {
super();
producerGroupName = RandomUtil.getStringByUUID();
producerInstanceName = RandomUtil.getStringByUUID();
this.topic = topic;
}
public AbstractMQProducer(String topic, String originMsgCollector, String msgBodyCollector) {
super(originMsgCollector, msgBodyCollector);
producerGroupName = RandomUtil.getStringByUUID();
producerInstanceName = RandomUtil.getStringByUUID();
this.topic = topic;
}
public boolean isStartSuccess() {
return startSuccess;
}
public void setStartSuccess(boolean startSuccess) {
this.startSuccess = startSuccess;
}
public String getProducerInstanceName() {
return producerInstanceName;
}
public void setProducerInstanceName(String producerInstanceName) {
this.producerInstanceName = producerInstanceName;
}
public String getProducerGroupName() {
return producerGroupName;
}
public void setProducerGroupName(String producerGroupName) {
this.producerGroupName = producerGroupName;
}
public void setDebug() {
isDebug = true;
}
public void setDebug(boolean isDebug) {
this.isDebug = isDebug;
}
public void setRun() {
isDebug = false;
}
public List<MessageQueue> getMessageQueue() {
return null;
}
private Object getMessage() {
return this.getMessageByTag(null);
}
public Object getMessageByTag(String tag) {
Object objMsg = null;
if (this instanceof RMQNormalProducer) {
org.apache.rocketmq.common.message.Message msg = new org.apache.rocketmq.common.message.Message(
topic, (RandomUtil.getStringByUUID() + "." + new Date()).getBytes(StandardCharsets.UTF_8));
objMsg = msg;
if (tag != null) {
msg.setTags(tag);
}
}
return objMsg;
}
public void send() {
Object msg = getMessage();
send(msg, null);
}
public void send(Object msg) {
send(msg, null);
}
public void send(long msgNum) {
for (int i = 0; i < msgNum; i++) {
this.send();
}
}
public void send(long msgNum, int intervalMills) {
for (int i = 0; i < msgNum; i++) {
this.send();
TestUtil.waitForMonment(intervalMills);
}
}
public void send(String tag, int msgSize) {
for (int i = 0; i < msgSize; i++) {
Object msg = getMessageByTag(tag);
send(msg, null);
}
}
public void send(String tag, int msgSize, int intervalMills) {
for (int i = 0; i < msgSize; i++) {
Object msg = getMessageByTag(tag);
send(msg, null);
TestUtil.waitForMonment(intervalMills);
}
}
public void send(List<Object> msgs) {
for (Object msg : msgs) {
this.send(msg, null);
}
}
}
|
AbstractMQProducer
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/PersistenceUnit.java
|
{
"start": 1815,
"end": 1876
}
|
interface ____ {
PersistenceUnit[] value();
}
}
|
List
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeOnErrorComplete.java
|
{
"start": 1569,
"end": 3152
}
|
class ____<T>
implements MaybeObserver<T>, SingleObserver<T>, Disposable {
final MaybeObserver<? super T> downstream;
final Predicate<? super Throwable> predicate;
Disposable upstream;
public OnErrorCompleteMultiObserver(MaybeObserver<? super T> actual, Predicate<? super Throwable> predicate) {
this.downstream = actual;
this.predicate = predicate;
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void onSuccess(T value) {
downstream.onSuccess(value);
}
@Override
public void onError(Throwable e) {
boolean b;
try {
b = predicate.test(e);
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
downstream.onError(new CompositeException(e, ex));
return;
}
if (b) {
downstream.onComplete();
} else {
downstream.onError(e);
}
}
@Override
public void onComplete() {
downstream.onComplete();
}
@Override
public void dispose() {
upstream.dispose();
}
@Override
public boolean isDisposed() {
return upstream.isDisposed();
}
}
}
|
OnErrorCompleteMultiObserver
|
java
|
jhy__jsoup
|
src/main/java/org/jsoup/parser/Token.java
|
{
"start": 11443,
"end": 11707
}
|
class ____ extends Tag{
EndTag(TreeBuilder treeBuilder) {
super(TokenType.EndTag, treeBuilder);
}
@Override
public String toString() {
return "</" + toStringName() + ">";
}
}
final static
|
EndTag
|
java
|
quarkusio__quarkus
|
extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/OidcClient.java
|
{
"start": 181,
"end": 2474
}
|
interface ____ extends Closeable {
/**
* Get the grant access and refresh tokens.
*/
default Uni<Tokens> getTokens() {
return getTokens(Collections.emptyMap());
}
/**
* Get the grant access and refresh tokens with additional grant parameters.
*
* @param additionalGrantParameters additional grant parameters
* @return Uni<Tokens>
*/
Uni<Tokens> getTokens(Map<String, String> additionalGrantParameters);
/**
* Refresh and return a new pair of access and refresh tokens.
* Note a refresh token grant will typically return not only a new access token but also a new refresh token.
*
* @param refreshToken refresh token
* @return Uni<Tokens>
*/
default Uni<Tokens> refreshTokens(String refreshToken) {
return refreshTokens(refreshToken, Collections.emptyMap());
}
/**
* Refresh and return a new pair of access and refresh tokens.
* Note a refresh token grant will typically return not only a new access token but also a new refresh token.
*
* @param refreshToken refresh token
* @param additionalGrantParameters additional grant parameters
* @return Uni<Tokens>
*/
Uni<Tokens> refreshTokens(String refreshToken, Map<String, String> additionalGrantParameters);
/**
* Revoke the access token.
*
* @param accessToken access token which needs to be revoked
* @return Uni<Boolean> true if the token has been revoked or found already being invalidated,
* false if the token can not be currently revoked in which case a revocation request might be retried.
*/
default Uni<Boolean> revokeAccessToken(String accessToken) {
return revokeAccessToken(accessToken, Collections.emptyMap());
}
/**
* Revoke the access token.
*
* @param accessToken access token which needs to be revoked
* @param additionalParameters additional parameters
* @return Uni<Boolean> true if the token has been revoked or found already being invalidated,
* false if the token can not be currently revoked in which case a revocation request might be retried.
*/
Uni<Boolean> revokeAccessToken(String accessToken, Map<String, String> additionalParameters);
}
|
OidcClient
|
java
|
apache__camel
|
components/camel-ai/camel-djl/src/test/java/org/apache/camel/component/djl/TimeSeriesForecastingTest.java
|
{
"start": 1178,
"end": 3114
}
|
class ____ extends CamelTestSupport {
@BeforeAll
public static void setupDefaultEngine() {
// Since Apache MXNet is discontinued, prefer PyTorch as the default engine
System.setProperty("ai.djl.default_engine", "PyTorch");
}
@Test
void testDJL() throws Exception {
var mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.await();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("timer:testDJL?repeatCount=1")
.process(exchange -> {
var input = new TimeSeriesData(10);
input.setStartTime(LocalDateTime.now());
var manager = NDManager.newBaseManager("PyTorch");
var data = manager.create(new float[] {
1.0f, 2.0f, 1.0f, 2.0f, 3.0f,
1.0f, 2.0f, 3.0f, 4.0f, 1.0f });
input.setField(FieldName.TARGET, data);
exchange.getIn().setBody(input);
exchange.getIn().setHeader("NDManager", manager);
})
.doTry()
.to("djl:timeseries/forecasting?artifactId=ai.djl.pytorch:deepar:0.0.1")
.log("Mean: ${body.mean}")
.log("Median: ${body.median}")
.doFinally()
.process(exchange -> {
var manager = exchange.getIn().getHeader("NDManager", NDManager.class);
manager.close();
})
.end()
.to("mock:result");
}
};
}
}
|
TimeSeriesForecastingTest
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/AutoSecurityRolesAllowedTestCase.java
|
{
"start": 852,
"end": 10136
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(ResourceBean.class, OpenApiResourceSecuredAtClassLevel.class,
OpenApiResourceSecuredAtClassLevel2.class, OpenApiResourceSecuredAtMethodLevel.class,
OpenApiResourceSecuredAtMethodLevel2.class)
.addAsResource(
new StringAsset("quarkus.smallrye-openapi.security-scheme=jwt\n"
+ "quarkus.smallrye-openapi.security-scheme-name=JWTCompanyAuthentication\n"
+ "quarkus.smallrye-openapi.security-scheme-description=JWT Authentication\n"
+ "quarkus.smallrye-openapi.security-scheme-extensions.x-my-extension1=extension-value\n"
+ "quarkus.smallrye-openapi.security-scheme-extensions.my-extension2=extension-value"),
"application.properties"));
static Matcher<Iterable<Object>> schemeArray(String schemeName) {
return allOf(
iterableWithSize(1),
hasItem(allOf(
aMapWithSize(1),
hasEntry(equalTo(schemeName), emptyIterable()))));
}
@Test
void testAutoSecurityRequirement() {
var defaultSecurity = schemeArray("JWTCompanyAuthentication");
RestAssured.given()
.header("Accept", "application/json")
.when()
.get("/q/openapi")
.then()
.log().body()
.and()
.body("components.securitySchemes.JWTCompanyAuthentication", allOf(
hasEntry("type", "http"),
hasEntry("scheme", "bearer"),
hasEntry("bearerFormat", "JWT"),
hasEntry("description", "JWT Authentication"),
hasEntry("x-my-extension1", "extension-value"),
not(hasKey("my-extension2"))))
.and()
// OpenApiResourceSecuredAtMethodLevel
.body("paths.'/resource2/test-security/naked'.get.security", defaultSecurityScheme("admin"))
.body("paths.'/resource2/test-security/annotated'.get.security", defaultSecurity)
.body("paths.'/resource2/test-security/methodLevel/1'.get.security", defaultSecurityScheme("user1"))
.body("paths.'/resource2/test-security/methodLevel/2'.get.security", defaultSecurityScheme("user2"))
.body("paths.'/resource2/test-security/methodLevel/public'.get.security", nullValue())
.body("paths.'/resource2/test-security/annotated/documented'.get.security", defaultSecurity)
.body("paths.'/resource2/test-security/methodLevel/3'.get.security", defaultSecurityScheme("admin"))
.body("paths.'/resource2/test-security/methodLevel/4'.get.security", defaultSecurity)
.and()
// OpenApiResourceSecuredAtClassLevel
.body("paths.'/resource2/test-security/classLevel/1'.get.security", defaultSecurityScheme("user1"))
.body("paths.'/resource2/test-security/classLevel/2'.get.security", defaultSecurityScheme("user2"))
.body("paths.'/resource2/test-security/classLevel/3'.get.security", schemeArray("MyOwnName"))
.body("paths.'/resource2/test-security/classLevel/4'.get.security", defaultSecurityScheme("admin"))
.and()
// OpenApiResourceSecuredAtMethodLevel2
.body("paths.'/resource3/test-security/annotated'.get.security", schemeArray("AtClassLevel"))
.and()
// OpenApiResourceSecuredAtClassLevel2
.body("paths.'/resource3/test-security/classLevel-2/1'.get.security", defaultSecurity);
}
@Test
void testOpenAPIAnnotations() {
RestAssured.given().header("Accept", "application/json")
.when().get("/q/openapi")
.then()
.log().body()
.and()
.body("paths.'/resource2/test-security/classLevel/1'.get.responses.401.description",
Matchers.equalTo("Not Authorized"))
.and()
.body("paths.'/resource2/test-security/classLevel/1'.get.responses.403.description",
Matchers.equalTo("Not Allowed"))
.and()
.body("paths.'/resource2/test-security/classLevel/2'.get.responses.401.description",
Matchers.equalTo("Not Authorized"))
.and()
.body("paths.'/resource2/test-security/classLevel/2'.get.responses.403.description",
Matchers.equalTo("Not Allowed"))
.and()
.body("paths.'/resource2/test-security/classLevel/3'.get.responses.401.description",
Matchers.nullValue())
.and()
.body("paths.'/resource2/test-security/classLevel/3'.get.responses.403.description",
Matchers.nullValue())
.and()
.body("paths.'/resource2/test-security/classLevel/4'.get.responses.401.description",
Matchers.equalTo("Who are you?"))
.and()
.body("paths.'/resource2/test-security/classLevel/4'.get.responses.403.description",
Matchers.equalTo("You cannot do that."))
.and()
.body("paths.'/resource2/test-security/naked'.get.responses.401.description",
Matchers.equalTo("Not Authorized"))
.and()
.body("paths.'/resource2/test-security/naked'.get.responses.403.description",
Matchers.equalTo("Not Allowed"))
.and()
.body("paths.'/resource2/test-security/annotated'.get.responses.401.description",
Matchers.nullValue())
.and()
.body("paths.'/resource2/test-security/annotated'.get.responses.403.description",
Matchers.nullValue())
.and()
.body("paths.'/resource2/test-security/methodLevel/1'.get.responses.401.description",
Matchers.equalTo("Not Authorized"))
.and()
.body("paths.'/resource2/test-security/methodLevel/1'.get.responses.403.description",
Matchers.equalTo("Not Allowed"))
.and()
.body("paths.'/resource2/test-security/methodLevel/2'.get.responses.401.description",
Matchers.equalTo("Not Authorized"))
.and()
.body("paths.'/resource2/test-security/methodLevel/2'.get.responses.403.description",
Matchers.equalTo("Not Allowed"))
.and()
.body("paths.'/resource2/test-security/methodLevel/public'.get.responses.401.description",
Matchers.nullValue())
.and()
.body("paths.'/resource2/test-security/methodLevel/public'.get.responses.403.description",
Matchers.nullValue())
.and()
.body("paths.'/resource2/test-security/annotated/documented'.get.responses.401.description",
Matchers.equalTo("Who are you?"))
.and()
.body("paths.'/resource2/test-security/annotated/documented'.get.responses.403.description",
Matchers.equalTo("You cannot do that."))
.and()
.body("paths.'/resource2/test-security/methodLevel/3'.get.responses.401.description",
Matchers.equalTo("Who are you?"))
.and()
.body("paths.'/resource2/test-security/methodLevel/3'.get.responses.403.description",
Matchers.equalTo("You cannot do that."))
.and()
.body("paths.'/resource2/test-security/methodLevel/4'.get.responses.401.description",
Matchers.equalTo("Not Authorized"))
.and()
.body("paths.'/resource2/test-security/methodLevel/4'.get.responses.403.description",
Matchers.equalTo("Not Allowed"))
.and()
.body("paths.'/resource3/test-security/classLevel-2/1'.get.responses.401.description",
Matchers.equalTo("Not Authorized"))
.and()
.body("paths.'/resource3/test-security/classLevel-2/1'.get.responses.403.description",
Matchers.equalTo("Not Allowed"));
}
static Matcher<Iterable<Object>> defaultSecurityScheme(String... roles) {
return allOf(
iterableWithSize(1),
hasItem(allOf(
aMapWithSize(1),
hasEntry(equalTo("JWTCompanyAuthentication"), containsInAnyOrder(roles)))));
}
}
|
AutoSecurityRolesAllowedTestCase
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java
|
{
"start": 29690,
"end": 30284
}
|
class ____ {
// DTIdentifier -> renewDate
Map<RMDelegationTokenIdentifier, Long> delegationTokenState =
new HashMap<RMDelegationTokenIdentifier, Long>();
Set<DelegationKey> masterKeyState =
new HashSet<DelegationKey>();
int dtSequenceNumber = 0;
public Map<RMDelegationTokenIdentifier, Long> getTokenState() {
return delegationTokenState;
}
public Set<DelegationKey> getMasterKeyState() {
return masterKeyState;
}
public int getDTSequenceNumber() {
return dtSequenceNumber;
}
}
public static
|
RMDTSecretManagerState
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/android/processor/internal/bindvalue/BindValueMetadata.java
|
{
"start": 1746,
"end": 1811
}
|
class ____ has {@code BindValue} fields.
*/
@AutoValue
abstract
|
that
|
java
|
apache__flink
|
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsTest.java
|
{
"start": 2183,
"end": 13178
}
|
class ____ {
@TempDir private static java.nio.file.Path tempFolder;
@Test
void testSnapshotAndRestore() throws Exception {
final File outDir = TempDirUtils.newFolder(tempFolder);
final Path path = new Path(outDir.toURI());
final RollingPolicy<String, String> onCheckpointRollingPolicy =
OnCheckpointRollingPolicy.build();
final Buckets<String, String> buckets = createBuckets(path, onCheckpointRollingPolicy, 0);
final ListState<byte[]> bucketStateContainer = new MockListState<>();
final ListState<Long> partCounterContainer = new MockListState<>();
buckets.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L));
buckets.snapshotState(0L, bucketStateContainer, partCounterContainer);
assertThat(buckets.getActiveBuckets().get("test1"))
.is(matching(hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test1")));
buckets.onElement("test2", new TestUtils.MockSinkContext(null, 1L, 2L));
buckets.snapshotState(1L, bucketStateContainer, partCounterContainer);
assertThat(buckets.getActiveBuckets().get("test1"))
.is(matching(hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test1")));
assertThat(buckets.getActiveBuckets().get("test2"))
.is(matching(hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test2")));
Buckets<String, String> restoredBuckets =
restoreBuckets(
path,
onCheckpointRollingPolicy,
0,
bucketStateContainer,
partCounterContainer);
final Map<String, Bucket<String, String>> activeBuckets =
restoredBuckets.getActiveBuckets();
// because we commit pending files for previous checkpoints upon recovery
assertThat(activeBuckets).isEmpty();
}
private static TypeSafeMatcher<Bucket<String, String>>
hasSinglePartFileToBeCommittedOnCheckpointAck(
final Path testTmpPath, final String bucketId) {
return new TypeSafeMatcher<Bucket<String, String>>() {
@Override
protected boolean matchesSafely(Bucket<String, String> bucket) {
return bucket.getBucketId().equals(bucketId)
&& bucket.getBucketPath().equals(new Path(testTmpPath, bucketId))
&& bucket.getInProgressPart() == null
&& bucket.getPendingFileRecoverablesForCurrentCheckpoint().isEmpty()
&& bucket.getPendingFileRecoverablesPerCheckpoint().size() == 1;
}
@Override
public void describeTo(Description description) {
description
.appendText("a Bucket with a single pending part file @ ")
.appendValue(new Path(testTmpPath, bucketId))
.appendText("'");
}
};
}
@Test
void testMergeAtScaleInAndMaxCounterAtRecovery() throws Exception {
final File outDir = TempDirUtils.newFolder(tempFolder);
final Path path = new Path(outDir.toURI());
final RollingPolicy<String, String> onCheckpointRP =
DefaultRollingPolicy.builder()
.withMaxPartSize(new MemorySize(7L)) // roll with 2 elements
.build();
final MockListState<byte[]> bucketStateContainerOne = new MockListState<>();
final MockListState<byte[]> bucketStateContainerTwo = new MockListState<>();
final MockListState<Long> partCounterContainerOne = new MockListState<>();
final MockListState<Long> partCounterContainerTwo = new MockListState<>();
final Buckets<String, String> bucketsOne = createBuckets(path, onCheckpointRP, 0);
final Buckets<String, String> bucketsTwo = createBuckets(path, onCheckpointRP, 1);
bucketsOne.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L));
bucketsOne.snapshotState(0L, bucketStateContainerOne, partCounterContainerOne);
assertThat(bucketsOne.getMaxPartCounter()).isOne();
// make sure we have one in-progress file here
assertThat(bucketsOne.getActiveBuckets().get("test1").getInProgressPart()).isNotNull();
// add a couple of in-progress files so that the part counter increases.
bucketsTwo.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L));
bucketsTwo.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L));
bucketsTwo.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L));
bucketsTwo.snapshotState(0L, bucketStateContainerTwo, partCounterContainerTwo);
assertThat(bucketsTwo.getMaxPartCounter()).isEqualTo(2L);
// make sure we have one in-progress file here and a pending
assertThat(
bucketsTwo
.getActiveBuckets()
.get("test1")
.getPendingFileRecoverablesPerCheckpoint())
.hasSize(1);
assertThat(bucketsTwo.getActiveBuckets().get("test1").getInProgressPart()).isNotNull();
final ListState<byte[]> mergedBucketStateContainer = new MockListState<>();
final ListState<Long> mergedPartCounterContainer = new MockListState<>();
mergedBucketStateContainer.addAll(bucketStateContainerOne.getBackingList());
mergedBucketStateContainer.addAll(bucketStateContainerTwo.getBackingList());
mergedPartCounterContainer.addAll(partCounterContainerOne.getBackingList());
mergedPartCounterContainer.addAll(partCounterContainerTwo.getBackingList());
final Buckets<String, String> restoredBuckets =
restoreBuckets(
path,
onCheckpointRP,
0,
mergedBucketStateContainer,
mergedPartCounterContainer);
// we get the maximum of the previous tasks
assertThat(restoredBuckets.getMaxPartCounter()).isEqualTo(2L);
final Map<String, Bucket<String, String>> activeBuckets =
restoredBuckets.getActiveBuckets();
assertThat(activeBuckets).hasSize(1).containsKey("test1");
final Bucket<String, String> bucket = activeBuckets.get("test1");
assertThat(bucket.getBucketId()).isEqualTo("test1");
assertThat(bucket.getBucketPath()).isEqualTo(new Path(path, "test1"));
assertThat(bucket.getInProgressPart()).isNotNull(); // the restored part file
// this is due to the Bucket#merge(). The in progress file of one
// of the previous tasks is put in the list of pending files.
assertThat(bucket.getPendingFileRecoverablesForCurrentCheckpoint()).hasSize(1);
// we commit the pending for previous checkpoints
assertThat(bucket.getPendingFileRecoverablesPerCheckpoint()).isEmpty();
}
@Test
void testOnProcessingTime() throws Exception {
final File outDir = TempDirUtils.newFolder(tempFolder);
final Path path = new Path(outDir.toURI());
final OnProcessingTimePolicy<String, String> rollOnProcessingTimeCountingPolicy =
new OnProcessingTimePolicy<>(2L);
final Buckets<String, String> buckets =
createBuckets(path, rollOnProcessingTimeCountingPolicy, 0);
// it takes the current processing time of the context for the creation time,
// and for the last modification time.
buckets.onElement("test", new TestUtils.MockSinkContext(1L, 2L, 3L));
// now it should roll
buckets.onProcessingTime(7L);
assertThat(rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()).isOne();
final Map<String, Bucket<String, String>> activeBuckets = buckets.getActiveBuckets();
assertThat(activeBuckets).hasSize(1).containsKey("test");
final Bucket<String, String> bucket = activeBuckets.get("test");
assertThat(bucket.getBucketId()).isEqualTo("test");
assertThat(bucket.getBucketPath()).isEqualTo(new Path(path, "test"));
assertThat(bucket.getBucketId()).isEqualTo("test");
assertThat(bucket.getInProgressPart()).isNull();
assertThat(bucket.getPendingFileRecoverablesForCurrentCheckpoint()).hasSize(1);
assertThat(bucket.getPendingFileRecoverablesPerCheckpoint()).isEmpty();
}
@Test
void testBucketIsRemovedWhenNotActive() throws Exception {
final File outDir = TempDirUtils.newFolder(tempFolder);
final Path path = new Path(outDir.toURI());
final OnProcessingTimePolicy<String, String> rollOnProcessingTimeCountingPolicy =
new OnProcessingTimePolicy<>(2L);
final Buckets<String, String> buckets =
createBuckets(path, rollOnProcessingTimeCountingPolicy, 0);
// it takes the current processing time of the context for the creation time, and for the
// last modification time.
buckets.onElement("test", new TestUtils.MockSinkContext(1L, 2L, 3L));
// now it should roll
buckets.onProcessingTime(7L);
assertThat(rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()).isOne();
buckets.snapshotState(0L, new MockListState<>(), new MockListState<>());
buckets.commitUpToCheckpoint(0L);
assertThat(buckets.getActiveBuckets()).isEmpty();
}
@Test
void testPartCounterAfterBucketResurrection() throws Exception {
final File outDir = TempDirUtils.newFolder(tempFolder);
final Path path = new Path(outDir.toURI());
final OnProcessingTimePolicy<String, String> rollOnProcessingTimeCountingPolicy =
new OnProcessingTimePolicy<>(2L);
final Buckets<String, String> buckets =
createBuckets(path, rollOnProcessingTimeCountingPolicy, 0);
// it takes the current processing time of the context for the creation time, and for the
// last modification time.
buckets.onElement("test", new TestUtils.MockSinkContext(1L, 2L, 3L));
assertThat(buckets.getActiveBuckets().get("test").getPartCounter()).isOne();
// now it should roll
buckets.onProcessingTime(7L);
assertThat(rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()).isOne();
assertThat(buckets.getActiveBuckets().get("test").getPartCounter()).isOne();
buckets.snapshotState(0L, new MockListState<>(), new MockListState<>());
buckets.commitUpToCheckpoint(0L);
assertThat(buckets.getActiveBuckets()).isEmpty();
buckets.onElement("test", new TestUtils.MockSinkContext(2L, 3L, 4L));
assertThat(buckets.getActiveBuckets().get("test").getPartCounter()).isEqualTo(2L);
}
private static
|
BucketsTest
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertHasSizeLessThanOrEqualTo_Test.java
|
{
"start": 1011,
"end": 1914
}
|
class ____ extends ObjectArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
// WHEN
var error = expectAssertionError(() -> arrays.assertHasSizeLessThanOrEqualTo(INFO, null, 6));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_size_of_actual_is_not_less_than_or_equal_to_boundary() {
// WHEN
var error = expectAssertionError(() -> arrays.assertHasSizeLessThanOrEqualTo(INFO, actual, 1));
// THEN
then(error).hasMessage(shouldHaveSizeLessThanOrEqualTo(actual, actual.length, 1).create());
}
@Test
void should_pass_if_size_of_actual_is_less_than_boundary() {
arrays.assertHasSizeLessThanOrEqualTo(INFO, actual, 4);
}
@Test
void should_pass_if_size_of_actual_is_equal_to_boundary() {
arrays.assertHasSizeLessThanOrEqualTo(INFO, actual, actual.length);
}
}
|
ObjectArrays_assertHasSizeLessThanOrEqualTo_Test
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.