language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java
|
{
"start": 18943,
"end": 19346
}
|
class ____ extends Parent {
public Child() {
super(Duration.ofMillis(42));
}
}
""")
.doTest();
}
@Test
public void fluentMethodChain() {
refactoringTestHelper
.addInputLines(
"Client.java",
"""
import com.google.errorprone.annotations.InlineMe;
public final
|
Child
|
java
|
quarkusio__quarkus
|
extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devmode/GrpcHotReplacementSetup.java
|
{
"start": 886,
"end": 1784
}
|
// Dev-mode hot-replacement hook: on demand, scans for source changes and
// reports whether the application was restarted as a result.
// NOTE(review): the class name is masked in this extract (target: RestartHandler).
class ____ implements Supplier<Boolean> {
  public Boolean get() {
    boolean restarted = false;
    // Serialize concurrent scan requests; outside of tests, scans are
    // rate-limited via nextUpdate.
    synchronized (this) {
      if (nextUpdate < System.currentTimeMillis() || context.isTest()) {
        try {
          restarted = context.doScan(true);
          if (context.getDeploymentProblem() != null) {
            LOGGER.error("Failed to redeploy application on changes", context.getDeploymentProblem());
          }
        } catch (RuntimeException e) {
          // Propagate unchecked exceptions as-is.
          throw e;
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
        // Earliest time the next scan is allowed to run.
        nextUpdate = System.currentTimeMillis() + TWO_SECONDS;
      }
    }
    return restarted;
  }
}
}
|
RestartHandler
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
|
{
"start": 21405,
"end": 74381
}
|
/**
 * FSDataOutputStream wrapper that, when closed, validates the HTTP response
 * of the connection the data was written through.
 * NOTE(review): the class name is masked in this extract; the constructor
 * name indicates it is HttpFSDataOutputStream.
 */
class ____ extends FSDataOutputStream {
  // Connection the stream writes to; its response is checked on close().
  private HttpURLConnection conn;
  // HTTP status code expected from the server for a successful upload.
  private int closeStatus;
  public HttpFSDataOutputStream(HttpURLConnection conn, OutputStream out, int closeStatus, Statistics stats)
      throws IOException {
    super(out, stats);
    this.conn = conn;
    this.closeStatus = closeStatus;
  }
  @Override
  public void close() throws IOException {
    try {
      super.close();
    } finally {
      // Always check the server's verdict, even if closing the stream failed.
      HttpExceptionUtils.validateResponse(conn, closeStatus);
    }
  }
}
/**
 * Converts a <code>FsPermission</code> to its Unix octal string
 * representation.
 *
 * @param p the permission; null maps to {@code DEFAULT_PERMISSION}.
 * @return the Unix octal string representation.
 */
public static String permissionToString(FsPermission p) {
  int octal = (p == null) ? DEFAULT_PERMISSION : p.toShort();
  return Integer.toString(octal, 8);
}
/*
 * Common handling for uploading data for create and append operations.
 *
 * Implements the two-step HttpFS upload protocol: the first request (with
 * redirects disabled) must answer 307 with a "Location" header naming the
 * upload target; the data is then sent to that location and wrapped in an
 * HttpFSDataOutputStream that validates expectedStatus on close().
 */
private FSDataOutputStream uploadData(String method, Path f, Map<String, String> params,
    int bufferSize, int expectedStatus) throws IOException {
  HttpURLConnection conn = getConnection(method, params, f, true);
  conn.setInstanceFollowRedirects(false);
  // Tracks whether an error response was already converted into an
  // exception, so the outer catch does not call validateResponse twice.
  boolean exceptionAlreadyHandled = false;
  try {
    if (conn.getResponseCode() == HTTP_TEMPORARY_REDIRECT) {
      exceptionAlreadyHandled = true;
      String location = conn.getHeaderField("Location");
      if (location != null) {
        // Second request: stream the actual data to the redirect target.
        conn = getConnection(new URL(location), method);
        conn.setRequestProperty("Content-Type", UPLOAD_CONTENT_TYPE);
        try {
          OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize);
          return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics);
        } catch (IOException ex) {
          // Surface a server-reported error (if any) before rethrowing.
          HttpExceptionUtils.validateResponse(conn, expectedStatus);
          throw ex;
        }
      } else {
        HttpExceptionUtils.validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
        throw new IOException("Missing HTTP 'Location' header for [" + conn.getURL() + "]");
      }
    } else {
      throw new IOException(
          MessageFormat.format("Expected HTTP status was [307], received [{0}]",
              conn.getResponseCode()));
    }
  } catch (IOException ex) {
    if (exceptionAlreadyHandled) {
      throw ex;
    } else {
      // Translate the unexpected status into a server-reported exception
      // when possible; otherwise the original IOException propagates.
      HttpExceptionUtils.validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
      throw ex;
    }
  }
}
/**
 * Opens an FSDataOutputStream at the indicated Path.
 * <p>
 * IMPORTANT: The <code>Progressable</code> parameter is not used.
 *
 * @param f the file name to open.
 * @param permission file permission.
 * @param overwrite if a file with this name already exists, then if true,
 *   the file will be overwritten, and if false an error will be thrown.
 * @param bufferSize the size of the buffer to be used.
 * @param replication required block replication for the file.
 * @param blockSize block size.
 * @param progress progressable (ignored).
 * @throws IOException if the upload cannot be started.
 * @see #setPermission(Path, FsPermission)
 */
@Override
public FSDataOutputStream create(Path f, FsPermission permission,
    boolean overwrite, int bufferSize,
    short replication, long blockSize,
    Progressable progress) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.CREATE.toString());
  query.put(OVERWRITE_PARAM, Boolean.toString(overwrite));
  query.put(REPLICATION_PARAM, Short.toString(replication));
  query.put(BLOCKSIZE_PARAM, Long.toString(blockSize));
  query.put(PERMISSION_PARAM, permissionToString(permission));
  return uploadData(Operation.CREATE.getMethod(), f, query, bufferSize,
      HttpURLConnection.HTTP_CREATED);
}
/**
 * Appends to an existing file (optional operation).
 * <p>
 * IMPORTANT: The <code>Progressable</code> parameter is not used.
 *
 * @param f the existing file to be appended.
 * @param bufferSize the size of the buffer to be used.
 * @param progress progress reporter (ignored).
 * @throws IOException if the upload cannot be started.
 */
@Override
public FSDataOutputStream append(Path f, int bufferSize,
    Progressable progress) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.APPEND.toString());
  return uploadData(Operation.APPEND.getMethod(), f, query, bufferSize,
      HttpURLConnection.HTTP_OK);
}
/**
 * Truncates a file to the given length.
 *
 * @param f the file to be truncated.
 * @param newLength the size the file is to be truncated to.
 * @return the boolean result reported by the server.
 * @throws IOException if the operation fails.
 */
@Override
public boolean truncate(Path f, long newLength) throws IOException {
  Map<String, String> params = new HashMap<>();
  params.put(OP_PARAM, Operation.TRUNCATE.toString());
  params.put(NEW_LENGTH_PARAM, Long.toString(newLength));
  HttpURLConnection conn = getConnection(Operation.TRUNCATE.getMethod(),
      params, f, true);
  // Fail fast on HTTP error statuses before parsing the body, consistent
  // with every other JSON-returning operation in this class; otherwise
  // server errors would surface as opaque JSON parse failures.
  HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
  JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
  return (Boolean) json.get(TRUNCATE_JSON);
}
/**
 * Concatenates existing source files into the target path.
 *
 * @param f the path to the target destination.
 * @param psrcs the paths to the sources to use for the concatenation.
 * @throws IOException if the operation fails.
 */
@Override
public void concat(Path f, Path[] psrcs) throws IOException {
  List<String> sourcePaths = new ArrayList<>(psrcs.length);
  for (Path source : psrcs) {
    sourcePaths.add(source.toUri().getPath());
  }
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.CONCAT.toString());
  query.put(SOURCES_PARAM, StringUtils.join(",", sourcePaths));
  HttpURLConnection connection = getConnection(Operation.CONCAT.getMethod(),
      query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Renames Path src to Path dst. Can take place on local fs or remote DFS.
 *
 * @param src path to rename.
 * @param dst path after rename.
 * @return the boolean result reported by the server.
 * @throws IOException if the operation fails.
 */
@Override
public boolean rename(Path src, Path dst) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.RENAME.toString());
  query.put(DESTINATION_PARAM, dst.toString());
  HttpURLConnection connection = getConnection(Operation.RENAME.getMethod(),
      query, src, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  return (Boolean) response.get(RENAME_JSON);
}
/**
 * Deletes a file, non-recursively.
 *
 * @deprecated Use {@link #delete(Path, boolean)} instead.
 */
@Deprecated
@Override
public boolean delete(Path f) throws IOException {
  return delete(f, false);
}
/**
 * Deletes a path.
 *
 * @param f the path to delete.
 * @param recursive if the path is a directory and set to true, its contents
 *   are deleted too, else an exception is thrown. For a file the flag may
 *   be either true or false.
 * @return true if delete is successful else false.
 * @throws IOException if the operation fails.
 */
@Override
public boolean delete(Path f, boolean recursive) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.DELETE.toString());
  query.put(RECURSIVE_PARAM, Boolean.toString(recursive));
  HttpURLConnection connection = getConnection(Operation.DELETE.getMethod(),
      query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  return (Boolean) response.get(DELETE_JSON);
}
/**
 * Converts a LISTSTATUS JSON response into an array of FileStatus.
 *
 * @param json JSON object carrying the FileStatuses payload.
 * @param f the listed path, used as the parent of the returned statuses.
 * @return the statuses of the entries in the response.
 */
private FileStatus[] toFileStatuses(JSONObject json, Path f) {
  JSONObject statuses = (JSONObject) json.get(FILE_STATUSES_JSON);
  JSONArray entries = (JSONArray) statuses.get(FILE_STATUS_JSON);
  Path qualified = makeQualified(f);
  FileStatus[] result = new FileStatus[entries.size()];
  for (int i = 0; i < result.length; i++) {
    result[i] = createFileStatus(qualified, (JSONObject) entries.get(i));
  }
  return result;
}
/**
 * Gets {@link FileStatus} of files/directories in the given path. If the
 * path is a file, the {@link FileStatus} of that file is returned; if it is
 * a directory, the statuses of its entries are returned.
 *
 * @param f given path.
 * @return the statuses of the files/directories in the given path.
 * @throws IOException if the operation fails.
 */
@Override
public FileStatus[] listStatus(Path f) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.LISTSTATUS.toString());
  HttpURLConnection connection = getConnection(
      Operation.LISTSTATUS.getMethod(), query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  return toFileStatuses(response, f);
}
/**
 * Gets {@link DirectoryEntries} of the given path: one page of
 * {@link FileStatus} entries plus iteration information.
 *
 * @param f given path
 * @param token opaque pagination token from a previous call, or null to
 *   start from the beginning.
 * @return {@link DirectoryEntries} for given path
 * @throws FileNotFoundException if the path does not exist.
 * @throws IOException if the operation fails.
 */
@Override
public DirectoryEntries listStatusBatch(Path f, byte[] token) throws
    FileNotFoundException, IOException {
  Map<String, String> params = new HashMap<>();
  params.put(OP_PARAM, Operation.LISTSTATUS_BATCH.toString());
  if (token != null) {
    params.put(START_AFTER_PARAM, new String(token, StandardCharsets.UTF_8));
  }
  HttpURLConnection conn = getConnection(
      Operation.LISTSTATUS_BATCH.getMethod(),
      params, f, true);
  HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
  // Parse the FileStatus array
  JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
  JSONObject listing = (JSONObject) json.get(DIRECTORY_LISTING_JSON);
  FileStatus[] statuses = toFileStatuses(
      (JSONObject) listing.get(PARTIAL_LISTING_JSON), f);
  // New token is the last FileStatus entry's name (getPath().getName() is
  // already a String; the redundant toString() was dropped).
  byte[] newToken = null;
  if (statuses.length > 0) {
    newToken = statuses[statuses.length - 1].getPath().getName()
        .getBytes(StandardCharsets.UTF_8);
  }
  // remainingEntries > 0 means the server has more entries to return.
  final long remainingEntries = (Long) listing.get(REMAINING_ENTRIES_JSON);
  final boolean hasMore = remainingEntries > 0;
  return new DirectoryEntries(statuses, newToken, hasMore);
}
/**
 * Sets the current working directory for the given file system. All
 * relative paths will be resolved relative to it.
 *
 * @param newDir new working directory.
 * @throws IllegalArgumentException if the path is not a valid DFS name.
 */
@Override
public void setWorkingDirectory(Path newDir) {
  String candidate = newDir.toUri().getPath();
  if (!DFSUtilClient.isValidName(candidate)) {
    throw new IllegalArgumentException(
        "Invalid DFS directory name " + candidate);
  }
  workingDir = newDir;
}
/**
 * Gets the current working directory for the given file system, lazily
 * defaulting to the user's home directory on first access.
 *
 * @return the directory pathname.
 */
@Override
public Path getWorkingDirectory() {
  Path current = workingDir;
  if (current == null) {
    current = getHomeDirectory();
    workingDir = current;
  }
  return current;
}
/**
 * Makes the given file and all non-existent parents into directories, with
 * the semantics of Unix {@code mkdir -p}. Existence of the directory
 * hierarchy is not an error.
 *
 * @param f directory path to create.
 * @param permission permission for the created directories.
 * @return the boolean result reported by the server.
 * @throws IOException if the operation fails.
 */
@Override
public boolean mkdirs(Path f, FsPermission permission) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.MKDIRS.toString());
  query.put(PERMISSION_PARAM, permissionToString(permission));
  HttpURLConnection connection = getConnection(Operation.MKDIRS.getMethod(),
      query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  return (Boolean) response.get(MKDIRS_JSON);
}
/**
 * Returns a file status object that represents the path.
 *
 * @param f the path we want information from.
 * @return a FileStatus object, qualified against this filesystem's URI.
 * @throws FileNotFoundException when the path does not exist.
 * @throws IOException on other failures.
 */
@Override
public FileStatus getFileStatus(Path f) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.GETFILESTATUS.toString());
  HttpURLConnection connection = getConnection(
      Operation.GETFILESTATUS.getMethod(), query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  HdfsFileStatus status = JsonUtilClient.toFileStatus(response, true);
  return status.makeQualified(getUri(), f);
}
/**
 * Returns the current user's home directory in this filesystem.
 * The default implementation returns "/user/$USER/".
 *
 * @return the home directory reported by the server.
 */
@Override
public Path getHomeDirectory() {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.GETHOMEDIRECTORY.toString());
  try {
    Path root = new Path(getUri().toString(), "/");
    HttpURLConnection connection = getConnection(
        Operation.GETHOMEDIRECTORY.getMethod(), query, root, false);
    HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
    JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
    return new Path((String) response.get(HOME_DIR_JSON));
  } catch (IOException ex) {
    // The FileSystem contract does not allow a checked exception here.
    throw new RuntimeException(ex);
  }
}
/**
 * Gets the root directory of Trash for a path in HDFS.
 * 1. File in encryption zone returns /ez1/.Trash/username.
 * 2. File not in encryption zone, or encountered exception when checking
 * the encryption zone of the path, returns /users/username/.Trash.
 * Caller appends either Current or checkpoint timestamp for trash
 * destination. The default implementation returns "/user/username/.Trash".
 *
 * @param fullPath the trash root of the path to be determined.
 * @return trash root
 */
@Override
public Path getTrashRoot(Path fullPath) {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.GETTRASHROOT.toString());
  try {
    HttpURLConnection connection = getConnection(
        Operation.GETTRASHROOT.getMethod(), query, fullPath, true);
    HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
    JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
    return new Path((String) response.get(TRASH_DIR_JSON));
  } catch (IOException ex) {
    // Best-effort: fall back to the superclass default on failure.
    LOG.warn("Cannot find trash root of " + fullPath, ex);
    return super.getTrashRoot(fullPath);
  }
}
/**
 * Sets owner of a path (i.e. a file or a directory). The parameters
 * username and groupname cannot both be null.
 *
 * @param p the path.
 * @param username if null, the original username remains unchanged.
 * @param groupname if null, the original groupname remains unchanged.
 * @throws IOException if the operation fails.
 */
@Override
public void setOwner(Path p, String username, String groupname)
    throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.SETOWNER.toString());
  query.put(OWNER_PARAM, username);
  query.put(GROUP_PARAM, groupname);
  HttpURLConnection connection = getConnection(Operation.SETOWNER.getMethod(),
      query, p, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Sets the permission of a path.
 *
 * @param p path.
 * @param permission permission.
 * @throws IOException if the operation fails.
 */
@Override
public void setPermission(Path p, FsPermission permission) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.SETPERMISSION.toString());
  query.put(PERMISSION_PARAM, permissionToString(permission));
  HttpURLConnection connection =
      getConnection(Operation.SETPERMISSION.getMethod(), query, p, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Sets the modification and access time of a file, in milliseconds since
 * Jan 1, 1970. A value of -1 leaves the corresponding time unchanged.
 *
 * @param p the path.
 * @param mtime modification time to set, or -1 to skip.
 * @param atime access time to set, or -1 to skip.
 * @throws IOException if the operation fails.
 */
@Override
public void setTimes(Path p, long mtime, long atime) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.SETTIMES.toString());
  query.put(MODIFICATION_TIME_PARAM, Long.toString(mtime));
  query.put(ACCESS_TIME_PARAM, Long.toString(atime));
  HttpURLConnection connection = getConnection(Operation.SETTIMES.getMethod(),
      query, p, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Sets replication for an existing file.
 *
 * @param src file name.
 * @param replication new replication.
 * @return true if successful; false if file does not exist or is a
 *   directory.
 * @throws IOException if the operation fails.
 */
@Override
public boolean setReplication(Path src, short replication)
    throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.SETREPLICATION.toString());
  query.put(REPLICATION_PARAM, Short.toString(replication));
  HttpURLConnection connection =
      getConnection(Operation.SETREPLICATION.getMethod(), query, src, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  return (Boolean) response.get(SET_REPLICATION_JSON);
}
/**
 * Modifies the ACL entries for a file.
 *
 * @param path path to modify.
 * @param aclSpec describing modifications.
 * @throws IOException if the operation fails.
 */
@Override
public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
    throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.MODIFYACLENTRIES.toString());
  query.put(ACLSPEC_PARAM, AclEntry.aclSpecToString(aclSpec));
  HttpURLConnection connection = getConnection(
      Operation.MODIFYACLENTRIES.getMethod(), query, path, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Removes the specified ACL entries from a file.
 *
 * @param path path to modify.
 * @param aclSpec describing entries to remove.
 * @throws IOException if the operation fails.
 */
@Override
public void removeAclEntries(Path path, List<AclEntry> aclSpec)
    throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.REMOVEACLENTRIES.toString());
  query.put(ACLSPEC_PARAM, AclEntry.aclSpecToString(aclSpec));
  HttpURLConnection connection = getConnection(
      Operation.REMOVEACLENTRIES.getMethod(), query, path, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Removes the default ACL for the given file.
 *
 * @param path path from which to remove the default ACL.
 * @throws IOException if the operation fails.
 */
@Override
public void removeDefaultAcl(Path path) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.REMOVEDEFAULTACL.toString());
  HttpURLConnection connection = getConnection(
      Operation.REMOVEDEFAULTACL.getMethod(), query, path, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Removes all ACLs from a file.
 *
 * @param path path from which to remove all ACLs.
 * @throws IOException if the operation fails.
 */
@Override
public void removeAcl(Path path) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.REMOVEACL.toString());
  HttpURLConnection connection = getConnection(Operation.REMOVEACL.getMethod(),
      query, path, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Sets the ACLs for the given file.
 *
 * @param path path to modify.
 * @param aclSpec describing modifications; must include entries for user,
 *   group, and others for compatibility with permission bits.
 * @throws IOException if the operation fails.
 */
@Override
public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.SETACL.toString());
  query.put(ACLSPEC_PARAM, AclEntry.aclSpecToString(aclSpec));
  HttpURLConnection connection = getConnection(Operation.SETACL.getMethod(),
      query, path, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Gets the ACL information for a given file.
 *
 * @param path path to acquire ACL info for.
 * @return the ACL information parsed from the server's JSON response.
 * @throws IOException if the operation fails.
 */
@Override
public AclStatus getAclStatus(Path path) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.GETACLSTATUS.toString());
  HttpURLConnection connection = getConnection(
      Operation.GETACLSTATUS.getMethod(), query, path, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  return createAclStatus((JSONObject) response.get(ACL_STATUS_JSON));
}
/** Converts the permission field of a JSON file status to a FsPermission. */
static FsPermission toFsPermission(JSONObject json) {
  // The permission travels as a Unix octal string, e.g. "644".
  String octal = (String) json.get(PERMISSION_JSON);
  return new FsPermission(Short.parseShort(octal, 8));
}
/**
 * Builds a {@link FileStatus} from one JSON file-status entry.
 *
 * @param parent the (already qualified) parent path of the entry.
 * @param json JSON object describing a single file, directory or symlink.
 * @return the corresponding FileStatus.
 */
private FileStatus createFileStatus(Path parent, JSONObject json) {
  String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
  // An empty suffix means the status describes the parent path itself.
  Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
  FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON));
  // Symlink target is only present for SYMLINK entries.
  String symLinkValue =
      type == FILE_TYPE.SYMLINK ? (String) json.get(SYMLINK_JSON) : null;
  Path symLink = symLinkValue == null ? null : new Path(symLinkValue);
  long len = (Long) json.get(LENGTH_JSON);
  String owner = (String) json.get(OWNER_JSON);
  String group = (String) json.get(GROUP_JSON);
  final FsPermission permission = toFsPermission(json);
  long aTime = (Long) json.get(ACCESS_TIME_JSON);
  long mTime = (Long) json.get(MODIFICATION_TIME_JSON);
  long blockSize = (Long) json.get(BLOCK_SIZE_JSON);
  short replication = ((Long) json.get(REPLICATION_JSON)).shortValue();
  // Feature bits may be absent from the response; missing is treated as
  // false.
  final Boolean aclBit = (Boolean) json.get(ACL_BIT_JSON);
  final Boolean encBit = (Boolean) json.get(ENC_BIT_JSON);
  final Boolean erasureBit = (Boolean) json.get(EC_BIT_JSON);
  final Boolean snapshotEnabledBit = (Boolean) json.get(SNAPSHOT_BIT_JSON);
  final boolean aBit = (aclBit != null) ? aclBit : false;
  final boolean eBit = (encBit != null) ? encBit : false;
  final boolean ecBit = (erasureBit != null) ? erasureBit : false;
  final boolean seBit =
      (snapshotEnabledBit != null) ? snapshotEnabledBit : false;
  if (aBit || eBit || ecBit || seBit) {
    // include this for compatibility with 2.x
    FsPermissionExtension deprecatedPerm =
        new FsPermissionExtension(permission, aBit, eBit, ecBit);
    FileStatus fileStatus = new FileStatus(len, FILE_TYPE.DIRECTORY == type,
        replication, blockSize, mTime, aTime, deprecatedPerm, owner, group,
        symLink, path, FileStatus.attributes(aBit, eBit, ecBit, seBit));
    return fileStatus;
  } else {
    return new FileStatus(len, FILE_TYPE.DIRECTORY == type,
        replication, blockSize, mTime, aTime, permission, owner, group,
        symLink, path);
  }
}
/**
 * Converts the given JSON object into an AclStatus.
 *
 * @param json input JSON representing the ACLs.
 * @return resulting AclStatus.
 */
private AclStatus createAclStatus(JSONObject json) {
  AclStatus.Builder builder = new AclStatus.Builder();
  builder.owner((String) json.get(OWNER_JSON));
  builder.group((String) json.get(GROUP_JSON));
  builder.stickyBit((Boolean) json.get(ACL_STICKY_BIT_JSON));
  builder.setPermission(toFsPermission(json));
  JSONArray entries = (JSONArray) json.get(ACL_ENTRIES_JSON);
  for (Object entry : entries) {
    builder.addEntry(AclEntry.parseAclEntry(entry.toString(), true));
  }
  return builder.build();
}
/**
 * Gets the {@link ContentSummary} (length, file/directory counts, EC policy
 * and quota usage) of the given path.
 *
 * @param f path to summarize.
 * @return the content summary parsed from the server's JSON response.
 * @throws IOException if the operation fails.
 */
@Override
public ContentSummary getContentSummary(Path f) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString());
  HttpURLConnection connection = getConnection(
      Operation.GETCONTENTSUMMARY.getMethod(), query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  JSONObject summary = (JSONObject) response.get(CONTENT_SUMMARY_JSON);
  ContentSummary.Builder builder = new ContentSummary.Builder()
      .length((Long) summary.get(CONTENT_SUMMARY_LENGTH_JSON))
      .fileCount((Long) summary.get(CONTENT_SUMMARY_FILE_COUNT_JSON))
      .directoryCount((Long) summary.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON))
      .erasureCodingPolicy((String) summary.get(CONTENT_SUMMARY_ECPOLICY_JSON));
  builder = buildQuotaUsage(builder, summary, ContentSummary.Builder.class);
  return builder.build();
}
/**
 * Gets the {@link QuotaUsage} of the given path.
 *
 * @param f path to query.
 * @return the quota usage parsed from the server's JSON response.
 * @throws IOException if the operation fails.
 */
@Override
public QuotaUsage getQuotaUsage(Path f) throws IOException {
  Map<String, String> params = new HashMap<>();
  params.put(OP_PARAM, Operation.GETQUOTAUSAGE.toString());
  HttpURLConnection conn =
      getConnection(Operation.GETQUOTAUSAGE.getMethod(), params, f, true);
  // Validate the HTTP status before parsing, as every other operation in
  // this class does; otherwise server-side errors surface as opaque JSON
  // parse failures instead of the server-reported exception.
  HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
  JSONObject json = (JSONObject) ((JSONObject)
      HttpFSUtils.jsonParse(conn)).get(QUOTA_USAGE_JSON);
  QuotaUsage.Builder builder = new QuotaUsage.Builder();
  builder = buildQuotaUsage(builder, json, QuotaUsage.Builder.class);
  return builder.build();
}
/**
 * Given a builder for QuotaUsage, parse the provided JSON object and
 * construct the relevant fields. Return the updated builder.
 *
 * Shared by getContentSummary and getQuotaUsage; the {@code type} token is
 * used to cast the fluent-call results back to the caller's concrete
 * builder subtype.
 */
private static <T extends QuotaUsage.Builder> T buildQuotaUsage(
    T builder, JSONObject json, Class<T> type) {
  long quota = (Long) json.get(QUOTA_USAGE_QUOTA_JSON);
  long spaceConsumed = (Long) json.get(QUOTA_USAGE_SPACE_CONSUMED_JSON);
  long spaceQuota = (Long) json.get(QUOTA_USAGE_SPACE_QUOTA_JSON);
  JSONObject typeJson = (JSONObject) json.get(QUOTA_USAGE_TYPE_QUOTA_JSON);
  builder = type.cast(builder
      .quota(quota)
      .spaceConsumed(spaceConsumed)
      .spaceQuota(spaceQuota)
  );
  // ContentSummary doesn't set this so check before using it
  if (json.get(QUOTA_USAGE_FILE_AND_DIRECTORY_COUNT_JSON) != null) {
    long fileAndDirectoryCount = (Long)
        json.get(QUOTA_USAGE_FILE_AND_DIRECTORY_COUNT_JSON);
    builder = type.cast(builder.fileAndDirectoryCount(fileAndDirectoryCount));
  }
  // Per-storage-type quotas are optional; copy only the types present.
  if (typeJson != null) {
    for (StorageType t : StorageType.getTypesSupportingQuota()) {
      JSONObject typeQuota = (JSONObject) typeJson.get(t.toString());
      if (typeQuota != null) {
        builder = type.cast(builder
            .typeQuota(t, ((Long) typeQuota.get(QUOTA_USAGE_QUOTA_JSON)))
            .typeConsumed(t, ((Long) typeQuota.get(QUOTA_USAGE_CONSUMED_JSON))
            ));
      }
    }
  }
  return builder;
}
/**
 * Gets the checksum of a file as reported by the server. The returned
 * {@link FileChecksum} is a read-only view over the JSON payload: its
 * Writable methods ({@code write}/{@code readFields}) are unsupported.
 *
 * @param f file path.
 * @return the file checksum (algorithm name, length and bytes).
 * @throws IOException if the operation fails.
 */
@Override
public FileChecksum getFileChecksum(Path f) throws IOException {
  Map<String, String> params = new HashMap<String, String>();
  params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
  HttpURLConnection conn =
      getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
  HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
  // Captured by the anonymous class below; all accessors read from it.
  final JSONObject json = (JSONObject) ((JSONObject)
      HttpFSUtils.jsonParse(conn)).get(FILE_CHECKSUM_JSON);
  return new FileChecksum() {
    @Override
    public String getAlgorithmName() {
      return (String) json.get(CHECKSUM_ALGORITHM_JSON);
    }
    @Override
    public int getLength() {
      return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
    }
    @Override
    public byte[] getBytes() {
      // Bytes are hex-encoded in the JSON payload.
      return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
    }
    @Override
    public void write(DataOutput out) throws IOException {
      throw new UnsupportedOperationException();
    }
    @Override
    public void readFields(DataInput in) throws IOException {
      throw new UnsupportedOperationException();
    }
  };
}
/**
 * Gets a delegation token from the HttpFS server on behalf of the current
 * user.
 *
 * @param renewer the user allowed to renew the token.
 * @return the delegation token.
 * @throws IOException if token retrieval fails.
 */
@Override
public Token<?> getDelegationToken(final String renewer)
    throws IOException {
  try {
    return UserGroupInformation.getCurrentUser().doAs(
        new PrivilegedExceptionAction<Token<?>>() {
          @Override
          public Token<?> run() throws Exception {
            return authURL.getDelegationToken(uri.toURL(), authToken,
                renewer);
          }
        }
    );
  } catch (Exception ex) {
    // Preserve IOExceptions as-is; wrap anything else, keeping the cause.
    if (ex instanceof IOException) {
      throw (IOException) ex;
    } else {
      throw new IOException(ex);
    }
  }
}
/**
 * Renews a delegation token against the HttpFS server as the current user.
 *
 * NOTE(review): the {@code token} argument is not used; the cached
 * {@code authToken} is what gets renewed — confirm this is intentional.
 *
 * @param token token to renew (currently unused).
 * @return the new expiration time.
 * @throws IOException if renewal fails.
 */
public long renewDelegationToken(final Token<?> token) throws IOException {
  try {
    return UserGroupInformation.getCurrentUser().doAs(
        new PrivilegedExceptionAction<Long>() {
          @Override
          public Long run() throws Exception {
            return authURL.renewDelegationToken(uri.toURL(), authToken);
          }
        }
    );
  } catch (Exception ex) {
    // Preserve IOExceptions as-is; wrap anything else, keeping the cause.
    if (ex instanceof IOException) {
      throw (IOException) ex;
    } else {
      throw new IOException(ex);
    }
  }
}
/**
 * Cancels a delegation token against the HttpFS server.
 *
 * NOTE(review): the {@code token} argument is not used — the cached
 * {@code authToken} is cancelled — and, unlike get/renewDelegationToken,
 * this does not run inside a doAs block; confirm both are intentional.
 */
public void cancelDelegationToken(final Token<?> token) throws IOException {
  authURL.cancelDelegationToken(uri.toURL(), authToken);
}
@Override
public Token<?> getRenewToken() {
  // Renewal support is not implemented; callers receive null.
  return null; //TODO : for renewer
}
@Override
@SuppressWarnings("unchecked")
public <T extends TokenIdentifier> void setDelegationToken(Token<T> token) {
  // Intentionally a no-op: renewal support is not implemented yet.
  //TODO : for renewer
}
/**
 * Sets an extended attribute on a path; a null value sets the name only.
 *
 * @param f path to modify.
 * @param name xattr name.
 * @param value xattr value, hex-encoded for transport; may be null.
 * @param flag create/replace semantics.
 * @throws IOException if the operation fails.
 */
@Override
public void setXAttr(Path f, String name, byte[] value,
    EnumSet<XAttrSetFlag> flag) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.SETXATTR.toString());
  query.put(XATTR_NAME_PARAM, name);
  query.put(XATTR_SET_FLAG_PARAM, EnumSetParam.toString(flag));
  if (value != null) {
    query.put(XATTR_VALUE_PARAM,
        XAttrCodec.encodeValue(value, XAttrCodec.HEX));
  }
  HttpURLConnection connection = getConnection(Operation.SETXATTR.getMethod(),
      query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
}
/**
 * Gets a single extended attribute of a path.
 *
 * @param f the path.
 * @param name xattr name to fetch.
 * @return the decoded value, or null if the attribute is absent.
 * @throws IOException if the operation fails.
 */
@Override
public byte[] getXAttr(Path f, String name) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.GETXATTRS.toString());
  query.put(XATTR_NAME_PARAM, name);
  HttpURLConnection connection = getConnection(Operation.GETXATTRS.getMethod(),
      query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  Map<String, byte[]> attrs = createXAttrMap(
      (JSONArray) response.get(XATTRS_JSON));
  return attrs.get(name);
}
/** Converts a JSON array of xattr entries into a name-to-value map. */
private Map<String, byte[]> createXAttrMap(JSONArray jsonArray)
    throws IOException {
  Map<String, byte[]> result = Maps.newHashMap();
  for (Object entry : jsonArray) {
    JSONObject attr = (JSONObject) entry;
    // Values are hex-encoded on the wire.
    result.put((String) attr.get(XATTR_NAME_JSON),
        XAttrCodec.decodeValue((String) attr.get(XATTR_VALUE_JSON)));
  }
  return result;
}
/** Parses a JSON array string of xattr names into a list. */
private List<String> createXAttrNames(String xattrNamesStr) throws IOException {
  try {
    JSONArray parsed = (JSONArray) new JSONParser().parse(xattrNamesStr);
    List<String> names = Lists.newArrayListWithCapacity(parsed.size());
    for (Object name : parsed) {
      names.add((String) name);
    }
    return names;
  } catch (ParseException e) {
    throw new IOException("JSON parser error, " + e.getMessage(), e);
  }
}
/**
 * Gets all extended attributes of a path.
 *
 * @param f the path.
 * @return map of xattr names to (decoded) values.
 * @throws IOException if the operation fails.
 */
@Override
public Map<String, byte[]> getXAttrs(Path f) throws IOException {
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.GETXATTRS.toString());
  HttpURLConnection connection = getConnection(Operation.GETXATTRS.getMethod(),
      query, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  return createXAttrMap((JSONArray) response.get(XATTRS_JSON));
}
/**
 * Gets the requested extended attributes of a path.
 *
 * @param f the path.
 * @param names non-empty list of xattr names to fetch.
 * @return map of xattr names to (decoded) values.
 * @throws IOException if the operation fails.
 */
@Override
public Map<String, byte[]> getXAttrs(Path f, List<String> names)
    throws IOException {
  Preconditions.checkArgument(names != null && !names.isEmpty(),
      "XAttr names cannot be null or empty.");
  Map<String, String> query = new HashMap<>();
  query.put(OP_PARAM, Operation.GETXATTRS.toString());
  // Names are sent as a repeated query parameter.
  Map<String, List<String>> multiValued = Maps.newHashMap();
  multiValued.put(XATTR_NAME_PARAM, names);
  HttpURLConnection connection = getConnection(Operation.GETXATTRS.getMethod(),
      query, multiValued, f, true);
  HttpExceptionUtils.validateResponse(connection, HttpURLConnection.HTTP_OK);
  JSONObject response = (JSONObject) HttpFSUtils.jsonParse(connection);
  return createXAttrMap((JSONArray) response.get(XATTRS_JSON));
}
@Override
public List<String> listXAttrs(Path f) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.LISTXATTRS.toString());
HttpURLConnection conn = getConnection(Operation.LISTXATTRS.getMethod(),
params, f, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return createXAttrNames((String) json.get(XATTRNAMES_JSON));
}
@Override
public void removeXAttr(Path f, String name) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.REMOVEXATTR.toString());
params.put(XATTR_NAME_PARAM, name);
HttpURLConnection conn = getConnection(Operation.REMOVEXATTR.getMethod(),
params, f, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
@Override
public Collection<BlockStoragePolicy> getAllStoragePolicies()
throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.GETALLSTORAGEPOLICY.toString());
HttpURLConnection conn = getConnection(
Operation.GETALLSTORAGEPOLICY.getMethod(), params, new Path(getUri()
.toString(), "/"), false);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return createStoragePolicies((JSONObject) json.get(STORAGE_POLICIES_JSON));
}
private Collection<BlockStoragePolicy> createStoragePolicies(JSONObject map)
throws IOException {
JSONArray jsonArray = (JSONArray) map.get(STORAGE_POLICY_JSON);
BlockStoragePolicy[] policies = new BlockStoragePolicy[jsonArray.size()];
for (int i = 0; i < jsonArray.size(); i++) {
policies[i] = createStoragePolicy((JSONObject) jsonArray.get(i));
}
return Arrays.asList(policies);
}
@Override
public BlockStoragePolicy getStoragePolicy(Path src) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.GETSTORAGEPOLICY.toString());
HttpURLConnection conn = getConnection(
Operation.GETSTORAGEPOLICY.getMethod(), params, src, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return createStoragePolicy((JSONObject) json.get(STORAGE_POLICY_JSON));
}
private BlockStoragePolicy createStoragePolicy(JSONObject policyJson)
throws IOException {
byte id = ((Number) policyJson.get("id")).byteValue();
String name = (String) policyJson.get("name");
StorageType[] storageTypes = toStorageTypes((JSONArray) policyJson
.get("storageTypes"));
StorageType[] creationFallbacks = toStorageTypes((JSONArray) policyJson
.get("creationFallbacks"));
StorageType[] replicationFallbacks = toStorageTypes((JSONArray) policyJson
.get("replicationFallbacks"));
Boolean copyOnCreateFile = (Boolean) policyJson.get("copyOnCreateFile");
return new BlockStoragePolicy(id, name, storageTypes, creationFallbacks,
replicationFallbacks, copyOnCreateFile.booleanValue());
}
private StorageType[] toStorageTypes(JSONArray array) throws IOException {
if (array == null) {
return null;
} else {
List<StorageType> storageTypes = new ArrayList<StorageType>(array.size());
for (Object name : array) {
storageTypes.add(StorageType.parseStorageType((String) name));
}
return storageTypes.toArray(new StorageType[storageTypes.size()]);
}
}
@Override
public void setStoragePolicy(Path src, String policyName) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.SETSTORAGEPOLICY.toString());
params.put(POLICY_NAME_PARAM, policyName);
HttpURLConnection conn = getConnection(
Operation.SETSTORAGEPOLICY.getMethod(), params, src, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
@Override
public void unsetStoragePolicy(Path src) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.UNSETSTORAGEPOLICY.toString());
HttpURLConnection conn = getConnection(
Operation.UNSETSTORAGEPOLICY.getMethod(), params, src, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
public void allowSnapshot(Path path) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.ALLOWSNAPSHOT.toString());
HttpURLConnection conn = getConnection(
Operation.ALLOWSNAPSHOT.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
public void disallowSnapshot(Path path) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.DISALLOWSNAPSHOT.toString());
HttpURLConnection conn = getConnection(
Operation.DISALLOWSNAPSHOT.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
@Override
public final Path createSnapshot(Path path, String snapshotName)
throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.CREATESNAPSHOT.toString());
if (snapshotName != null) {
params.put(SNAPSHOT_NAME_PARAM, snapshotName);
}
HttpURLConnection conn = getConnection(Operation.CREATESNAPSHOT.getMethod(),
params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return new Path((String) json.get(SNAPSHOT_JSON));
}
@Override
public void renameSnapshot(Path path, String snapshotOldName,
String snapshotNewName) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.RENAMESNAPSHOT.toString());
params.put(SNAPSHOT_NAME_PARAM, snapshotNewName);
params.put(OLD_SNAPSHOT_NAME_PARAM, snapshotOldName);
HttpURLConnection conn = getConnection(Operation.RENAMESNAPSHOT.getMethod(),
params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
@Override
public void deleteSnapshot(Path path, String snapshotName)
throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.DELETESNAPSHOT.toString());
params.put(SNAPSHOT_NAME_PARAM, snapshotName);
HttpURLConnection conn = getConnection(Operation.DELETESNAPSHOT.getMethod(),
params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
public SnapshotDiffReport getSnapshotDiffReport(Path path,
String snapshotOldName, String snapshotNewName) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.GETSNAPSHOTDIFF.toString());
params.put(SNAPSHOT_NAME_PARAM, snapshotNewName);
params.put(OLD_SNAPSHOT_NAME_PARAM, snapshotOldName);
HttpURLConnection conn = getConnection(
Operation.GETSNAPSHOTDIFF.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.toSnapshotDiffReport(json);
}
public SnapshotDiffReportListing getSnapshotDiffReportListing(Path path, String snapshotOldName,
String snapshotNewName, byte[] snapshotDiffStartPath, Integer snapshotDiffIndex)
throws IOException {
Map<String, String> params = new HashMap<>();
params.put(OP_PARAM, Operation.GETSNAPSHOTDIFFLISTING.toString());
params.put(SNAPSHOT_NAME_PARAM, snapshotNewName);
params.put(OLD_SNAPSHOT_NAME_PARAM, snapshotOldName);
params.put(SNAPSHOT_DIFF_START_PATH, DFSUtilClient.bytes2String(snapshotDiffStartPath));
params.put(SNAPSHOT_DIFF_INDEX, snapshotDiffIndex.toString());
HttpURLConnection conn = getConnection(
Operation.GETSNAPSHOTDIFFLISTING.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.toSnapshotDiffReportListing(json);
}
public SnapshottableDirectoryStatus[] getSnapshottableDirectoryList()
throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.GETSNAPSHOTTABLEDIRECTORYLIST.toString());
HttpURLConnection conn = getConnection(
Operation.GETSNAPSHOTTABLEDIRECTORYLIST.getMethod(),
params, new Path(getUri().toString(), "/"), true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.toSnapshottableDirectoryList(json);
}
public SnapshotStatus[] getSnapshotListing(Path snapshotRoot)
throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.GETSNAPSHOTLIST.toString());
HttpURLConnection conn = getConnection(
Operation.GETSNAPSHOTLIST.getMethod(),
params, snapshotRoot, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.toSnapshotList(json);
}
/**
* This filesystem's capabilities must be in sync with that of
* {@code DistributedFileSystem.hasPathCapability()} except
* where the feature is not exposed (e.g. symlinks).
* {@inheritDoc}
*/
@Override
public boolean hasPathCapability(final Path path, final String capability)
throws IOException {
// query the superclass, which triggers argument validation.
final Path p = makeQualified(path);
switch (validatePathCapabilityArgs(p, capability)) {
case CommonPathCapabilities.FS_ACLS:
case CommonPathCapabilities.FS_APPEND:
case CommonPathCapabilities.FS_CONCAT:
case CommonPathCapabilities.FS_PERMISSIONS:
case CommonPathCapabilities.FS_SNAPSHOTS:
case CommonPathCapabilities.FS_STORAGEPOLICY:
case CommonPathCapabilities.FS_XATTRS:
case CommonPathCapabilities.FS_TRUNCATE:
return true;
case CommonPathCapabilities.FS_SYMLINKS:
return false;
default:
return super.hasPathCapability(p, capability);
}
}
@Override
public FsServerDefaults getServerDefaults() throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.GETSERVERDEFAULTS.toString());
HttpURLConnection conn =
getConnection(Operation.GETSERVERDEFAULTS.getMethod(), params,
new Path(getUri().toString(), "/"), true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.toFsServerDefaults(json);
}
  /**
   * {@inheritDoc}
   * <p>
   * The path argument is ignored; the no-argument
   * {@link #getServerDefaults()} result is returned for every path.
   */
  @Override
  public FsServerDefaults getServerDefaults(Path p) throws IOException {
    return getServerDefaults();
  }
@Override
public void access(final Path path, final FsAction mode) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.CHECKACCESS.toString());
params.put(FSACTION_MODE_PARAM, mode.SYMBOL);
HttpURLConnection conn =
getConnection(Operation.CHECKACCESS.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
public void setErasureCodingPolicy(final Path path, String policyName)
throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.SETECPOLICY.toString());
params.put(EC_POLICY_NAME_PARAM, policyName);
HttpURLConnection conn =
getConnection(Operation.SETECPOLICY.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
public ErasureCodingPolicy getErasureCodingPolicy(final Path path)
throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.GETECPOLICY.toString());
HttpURLConnection conn =
getConnection(Operation.GETECPOLICY.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.toECPolicy(json);
}
public void unsetErasureCodingPolicy(final Path path) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.UNSETECPOLICY.toString());
HttpURLConnection conn =
getConnection(Operation.UNSETECPOLICY.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
@Override
public void satisfyStoragePolicy(final Path path) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.SATISFYSTORAGEPOLICY.toString());
HttpURLConnection conn = getConnection(
Operation.SATISFYSTORAGEPOLICY.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
@Override
public BlockLocation[] getFileBlockLocations(Path path, long start, long len)
throws IOException {
Map<String, String> params = new HashMap<>();
params.put(OP_PARAM, Operation.GETFILEBLOCKLOCATIONS.toString());
params.put(OFFSET_PARAM, Long.toString(start));
params.put(LENGTH_PARAM, Long.toString(len));
HttpURLConnection conn = getConnection(
Operation.GETFILEBLOCKLOCATIONS.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return toBlockLocations(json);
}
@Override
public BlockLocation[] getFileBlockLocations(final FileStatus status,
final long offset, final long length) throws IOException {
if (status == null) {
return null;
}
return getFileBlockLocations(status.getPath(), offset, length);
}
@Override
public FileStatus getFileLinkStatus(final Path path) throws IOException {
Map<String, String> params = new HashMap<>();
params.put(OP_PARAM, Operation.GETFILELINKSTATUS.toString());
HttpURLConnection conn =
getConnection(Operation.GETFILELINKSTATUS.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
HdfsFileStatus status = JsonUtilClient.toFileStatus(json, true);
return status.makeQualified(getUri(), path);
}
@Override
public FsStatus getStatus(final Path path) throws IOException {
Map<String, String> params = new HashMap<>();
params.put(OP_PARAM, Operation.GETSTATUS.toString());
HttpURLConnection conn =
getConnection(Operation.GETSTATUS.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.toFsStatus(json);
}
public Collection<ErasureCodingPolicyInfo> getAllErasureCodingPolicies() throws IOException {
Map<String, String> params = new HashMap<>();
params.put(OP_PARAM, Operation.GETECPOLICIES.toString());
Path path = new Path(getUri().toString(), "/");
HttpURLConnection conn =
getConnection(Operation.GETECPOLICIES.getMethod(), params, path, false);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.getAllErasureCodingPolicies(json);
}
public Map<String, String> getAllErasureCodingCodecs() throws IOException {
Map<String, String> params = new HashMap<>();
params.put(OP_PARAM, Operation.GETECCODECS.toString());
Path path = new Path(getUri().toString(), "/");
HttpURLConnection conn =
getConnection(Operation.GETECCODECS.getMethod(), params, path, false);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.getErasureCodeCodecs(json);
}
public Collection<FileStatus> getTrashRoots(boolean allUsers) {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.GETTRASHROOTS.toString());
params.put(ALLUSERS_PARAM, Boolean.toString(allUsers));
Path path = new Path(getUri().toString(), "/");
try {
HttpURLConnection conn = getConnection(Operation.GETTRASHROOTS.getMethod(),
params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return JsonUtilClient.getTrashRoots(json);
} catch (IOException e) {
return super.getTrashRoots(allUsers);
}
}
@VisibleForTesting
static BlockLocation[] toBlockLocations(JSONObject json) throws IOException {
ObjectMapper mapper = new ObjectMapper();
MapType subType = mapper.getTypeFactory().constructMapType(Map.class,
String.class, BlockLocation[].class);
MapType rootType = mapper.getTypeFactory().constructMapType(Map.class,
mapper.constructType(String.class), mapper.constructType(subType));
Map<String, Map<String, BlockLocation[]>> jsonMap =
mapper.readValue(json.toJSONString(), rootType);
Map<String, BlockLocation[]> locationMap = jsonMap.get(BLOCK_LOCATIONS_JSON);
return locationMap.get(BlockLocation.class.getSimpleName());
}
}
|
HttpFSDataOutputStream
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/collectionincompatibletype/ContainmentMatchers.java
|
{
"start": 4599,
"end": 5110
}
|
class ____ defines the method
0, // index of the owning class's type argument to extract
0, // index of the method argument whose type argument to extract
"java.util.Collection", // type of the method argument
0, // index of the method argument's type argument to extract
"removeAll", // method name
"java.util.Collection"), // method parameter
new TypeArgOfMethodArgMatcher(
"java.util.Collection", //
|
that
|
java
|
apache__flink
|
flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/api/java/typeutils/runtime/WritableSerializerUUIDTest.java
|
{
"start": 1085,
"end": 1717
}
|
class ____ extends SerializerTestBase<WritableID> {
@Override
protected TypeSerializer<WritableID> createSerializer() {
return new WritableSerializer<>(WritableID.class);
}
@Override
protected int getLength() {
return -1;
}
@Override
protected Class<WritableID> getTypeClass() {
return WritableID.class;
}
@Override
protected WritableID[] getTestData() {
return new WritableID[] {
new WritableID(new UUID(0, 0)),
new WritableID(new UUID(1, 0)),
new WritableID(new UUID(1, 1))
};
}
}
|
WritableSerializerUUIDTest
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AbfsTestUtils.java
|
{
"start": 2097,
"end": 5431
}
|
class ____ extends AbstractAbfsIntegrationTest {
private static final Logger LOG =
LoggerFactory.getLogger(AbfsTestUtils.class);
private static final int TOTAL_THREADS_IN_POOL = 5;
public AbfsTestUtils() throws Exception {
super();
}
/**
* If unit tests were interrupted and crushed accidentally, the test containers won't be deleted.
* In that case, dev can use this tool to list and delete all test containers.
* By default, all test container used in E2E tests sharing same prefix: "abfs-testcontainer-"
*/
public void checkContainers() throws Throwable {
assumeThat(this.getAuthType()).isEqualTo(AuthType.SharedKey);
int count = 0;
CloudStorageAccount storageAccount = AzureBlobStorageTestAccount.createTestAccount();
CloudBlobClient blobClient = storageAccount.createCloudBlobClient();
Iterable<CloudBlobContainer> containers
= blobClient.listContainers(TEST_CONTAINER_PREFIX);
for (CloudBlobContainer container : containers) {
count++;
LOG.info("Container {}, URI {}",
container.getName(),
container.getUri());
}
LOG.info("Found {} test containers", count);
}
public void deleteContainers() throws Throwable {
assumeThat(this.getAuthType()).isEqualTo(AuthType.SharedKey);
int count = 0;
CloudStorageAccount storageAccount = AzureBlobStorageTestAccount.createTestAccount();
CloudBlobClient blobClient = storageAccount.createCloudBlobClient();
Iterable<CloudBlobContainer> containers
= blobClient.listContainers(TEST_CONTAINER_PREFIX);
for (CloudBlobContainer container : containers) {
LOG.info("Container {} URI {}",
container.getName(),
container.getUri());
if (container.deleteIfExists()) {
count++;
}
}
LOG.info("Deleted {} test containers", count);
}
/**
* Turn off FS Caching: use if a filesystem with different options from
* the default is required.
* @param conf configuration to patch
*/
public static void disableFilesystemCaching(Configuration conf) {
// Disabling cache to make sure new configs are picked up.
conf.setBoolean(String.format("fs.%s.impl.disable.cache", ABFS_SCHEME), true);
conf.setBoolean(String.format("fs.%s.impl.disable.cache", ABFS_SECURE_SCHEME), true);
}
/**
* Helper method to create files in the given directory.
*
* @param fs The AzureBlobFileSystem instance to use for file creation.
* @param path The source path (directory).
* @param numFiles The number of files to create.
* @throws ExecutionException, InterruptedException If an error occurs during file creation.
*/
public static void createFiles(AzureBlobFileSystem fs, Path path, int numFiles)
throws ExecutionException, InterruptedException {
ExecutorService executorService =
Executors.newFixedThreadPool(TOTAL_THREADS_IN_POOL);
List<Future> futures = new ArrayList<>();
for (int i = 0; i < numFiles; i++) {
final int iter = i;
Future future = executorService.submit(() ->
fs.create(new Path(path, FILE + iter + ".txt")));
futures.add(future);
}
for (Future future : futures) {
future.get();
}
executorService.shutdown();
}
}
|
AbfsTestUtils
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/main/java/org/springframework/web/reactive/result/view/freemarker/FreeMarkerConfigurer.java
|
{
"start": 2607,
"end": 4314
}
|
class ____ extends FreeMarkerConfigurationFactory
implements FreeMarkerConfig, InitializingBean, ResourceLoaderAware {
private @Nullable Configuration configuration;
public FreeMarkerConfigurer() {
setDefaultCharset(StandardCharsets.UTF_8);
}
/**
* Set a preconfigured {@link Configuration} to use for the FreeMarker web
* config — for example, a shared one for web and email usage.
* <p>If this is not set, FreeMarkerConfigurationFactory's properties (inherited
* by this class) have to be specified.
*/
public void setConfiguration(Configuration configuration) {
this.configuration = configuration;
}
/**
* Initialize FreeMarkerConfigurationFactory's {@link Configuration}
* if not overridden by a pre-configured FreeMarker {@link Configuration}.
* <p>Indirectly sets up a {@link ClassTemplateLoader} to use for loading
* Spring macros.
* @see #createConfiguration
* @see #setConfiguration
*/
@Override
public void afterPropertiesSet() throws IOException, TemplateException {
if (this.configuration == null) {
this.configuration = createConfiguration();
}
}
/**
* This implementation registers an additional {@link ClassTemplateLoader}
* for the Spring-provided macros, added to the end of the list.
*/
@Override
protected void postProcessTemplateLoaders(List<TemplateLoader> templateLoaders) {
templateLoaders.add(new ClassTemplateLoader(FreeMarkerConfigurer.class, ""));
}
/**
* Return the {@link Configuration} object wrapped by this bean.
*/
@Override
public Configuration getConfiguration() {
Assert.state(this.configuration != null, "No Configuration available");
return this.configuration;
}
}
|
FreeMarkerConfigurer
|
java
|
spring-projects__spring-boot
|
module/spring-boot-opentelemetry/src/main/java/org/springframework/boot/opentelemetry/testcontainers/GrafanaOtlpLoggingContainerConnectionDetailsFactory.java
|
{
"start": 1483,
"end": 1956
}
|
class ____
extends ContainerConnectionDetailsFactory<LgtmStackContainer, OtlpLoggingConnectionDetails> {
GrafanaOtlpLoggingContainerConnectionDetailsFactory() {
super(ANY_CONNECTION_NAME);
}
@Override
protected OtlpLoggingConnectionDetails getContainerConnectionDetails(
ContainerConnectionSource<LgtmStackContainer> source) {
return new OtlpLoggingContainerConnectionDetails(source);
}
private static final
|
GrafanaOtlpLoggingContainerConnectionDetailsFactory
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/state/operator/restore/unkeyed/NonKeyedJob.java
|
{
"start": 5067,
"end": 6748
}
|
class ____ extends RichMapFunction<Integer, Integer>
implements ListCheckpointed<String> {
private static final long serialVersionUID = 6092985758425330235L;
private final ExecutionMode mode;
private final String valueToStore;
private StatefulStringStoringMap(ExecutionMode mode, String valueToStore) {
this.mode = mode;
this.valueToStore = valueToStore;
}
@Override
public Integer map(Integer value) throws Exception {
return value;
}
@Override
public List<String> snapshotState(long checkpointId, long timestamp) throws Exception {
return Arrays.asList(
valueToStore + getRuntimeContext().getTaskInfo().getIndexOfThisSubtask());
}
@Override
public void restoreState(List<String> state) throws Exception {
switch (mode) {
case GENERATE:
break;
case MIGRATE:
case RESTORE:
Assert.assertEquals(
"Failed for "
+ valueToStore
+ getRuntimeContext().getTaskInfo().getIndexOfThisSubtask(),
1,
state.size());
String value = state.get(0);
Assert.assertEquals(
valueToStore
+ getRuntimeContext().getTaskInfo().getIndexOfThisSubtask(),
value);
}
}
}
private static
|
StatefulStringStoringMap
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/ListAssertBaseTest.java
|
{
"start": 929,
"end": 1538
}
|
class ____ extends BaseTestTemplate<ListAssert<String>, List<? extends String>> {
protected Lists lists;
@Override
protected ListAssert<String> create_assertions() {
return new ListAssert<>(Collections.emptyList());
}
@Override
protected void inject_internal_objects() {
super.inject_internal_objects();
lists = mock(Lists.class);
assertions.lists = lists;
}
protected Lists getLists(ListAssert<String> assertions) {
return assertions.lists;
}
protected Iterables getIterables(ListAssert<String> assertions) {
return assertions.iterables;
}
}
|
ListAssertBaseTest
|
java
|
quarkusio__quarkus
|
integration-tests/maven/src/test/java/io/quarkus/maven/it/LaunchMojoTestBase.java
|
{
"start": 673,
"end": 3776
}
|
class ____ extends RunAndCheckMojoTestBase {
protected abstract ContinuousTestingMavenTestUtils getTestingTestUtils();
@Test
public void testThatTheTestsAreReRunMultiModule()
throws MavenInvocationException, IOException {
//we also check continuous testing
testDir = initProject("projects/multimodule", "projects/multimodule-with-deps");
runAndCheck();
ContinuousTestingMavenTestUtils testingTestUtils = getTestingTestUtils();
ContinuousTestingMavenTestUtils.TestStatus results = testingTestUtils.waitForNextCompletion();
//check that the tests in both modules run
assertEquals(2, results.getTestsPassed(),
"Did not meet expectation for number of tests passed, actual results " + results);
// Edit the "Hello" message.
File source = new File(testDir, "rest/src/main/java/org/acme/HelloResource.java");
final String uuid = UUID.randomUUID().toString();
filter(source, Collections.singletonMap("return \"hello\";", "return \"" + uuid + "\";"));
// Wait until we get "uuid"
// We can't poll, so just pause
try {
Thread.sleep(2 * 1000);
} catch (InterruptedException e) {
fail(e);
}
await()
.pollDelay(100, TimeUnit.MILLISECONDS)
.pollInterval(1, TimeUnit.SECONDS)
.until(source::isFile);
results = testingTestUtils.waitForNextCompletion();
//make sure the test is failing now
assertEquals(1, results.getTestsFailed());
//now modify the passing test
var testSource = new File(testDir, "rest/src/test/java/org/acme/test/SimpleTest.java");
filter(testSource, Collections.singletonMap("Assertions.assertTrue(true);", "Assertions.assertTrue(false);"));
results = testingTestUtils.waitForNextCompletion();
assertEquals(2, results.getTotalTestsFailed(),
"Did not meet expectation for number of tests failed, actual results " + results);
//fix it again
filter(testSource, Collections.singletonMap("Assertions.assertTrue(false);", "Assertions.assertTrue(true);"));
results = testingTestUtils.waitForNextCompletion();
assertEquals(1, results.getTotalTestsFailed(), "Failed, actual results " + results);
assertEquals(1, results.getTotalTestsPassed(), "Failed, actual results " + results);
}
@Test
public void testSelection() throws MavenInvocationException, IOException {
testDir = initProject("projects/test-selection");
run(true, "-Dtest=Ba*ic,Enabled?Test,NotEnabled*#executeAnyway*,!NotEnabledHardDisabled,#alwaysExecute,!#neverExecute");
if (getDefaultLaunchMode() == LaunchMode.DEVELOPMENT) {
// ignore outcome, just wait for the application to start
devModeClient.getHttpResponse();
}
ContinuousTestingMavenTestUtils.TestStatus tests = getTestingTestUtils().waitForNextCompletion();
assertEquals(7, tests.getTestsPassed());
}
}
|
LaunchMojoTestBase
|
java
|
apache__camel
|
components/camel-smooks/src/generated/java/org/apache/camel/component/smooks/converter/SourceConverterLoader.java
|
{
"start": 892,
"end": 4418
}
|
class ____ implements TypeConverterLoader, CamelContextAware {
private CamelContext camelContext;
public SourceConverterLoader() {
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException {
registerConverters(registry);
}
private void registerConverters(TypeConverterRegistry registry) {
addTypeConverter(registry, org.smooks.api.io.Source.class, org.apache.camel.WrappedFile.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.smooks.converter.SourceConverter.toURISource((org.apache.camel.WrappedFile) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, org.smooks.io.source.JavaSource.class, java.lang.Object.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.smooks.converter.SourceConverter.toJavaSource(value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, org.smooks.io.source.JavaSource.class, org.smooks.io.sink.JavaSink.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.smooks.converter.SourceConverter.toJavaSource((org.smooks.io.sink.JavaSink) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, org.smooks.io.source.JavaSourceWithoutEventStream.class, java.lang.Object.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.smooks.converter.SourceConverter.toJavaSourceWithoutEventStream(value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, org.smooks.io.source.StreamSource.class, java.io.InputStream.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.smooks.converter.SourceConverter.toStreamSource((java.io.InputStream) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, org.smooks.io.source.StringSource.class, java.lang.String.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.smooks.converter.SourceConverter.toStringSource((java.lang.String) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
}
private static void addTypeConverter(TypeConverterRegistry registry, Class<?> toType, Class<?> fromType, boolean allowNull, SimpleTypeConverter.ConversionMethod method) {
registry.addTypeConverter(toType, fromType, new SimpleTypeConverter(allowNull, method));
}
}
|
SourceConverterLoader
|
java
|
apache__avro
|
lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKey.java
|
{
"start": 918,
"end": 1138
}
|
class ____<T> extends AvroWrapper<T> {
/** Wrap null. Construct {@link AvroKey} wrapping no key. */
public AvroKey() {
this(null);
}
/** Wrap a key. */
public AvroKey(T datum) {
super(datum);
}
}
|
AvroKey
|
java
|
apache__spark
|
sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedDeltaByteArrayReader.java
|
{
"start": 1521,
"end": 5822
}
|
class ____ extends VectorizedReaderBase
implements VectorizedValuesReader, RequiresPreviousReader {
private final VectorizedDeltaBinaryPackedReader prefixLengthReader;
private final VectorizedDeltaLengthByteArrayReader suffixReader;
private WritableColumnVector prefixLengthVector;
private ByteBuffer previous;
private int currentRow = 0;
// Temporary variable used by readBinary
private final WritableColumnVector binaryValVector;
// Temporary variable used by skipBinary
private final WritableColumnVector tempBinaryValVector;
VectorizedDeltaByteArrayReader() {
this.prefixLengthReader = new VectorizedDeltaBinaryPackedReader();
this.suffixReader = new VectorizedDeltaLengthByteArrayReader();
binaryValVector = new OnHeapColumnVector(1, BinaryType);
tempBinaryValVector = new OnHeapColumnVector(1, BinaryType);
}
@Override
public void initFromPage(int valueCount, ByteBufferInputStream in) throws IOException {
prefixLengthVector = new OnHeapColumnVector(valueCount, IntegerType);
prefixLengthReader.initFromPage(valueCount, in);
prefixLengthReader.readIntegers(prefixLengthReader.getTotalValueCount(),
prefixLengthVector, 0);
suffixReader.initFromPage(valueCount, in);
}
@Override
public Binary readBinary(int len) {
readValues(1, binaryValVector, 0);
return Binary.fromConstantByteArray(binaryValVector.getBinary(0));
}
private void readValues(int total, WritableColumnVector c, int rowId) {
for (int i = 0; i < total; i++) {
// NOTE: due to PARQUET-246, it is important that we
// respect prefixLength which was read from prefixLengthReader,
// even for the *first* value of a page. Even though the first
// value of the page should have an empty prefix, it may not
// because of PARQUET-246.
int prefixLength = prefixLengthVector.getInt(currentRow);
ByteBuffer suffix = suffixReader.getBytes(currentRow);
byte[] suffixArray = suffix.array();
int suffixLength = suffix.limit() - suffix.position();
int length = prefixLength + suffixLength;
// We have to do this to materialize the output
WritableColumnVector arrayData = c.arrayData();
int offset = arrayData.getElementsAppended();
if (prefixLength != 0) {
arrayData.appendBytes(prefixLength, previous.array(), previous.position());
}
arrayData.appendBytes(suffixLength, suffixArray, suffix.position());
c.putArray(rowId + i, offset, length);
previous = arrayData.getByteBuffer(offset, length);
currentRow++;
}
}
@Override
public void readBinary(int total, WritableColumnVector c, int rowId) {
readValues(total, c, rowId);
}
/**
* There was a bug (PARQUET-246) in which DeltaByteArrayWriter's reset() method did not clear the
* previous value state that it tracks internally. This resulted in the first value of all pages
* (except for the first page) to be a delta from the last value of the previous page. In order to
* read corrupted files written with this bug, when reading a new page we need to recover the
* previous page's last value to use it (if needed) to read the first value.
*/
@Override
public void setPreviousReader(ValuesReader reader) {
if (reader != null) {
this.previous = ((VectorizedDeltaByteArrayReader) reader).previous;
}
}
@Override
public void skipBinary(int total) {
WritableColumnVector c1 = tempBinaryValVector;
WritableColumnVector c2 = binaryValVector;
for (int i = 0; i < total; i++) {
int prefixLength = prefixLengthVector.getInt(currentRow);
ByteBuffer suffix = suffixReader.getBytes(currentRow);
byte[] suffixArray = suffix.array();
int suffixLength = suffix.limit() - suffix.position();
int length = prefixLength + suffixLength;
WritableColumnVector arrayData = c1.arrayData();
c1.reset();
if (prefixLength != 0) {
arrayData.appendBytes(prefixLength, previous.array(), previous.position());
}
arrayData.appendBytes(suffixLength, suffixArray, suffix.position());
previous = arrayData.getByteBuffer(0, length);
currentRow++;
WritableColumnVector tmp = c1;
c1 = c2;
c2 = tmp;
}
}
}
|
VectorizedDeltaByteArrayReader
|
java
|
quarkusio__quarkus
|
extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/QuarkusRecoveryService.java
|
{
"start": 202,
"end": 1588
}
|
class ____ extends RecoveryManagerService {
private static RecoveryManagerService recoveryManagerService;
private List<XAResourceRecovery> xaResources;
private boolean isCreated;
public static RecoveryManagerService getInstance() {
if (recoveryManagerService == null) {
recoveryManagerService = new QuarkusRecoveryService();
}
return recoveryManagerService;
}
private QuarkusRecoveryService() {
xaResources = new ArrayList<>();
isCreated = false;
}
@Override
public void addXAResourceRecovery(XAResourceRecovery xares) {
if (isCreated) {
super.addXAResourceRecovery(xares);
} else {
xaResources.add(xares);
}
}
@Override
public void removeXAResourceRecovery(XAResourceRecovery xares) {
if (isCreated) {
super.removeXAResourceRecovery(xares);
} else {
xaResources.remove(xares);
}
}
@Override
public void create() {
super.create();
isCreated = true;
for (XAResourceRecovery xares : xaResources) {
super.addXAResourceRecovery(xares);
}
xaResources.clear();
}
@Override
public void destroy() {
super.destroy();
isCreated = false;
recoveryManagerService = null;
}
}
|
QuarkusRecoveryService
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/recursive/assertion/RecursiveAssertionConfiguration.java
|
{
"start": 18988,
"end": 20480
}
|
class ____ {
* String s = "Don't look at me!";
* }
*
* assertThat(new Example()).usingRecursiveAssertion(...).allFieldsSatisfy(o -> myPredicate(o)); </code></pre>
*
* <p>With no recursion into Java Class Library types, <code>myPredicate()</code> is applied to the field <code>s</code>
* but not to the internal fields of {@link String}. With recursion into Java standard types active, the internal
* fields of String will be examined as well.</p>
* <p>By default, recursion into Java Class Library types is <em>disabled</em>.
*
* @param recursionIntoJavaClassLibraryTypes <code>true</code> to enable recursion into Java Class Library types, <code>false</code> to disable it. Defaults to <code>false</code>.
* @return This builder.
*/
public Builder withRecursionIntoJavaClassLibraryTypes(final boolean recursionIntoJavaClassLibraryTypes) {
this.skipJavaLibraryTypeObjects = !recursionIntoJavaClassLibraryTypes;
return this;
}
/**
* Makes the recursive assertion to ignore the object under test fields of the given types.
* The fields are ignored if their types <b>exactly match one of the ignored types</b>, for example if a field is a subtype of an ignored type it is not ignored.
* <p>
* If some object under test fields are null it is not possible to evaluate their types and thus these fields are not ignored.
* <p>
* Example:
* <pre><code class='java'>
|
Example
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/data/SerObject.java
|
{
"start": 244,
"end": 927
}
|
class ____ implements Serializable {
static final long serialVersionUID = 982352321924L;
private String data;
public SerObject() {
}
public SerObject(String data) {
this.data = data;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof SerObject) ) {
return false;
}
SerObject serObject = (SerObject) o;
if ( data != null ? !data.equals( serObject.data ) : serObject.data != null ) {
return false;
}
return true;
}
public int hashCode() {
return (data != null ? data.hashCode() : 0);
}
}
|
SerObject
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/rerank/ElasticInferenceServiceRerankModelTests.java
|
{
"start": 620,
"end": 1135
}
|
class ____ extends ESTestCase {
public static ElasticInferenceServiceRerankModel createModel(String url, String modelId) {
return new ElasticInferenceServiceRerankModel(
"id",
TaskType.RERANK,
"service",
new ElasticInferenceServiceRerankServiceSettings(modelId),
EmptyTaskSettings.INSTANCE,
EmptySecretSettings.INSTANCE,
ElasticInferenceServiceComponents.of(url)
);
}
}
|
ElasticInferenceServiceRerankModelTests
|
java
|
apache__flink
|
flink-queryable-state/flink-queryable-state-runtime/src/main/java/org/apache/flink/queryablestate/server/KvStateServerHandler.java
|
{
"start": 2286,
"end": 5771
}
|
class ____
extends AbstractServerHandler<KvStateInternalRequest, KvStateResponse> {
/** KvState registry holding references to the KvState instances. */
private final KvStateRegistry registry;
/**
* Create the handler used by the {@link KvStateServerImpl}.
*
* @param server the {@link KvStateServerImpl} using the handler.
* @param kvStateRegistry registry to query.
* @param serializer the {@link MessageSerializer} used to (de-) serialize the different
* messages.
* @param stats server statistics collector.
*/
public KvStateServerHandler(
final KvStateServerImpl server,
final KvStateRegistry kvStateRegistry,
final MessageSerializer<KvStateInternalRequest, KvStateResponse> serializer,
final KvStateRequestStats stats) {
super(server, serializer, stats);
this.registry = Preconditions.checkNotNull(kvStateRegistry);
}
@Override
public CompletableFuture<KvStateResponse> handleRequest(
final long requestId, final KvStateInternalRequest request) {
final CompletableFuture<KvStateResponse> responseFuture = new CompletableFuture<>();
try {
final KvStateEntry<?, ?, ?> kvState = registry.getKvState(request.getKvStateId());
if (kvState == null) {
responseFuture.completeExceptionally(
new UnknownKvStateIdException(getServerName(), request.getKvStateId()));
} else {
byte[] serializedKeyAndNamespace = request.getSerializedKeyAndNamespace();
byte[] serializedResult = getSerializedValue(kvState, serializedKeyAndNamespace);
if (serializedResult != null) {
responseFuture.complete(new KvStateResponse(serializedResult));
} else {
responseFuture.completeExceptionally(
new UnknownKeyOrNamespaceException(getServerName()));
}
}
return responseFuture;
} catch (Throwable t) {
String errMsg =
"Error while processing request with ID "
+ requestId
+ ". Caused by: "
+ ExceptionUtils.stringifyException(t);
responseFuture.completeExceptionally(new RuntimeException(errMsg));
return responseFuture;
}
}
private static <K, N, V> byte[] getSerializedValue(
final KvStateEntry<K, N, V> entry, final byte[] serializedKeyAndNamespace)
throws Exception {
return LambdaUtil.withContextClassLoader(
entry.getUserClassLoader(),
() -> {
final InternalKvState<K, N, V> state = entry.getState();
final KvStateInfo<K, N, V> infoForCurrentThread =
entry.getInfoForCurrentThread();
return state.getSerializedValue(
serializedKeyAndNamespace,
infoForCurrentThread.getKeySerializer(),
infoForCurrentThread.getNamespaceSerializer(),
infoForCurrentThread.getStateValueSerializer());
});
}
@Override
public CompletableFuture<Void> shutdown() {
return CompletableFuture.completedFuture(null);
}
}
|
KvStateServerHandler
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/savedrequest/DefaultSavedRequestTests.java
|
{
"start": 878,
"end": 5258
}
|
class ____ {
// SEC-308, SEC-315
@Test
public void headersAreCaseInsensitive() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("USER-aGenT", "Mozilla");
DefaultSavedRequest saved = new DefaultSavedRequest(request);
assertThat(saved.getHeaderValues("user-agent").get(0)).isEqualTo("Mozilla");
}
// SEC-1412
@Test
public void discardsIfNoneMatchHeader() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("If-None-Match", "somehashvalue");
DefaultSavedRequest saved = new DefaultSavedRequest(request);
assertThat(saved.getHeaderValues("if-none-match")).isEmpty();
}
// SEC-3082
@Test
public void parametersAreCaseSensitive() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter("AnotHerTest", "Hi dad");
request.addParameter("thisisatest", "Hi mom");
DefaultSavedRequest saved = new DefaultSavedRequest(request);
assertThat(saved.getParameterValues("thisisatest")[0]).isEqualTo("Hi mom");
assertThat(saved.getParameterValues("anothertest")).isNull();
}
@Test
public void getRedirectUrlWhenNoQueryAndDefaultMatchingRequestParameterNameThenNoQuery() throws Exception {
DefaultSavedRequest savedRequest = new DefaultSavedRequest(new MockHttpServletRequest());
assertThat(savedRequest.getParameterMap()).doesNotContainKey("success");
assertThat(new URL(savedRequest.getRedirectUrl())).hasNoQuery();
}
@Test
public void getRedirectUrlWhenQueryAndDefaultMatchingRequestParameterNameNullThenNoQuery() throws Exception {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setQueryString("foo=bar");
DefaultSavedRequest savedRequest = new DefaultSavedRequest(request);
assertThat(savedRequest.getParameterMap()).doesNotContainKey("success");
assertThat(new URL(savedRequest.getRedirectUrl())).hasQuery("foo=bar");
}
@Test
public void getRedirectUrlWhenNoQueryAndNullMatchingRequestParameterNameThenNoQuery() throws Exception {
DefaultSavedRequest savedRequest = new DefaultSavedRequest(new MockHttpServletRequest());
assertThat(savedRequest.getParameterMap()).doesNotContainKey("success");
assertThat(new URL(savedRequest.getRedirectUrl())).hasNoQuery();
}
@Test
public void getRedirectUrlWhenNoQueryAndMatchingRequestParameterNameThenQuery() throws Exception {
DefaultSavedRequest savedRequest = new DefaultSavedRequest(new MockHttpServletRequest(), "success");
assertThat(savedRequest.getParameterMap()).doesNotContainKey("success");
assertThat(new URL(savedRequest.getRedirectUrl())).hasQuery("success");
}
@Test
public void getRedirectUrlWhenQueryEmptyAndMatchingRequestParameterNameThenQuery() throws Exception {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setQueryString("");
DefaultSavedRequest savedRequest = new DefaultSavedRequest(request, "success");
assertThat(savedRequest.getParameterMap()).doesNotContainKey("success");
assertThat(new URL(savedRequest.getRedirectUrl())).hasQuery("success");
}
@Test
public void getRedirectUrlWhenQueryEndsAmpersandAndMatchingRequestParameterNameThenQuery() throws Exception {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setQueryString("foo=bar&");
DefaultSavedRequest savedRequest = new DefaultSavedRequest(request, "success");
assertThat(savedRequest.getParameterMap()).doesNotContainKey("success");
assertThat(new URL(savedRequest.getRedirectUrl())).hasQuery("foo=bar&success");
}
@Test
public void getRedirectUrlWhenQueryDoesNotEndAmpersandAndMatchingRequestParameterNameThenQuery() throws Exception {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setQueryString("foo=bar");
DefaultSavedRequest savedRequest = new DefaultSavedRequest(request, "success");
assertThat(savedRequest.getParameterMap()).doesNotContainKey("success");
assertThat(new URL(savedRequest.getRedirectUrl())).hasQuery("foo=bar&success");
}
// gh-13438
@Test
public void getRedirectUrlWhenQueryAlreadyHasSuccessThenDoesNotAdd() throws Exception {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setQueryString("foo=bar&success");
DefaultSavedRequest savedRequest = new DefaultSavedRequest(request, "success");
assertThat(savedRequest.getRedirectUrl()).contains("foo=bar&success");
}
}
|
DefaultSavedRequestTests
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customproviders/UniResponseFilter.java
|
{
"start": 291,
"end": 820
}
|
class ____ {
@ServerResponseFilter
Uni<Void> filter(SimpleResourceInfo simplifiedResourceInfo,
ContainerResponseContext responseContext) {
if (simplifiedResourceInfo.getMethodName() != null) {
return Uni.createFrom().deferred(() -> {
responseContext.getHeaders().putSingle("java-method", simplifiedResourceInfo.getMethodName());
return Uni.createFrom().nullItem();
});
}
return Uni.createFrom().nullItem();
}
}
|
UniResponseFilter
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/StructuredFunctionsITCase.java
|
{
"start": 19332,
"end": 20217
}
|
class ____ {
private static final String TYPE_STRING =
String.format("STRUCTURED<'%s', a INT, b STRING>", Type1.class.getName());
public Integer a;
public String b;
public static Type1 of(final Integer a, final String b) {
final Type1 t = new Type1();
t.a = a;
t.b = b;
return t;
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Type1 that = (Type1) o;
return Objects.equals(this.a, that.a) && Objects.equals(this.b, that.b);
}
@Override
public int hashCode() {
return Objects.hash(a, b);
}
public static
|
Type1
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/AlreadyCheckedTest.java
|
{
"start": 11651,
"end": 12024
}
|
class ____ {
abstract String bar();
}
}
""")
.doTest();
}
@Test
public void autoValue_withEnum() {
helper
.addSourceLines(
"Test.java",
"""
import com.google.auto.value.AutoValue;
import com.google.errorprone.annotations.Immutable;
|
Foo
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/delegation/ParserFactory.java
|
{
"start": 1267,
"end": 1457
}
|
interface ____ extends Factory {
/** Creates a new parser. */
Parser create(Context context);
/** Context provided when a parser is created. */
@PublicEvolving
|
ParserFactory
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/providers/serialisers/FilePartBodyHandler.java
|
{
"start": 480,
"end": 1365
}
|
class ____ implements MessageBodyWriter<FilePart> {
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return FilePart.class.isAssignableFrom(type);
}
public void writeTo(FilePart uploadFile, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders,
OutputStream entityStream) throws IOException {
httpHeaders.add(HttpHeaders.CONTENT_LENGTH, String.valueOf(uploadFile.count));
doWrite(uploadFile, entityStream);
}
protected void doWrite(FilePart uploadFile, OutputStream out) throws IOException {
PathPartBodyHandler.doWrite(new BufferedInputStream(new FileInputStream(uploadFile.file)), uploadFile.offset,
uploadFile.count, out);
}
}
|
FilePartBodyHandler
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java
|
{
"start": 1138,
"end": 4184
}
|
class ____ extends AcknowledgedRequest<CloseIndexRequest> implements IndicesRequest.Replaceable {
private String[] indices;
private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen();
private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT;
public CloseIndexRequest(StreamInput in) throws IOException {
super(in);
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
waitForActiveShards = ActiveShardCount.readFrom(in);
}
public CloseIndexRequest() {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
}
/**
* Constructs a new close index request for the specified index.
*/
public CloseIndexRequest(String... indices) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
this.indices = indices;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (CollectionUtils.isEmpty(indices)) {
validationException = addValidationError("index is missing", validationException);
}
return validationException;
}
/**
* The indices to be closed
* @return the indices to be closed
*/
@Override
public String[] indices() {
return indices;
}
/**
* Sets the indices to be closed
* @param indices the indices to be closed
* @return the request itself
*/
@Override
public CloseIndexRequest indices(String... indices) {
this.indices = indices;
return this;
}
/**
* Specifies what type of requested indices to ignore and how to deal with wildcard expressions.
* For example indices that don't exist.
*
* @return the desired behaviour regarding indices to ignore and wildcard indices expressions
*/
@Override
public IndicesOptions indicesOptions() {
return indicesOptions;
}
/**
* Specifies what type of requested indices to ignore and how to deal wild wildcard expressions.
* For example indices that don't exist.
*
* @param indicesOptions the desired behaviour regarding indices to ignore and wildcard indices expressions
* @return the request itself
*/
public CloseIndexRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
public ActiveShardCount waitForActiveShards() {
return waitForActiveShards;
}
public CloseIndexRequest waitForActiveShards(final ActiveShardCount waitForActiveShards) {
this.waitForActiveShards = waitForActiveShards;
return this;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(indices);
indicesOptions.writeIndicesOptions(out);
waitForActiveShards.writeTo(out);
}
}
|
CloseIndexRequest
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/protocol/ReconnectionHandler.java
|
{
"start": 1750,
"end": 7587
}
|
class ____ {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(ReconnectionHandler.class);
private static final Set<Class<?>> EXECUTION_EXCEPTION_TYPES = LettuceSets.unmodifiableSet(TimeoutException.class,
CancellationException.class, RedisCommandTimeoutException.class, ConnectException.class);
private final ClientOptions clientOptions;
private final Bootstrap bootstrap;
protected Mono<SocketAddress> socketAddressSupplier;
private final ConnectionFacade connectionFacade;
private volatile CompletableFuture<Channel> currentFuture;
private volatile boolean reconnectSuspended;
ReconnectionHandler(ClientOptions clientOptions, Bootstrap bootstrap, Mono<SocketAddress> socketAddressSupplier,
Timer timer, ExecutorService reconnectWorkers, ConnectionFacade connectionFacade) {
LettuceAssert.notNull(socketAddressSupplier, "SocketAddressSupplier must not be null");
LettuceAssert.notNull(bootstrap, "Bootstrap must not be null");
LettuceAssert.notNull(timer, "Timer must not be null");
LettuceAssert.notNull(reconnectWorkers, "ExecutorService must not be null");
LettuceAssert.notNull(connectionFacade, "ConnectionFacade must not be null");
this.socketAddressSupplier = socketAddressSupplier;
this.bootstrap = bootstrap;
this.clientOptions = clientOptions;
this.connectionFacade = connectionFacade;
}
/**
* Initiate reconnect and return a {@link ChannelFuture} for synchronization. The resulting future either succeeds or fails.
* It can be {@link ChannelFuture#cancel(boolean) canceled} to interrupt reconnection and channel initialization. A failed
* {@link ChannelFuture} will close the channel.
*
* @return reconnect {@link ChannelFuture}.
*/
protected Tuple2<CompletableFuture<Channel>, CompletableFuture<SocketAddress>> reconnect() {
CompletableFuture<Channel> future = new CompletableFuture<>();
CompletableFuture<SocketAddress> address = new CompletableFuture<>();
socketAddressSupplier.subscribe(remoteAddress -> {
address.complete(remoteAddress);
if (future.isCancelled()) {
return;
}
reconnect0(future, remoteAddress);
}, ex -> {
if (!address.isDone()) {
address.completeExceptionally(ex);
}
future.completeExceptionally(ex);
});
this.currentFuture = future;
return Tuples.of(future, address);
}
private void reconnect0(CompletableFuture<Channel> result, SocketAddress remoteAddress) {
ChannelHandler handler = bootstrap.config().handler();
// reinitialize SslChannelInitializer if Redis - SSL connection.
if (SslConnectionBuilder.isSslChannelInitializer(handler)) {
bootstrap.handler(SslConnectionBuilder.withSocketAddress(handler, remoteAddress));
}
ChannelFuture connectFuture = bootstrap.connect(remoteAddress);
logger.debug("Reconnecting to Redis at {}", remoteAddress);
result.whenComplete((c, t) -> {
if (t instanceof CancellationException) {
connectFuture.cancel(true);
}
});
connectFuture.addListener(future -> {
if (!future.isSuccess()) {
result.completeExceptionally(future.cause());
return;
}
RedisHandshakeHandler handshakeHandler = connectFuture.channel().pipeline().get(RedisHandshakeHandler.class);
if (handshakeHandler == null) {
result.completeExceptionally(new IllegalStateException("RedisHandshakeHandler not registered"));
return;
}
handshakeHandler.channelInitialized().whenComplete((success, throwable) -> {
if (throwable != null) {
if (isExecutionException(throwable)) {
result.completeExceptionally(throwable);
return;
}
if (clientOptions.isSuspendReconnectOnProtocolFailure()) {
logger.error("Disabling autoReconnect due to initialization failure", throwable);
setReconnectSuspended(true);
}
result.completeExceptionally(throwable);
return;
}
if (logger.isDebugEnabled()) {
logger.info("Reconnected to {}, Channel {}", remoteAddress,
ChannelLogDescriptor.logDescriptor(connectFuture.channel()));
} else {
logger.info("Reconnected to {}", remoteAddress);
}
result.complete(connectFuture.channel());
});
});
}
boolean isReconnectSuspended() {
return reconnectSuspended;
}
void setReconnectSuspended(boolean reconnectSuspended) {
this.reconnectSuspended = reconnectSuspended;
}
void prepareClose() {
CompletableFuture<?> currentFuture = this.currentFuture;
if (currentFuture != null && !currentFuture.isDone()) {
currentFuture.cancel(true);
}
}
/**
* @param throwable
* @return {@code true} if {@code throwable} is an execution {@link Exception}.
*/
public static boolean isExecutionException(Throwable throwable) {
for (Class<?> type : EXECUTION_EXCEPTION_TYPES) {
if (type.isAssignableFrom(throwable.getClass())) {
return true;
}
}
return false;
}
ClientOptions getClientOptions() {
return clientOptions;
}
}
|
ReconnectionHandler
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/NonCanonicalStaticImport.java
|
{
"start": 1351,
"end": 1819
}
|
class ____ extends BugChecker implements ImportTreeMatcher {
@Override
public Description matchImport(ImportTree tree, VisitorState state) {
StaticImportInfo importInfo = StaticImports.tryCreate(tree, state);
if (importInfo == null || importInfo.isCanonical() || !importInfo.members().isEmpty()) {
return Description.NO_MATCH;
}
return describeMatch(tree, SuggestedFix.replace(tree, importInfo.importStatement()));
}
}
|
NonCanonicalStaticImport
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/convert/ConvertingSerializerTest.java
|
{
"start": 4252,
"end": 4552
}
|
class ____ extends ValueSerializer<Target>
{
@Override
public void serialize(Target a, JsonGenerator g, SerializationContext provider) {
g.writeString("Target");
}
}
// [databind#731]
@JsonPropertyOrder({ "a", "b" })
public static
|
TargetSerializer
|
java
|
apache__camel
|
tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/generics/OwbGenericArrayTypeImpl.java
|
{
"start": 937,
"end": 1900
}
|
class ____ implements GenericArrayType {
private final Type componentType;
public OwbGenericArrayTypeImpl(Type componentType) {
this.componentType = componentType;
}
@Override
public Type getGenericComponentType() {
return componentType;
}
/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return componentType.hashCode();
}
/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
} else if (obj instanceof GenericArrayType) {
return ((GenericArrayType) obj).getGenericComponentType().equals(componentType);
} else {
return false;
}
}
@Override
public String toString() {
return componentType + "[]";
}
}
|
OwbGenericArrayTypeImpl
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/QueryParameterSetter.java
|
{
"start": 8488,
"end": 9699
}
|
class ____ extends QueryMetadata {
private final Query query;
private final Query unwrapped;
BindableQuery(Query query) {
super(query);
this.query = query;
this.unwrapped = Proxy.isProxyClass(query.getClass()) ? query.unwrap(null) : query;
}
public static BindableQuery from(Query query) {
return new BindableQuery(query);
}
public Query getQuery() {
return query;
}
public <T> Query setParameter(Parameter<T> param, T value) {
return unwrapped.setParameter(param, value);
}
public Query setParameter(Parameter<Date> param, Date value, TemporalType temporalType) {
return unwrapped.setParameter(param, value, temporalType);
}
public Query setParameter(String name, Object value) {
return unwrapped.setParameter(name, value);
}
public Query setParameter(String name, Date value, TemporalType temporalType) {
return query.setParameter(name, value, temporalType);
}
public Query setParameter(int position, Object value) {
return unwrapped.setParameter(position, value);
}
public Query setParameter(int position, Date value, TemporalType temporalType) {
return unwrapped.setParameter(position, value, temporalType);
}
}
}
|
BindableQuery
|
java
|
apache__camel
|
components/camel-caffeine/src/test/java/org/apache/camel/component/caffeine/cache/CaffeineCacheFromScratchStatsCounterTest.java
|
{
"start": 1259,
"end": 3724
}
|
class ____ extends CamelTestSupport {
private final MetricRegistry metricRegistry = new MetricRegistry();
@BindToRegistry("statsCounter")
private MetricsStatsCounter msc = new MetricsStatsCounter(metricRegistry);
@Test
void testCacheStatsCounter() {
int key = 0;
int val = 0;
for (int i = 0; i < 10; i++) {
key++;
val++;
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_PUT)
.withHeader(CaffeineConstants.KEY, key).withBody(val).to("direct://start").send();
}
MockEndpoint mock1 = getMockEndpoint("mock:result");
mock1.expectedMinimumMessageCount(1);
mock1.expectedBodiesReceived(val);
mock1.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, true);
mock1.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_GET)
.withHeader(CaffeineConstants.KEY, key).withBody(val).to("direct://get").send();
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_GET)
.withHeader(CaffeineConstants.KEY, key).withBody(val).to("direct://get").send();
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_GET)
.withHeader(CaffeineConstants.KEY, 12).withBody(3).to("direct://get").send();
assertEquals(2, metricRegistry.counter("camelcache.hits").getCount());
assertEquals(1, metricRegistry.counter("camelcache.misses").getCount());
}
// ****************************
// Route
// ****************************
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct://start").toF("caffeine-cache://%s?statsEnabled=true&statsCounter=#statsCounter", "test")
.to("log:org.apache.camel.component.caffeine?level=INFO&showAll=true&multiline=true").to("mock:result");
from("direct://get").toF("caffeine-cache://%s?statsEnabled=true&statsCounter=#statsCounter", "test")
.to("log:org.apache.camel.component.caffeine?level=INFO&showAll=true&multiline=true")
.to("mock:result-get");
}
};
}
}
|
CaffeineCacheFromScratchStatsCounterTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/generic/GenericTypeTest.java
|
{
"start": 1107,
"end": 1247
}
|
class ____<G> implements Serializable{
G b;
public G getB() {
return b;
}
public void setB(G b) {
this.b = b;
}
}
static
|
Gen
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webflux/src/test/java/org/springframework/boot/webflux/autoconfigure/actuate/web/WebFluxEndpointIntegrationTests.java
|
{
"start": 2406,
"end": 4675
}
|
class ____ {
private final ReactiveWebApplicationContextRunner contextRunner = new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class, CodecsAutoConfiguration.class,
WebFluxAutoConfiguration.class, HttpHandlerAutoConfiguration.class, EndpointAutoConfiguration.class,
WebEndpointAutoConfiguration.class, ManagementContextAutoConfiguration.class,
BeansEndpointAutoConfiguration.class))
.withUserConfiguration(EndpointsConfiguration.class);
@Test
void linksAreProvidedToAllEndpointTypes() {
this.contextRunner.withPropertyValues("management.endpoints.web.exposure.include:*").run((context) -> {
WebTestClient client = createWebTestClient(context);
client.get()
.uri("/actuator")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("_links.beans")
.isNotEmpty()
.jsonPath("_links.restcontroller")
.isNotEmpty()
.jsonPath("_links.controller")
.isNotEmpty();
});
}
@Test
void linksPageIsNotAvailableWhenDisabled() {
this.contextRunner.withPropertyValues("management.endpoints.web.discovery.enabled=false").run((context) -> {
WebTestClient client = createWebTestClient(context);
client.get().uri("/actuator").exchange().expectStatus().isNotFound();
});
}
@Test
void endpointJsonMapperCanBeApplied() {
this.contextRunner.withUserConfiguration(EndpointJsonMapperConfiguration.class)
.withPropertyValues("management.endpoints.web.exposure.include:*")
.run((context) -> {
WebTestClient client = createWebTestClient(context);
client.get()
.uri("/actuator/beans")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.consumeWith((result) -> {
String json = new String(result.getResponseBody(), StandardCharsets.UTF_8);
assertThat(json).contains("\"scope\":\"notelgnis\"");
});
});
}
private WebTestClient createWebTestClient(ApplicationContext context) {
return WebTestClient.bindToApplicationContext(context)
.configureClient()
.baseUrl("https://spring.example.org")
.build();
}
@org.springframework.boot.actuate.endpoint.web.annotation.ControllerEndpoint(id = "controller")
@SuppressWarnings("removal")
static
|
WebFluxEndpointIntegrationTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java
|
{
"start": 67393,
"end": 67629
}
|
/**
 * Test configuration that exposes a {@code GenericPersonConverter} as a
 * plain {@code GenericConverter} bean with no qualifier applied.
 */
class ____ {

    /** Registers the person converter without any qualifier annotations. */
    @Bean
    GenericConverter genericPersonConverter() {
        GenericConverter converter = new GenericPersonConverter();
        return converter;
    }
}
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties
static
|
NonQualifiedGenericConverterConfiguration
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSOutputStream.java
|
{
"start": 4044,
"end": 22786
}
|
/**
 * Unit tests for {@code DFSOutputStream} internals: double-close semantics,
 * packet/chunk sizing, congestion backoff in the {@code DataStreamer},
 * block-placement flags, lease handling on close, and flush capabilities.
 * Uses a shared 3-DataNode {@code MiniDFSCluster} plus Whitebox/reflection
 * to reach private state of the stream and streamer.
 */
class ____ {
// Shared cluster, created once for the whole test class (see setup/tearDown).
static MiniDFSCluster cluster;
@BeforeAll
public static void setup() throws IOException {
Configuration conf = new Configuration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
}
/**
* The close() method of DFSOutputStream should never throw the same exception
* twice. See HDFS-5335 for details.
*/
@Test
public void testCloseTwice() throws IOException {
DistributedFileSystem fs = cluster.getFileSystem();
FSDataOutputStream os = fs.create(new Path("/test"));
DFSOutputStream dos = (DFSOutputStream) Whitebox.getInternalState(os,
"wrappedStream");
DataStreamer streamer = (DataStreamer) Whitebox
.getInternalState(dos, "streamer");
@SuppressWarnings("unchecked")
LastExceptionInStreamer ex = (LastExceptionInStreamer) Whitebox
.getInternalState(streamer, "lastException");
Throwable thrown = (Throwable) Whitebox.getInternalState(ex, "thrown");
assertNull(thrown);
dos.close();
// Inject an exception after the first close; the second close must throw
// exactly that exception once and then clear it.
IOException dummy = new IOException("dummy");
ex.set(dummy);
try {
dos.close();
} catch (IOException e) {
assertEquals(e, dummy);
}
thrown = (Throwable) Whitebox.getInternalState(ex, "thrown");
assertNull(thrown);
dos.close();
}
/**
* The computePacketChunkSize() method of DFSOutputStream should set the actual
* packet size < 64kB. See HDFS-7308 for details.
*/
@Test
public void testComputePacketChunkSize() throws Exception {
DistributedFileSystem fs = cluster.getFileSystem();
FSDataOutputStream os = fs.create(new Path("/test"));
DFSOutputStream dos = (DFSOutputStream) Whitebox.getInternalState(os,
"wrappedStream");
final int packetSize = 64*1024;
final int bytesPerChecksum = 512;
Method method = dos.getClass().getDeclaredMethod("computePacketChunkSize",
int.class, int.class);
method.setAccessible(true);
method.invoke(dos, packetSize, bytesPerChecksum);
Field field = dos.getClass().getDeclaredField("packetSize");
field.setAccessible(true);
assertTrue((Integer) field.get(dos) + 33 < packetSize);
// If PKT_MAX_HEADER_LEN is 257, actual packet size come to over 64KB
// without a fix on HDFS-7308.
assertTrue((Integer) field.get(dos) + 257 < packetSize);
}
/**
* This tests preventing overflows of package size and bodySize.
* <p>
* See also https://issues.apache.org/jira/browse/HDFS-11608.
* </p>
* @throws IOException
* @throws SecurityException
* @throws NoSuchFieldException
* @throws InvocationTargetException
* @throws IllegalArgumentException
* @throws IllegalAccessException
* @throws NoSuchMethodException
*/
@Test
@Timeout(value = 60)
public void testPreventOverflow() throws IOException, NoSuchFieldException,
SecurityException, IllegalAccessException, IllegalArgumentException,
InvocationTargetException, NoSuchMethodException {
final int defaultWritePacketSize = DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT;
int configuredWritePacketSize = defaultWritePacketSize;
int finalWritePacketSize = defaultWritePacketSize;
/* test default WritePacketSize, e.g. 64*1024 */
runAdjustChunkBoundary(configuredWritePacketSize, finalWritePacketSize);
/* test large WritePacketSize, e.g. 1G */
configuredWritePacketSize = 1000 * 1024 * 1024;
finalWritePacketSize = PacketReceiver.MAX_PACKET_SIZE;
runAdjustChunkBoundary(configuredWritePacketSize, finalWritePacketSize);
}
/**
* Exercises {@code DFSOutputStream#adjustChunkBoundary} on a dedicated
* mini-cluster and verifies the derived writePacketSize, chunksPerPacket
* and packetSize fields via reflection.
*
* @param configuredWritePacketSize the configured WritePacketSize.
* @param finalWritePacketSize the final WritePacketSize picked by
* {@link DFSOutputStream#adjustChunkBoundary}
*/
private void runAdjustChunkBoundary(
final int configuredWritePacketSize,
final int finalWritePacketSize) throws IOException, NoSuchFieldException,
SecurityException, IllegalAccessException, IllegalArgumentException,
InvocationTargetException, NoSuchMethodException {
final boolean appendChunk = false;
final long blockSize = 3221225500L;
final long bytesCurBlock = 1073741824L;
final int bytesPerChecksum = 512;
final int checksumSize = 4;
final int chunkSize = bytesPerChecksum + checksumSize;
// NOTE(review): name typo ("packate"); value matches the 33-byte packet
// header length asserted in testComputePacketChunkSize above.
final int packateMaxHeaderLength = 33;
MiniDFSCluster dfsCluster = null;
final File baseDir = new File(PathUtils.getTestDir(getClass()),
GenericTestUtils.getMethodName());
try {
final Configuration dfsConf = new Configuration();
dfsConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR,
baseDir.getAbsolutePath());
dfsConf.setInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY,
configuredWritePacketSize);
dfsCluster = new MiniDFSCluster.Builder(dfsConf).numDataNodes(1).build();
dfsCluster.waitActive();
final FSDataOutputStream os = dfsCluster.getFileSystem()
.create(new Path(baseDir.getPath(), "testPreventOverflow"));
final DFSOutputStream dos = (DFSOutputStream) Whitebox
.getInternalState(os, "wrappedStream");
/* set appendChunk */
final Method setAppendChunkMethod = dos.getClass()
.getDeclaredMethod("setAppendChunk", boolean.class);
setAppendChunkMethod.setAccessible(true);
setAppendChunkMethod.invoke(dos, appendChunk);
/* set bytesCurBlock */
final Method setBytesCurBlockMethod = dos.getClass()
.getDeclaredMethod("setBytesCurBlock", long.class);
setBytesCurBlockMethod.setAccessible(true);
setBytesCurBlockMethod.invoke(dos, bytesCurBlock);
/* set blockSize */
final Field blockSizeField = dos.getClass().getDeclaredField("blockSize");
blockSizeField.setAccessible(true);
blockSizeField.setLong(dos, blockSize);
/* call adjustChunkBoundary */
final Method method = dos.getClass()
.getDeclaredMethod("adjustChunkBoundary");
method.setAccessible(true);
method.invoke(dos);
/* get and verify writePacketSize */
final Field writePacketSizeField = dos.getClass()
.getDeclaredField("writePacketSize");
writePacketSizeField.setAccessible(true);
assertEquals(writePacketSizeField.getInt(dos),
finalWritePacketSize);
/* get and verify chunksPerPacket */
final Field chunksPerPacketField = dos.getClass()
.getDeclaredField("chunksPerPacket");
chunksPerPacketField.setAccessible(true);
assertEquals(chunksPerPacketField.getInt(dos),
(finalWritePacketSize - packateMaxHeaderLength) / chunkSize);
/* get and verify packetSize */
final Field packetSizeField = dos.getClass()
.getDeclaredField("packetSize");
packetSizeField.setAccessible(true);
assertEquals(packetSizeField.getInt(dos),
chunksPerPacketField.getInt(dos) * chunkSize);
} finally {
if (dfsCluster != null) {
dfsCluster.shutdown();
}
}
}
// When the pipeline is congested and the block stream's flush() throws,
// the streamer must exit with the congested-node list cleared.
@Test
public void testCongestionBackoff() throws IOException {
DfsClientConf dfsClientConf = mock(DfsClientConf.class);
DFSClient client = mock(DFSClient.class);
Configuration conf = mock(Configuration.class);
when(client.getConfiguration()).thenReturn(conf);
when(client.getConf()).thenReturn(dfsClientConf);
when(client.getTracer()).thenReturn(FsTracer.get(new Configuration()));
client.clientRunning = true;
DataStreamer stream = new DataStreamer(
mock(HdfsFileStatus.class),
mock(ExtendedBlock.class),
client,
"foo", null, null, null, null, null, null);
DataOutputStream blockStream = mock(DataOutputStream.class);
doThrow(new IOException()).when(blockStream).flush();
Whitebox.setInternalState(stream, "blockStream", blockStream);
Whitebox.setInternalState(stream, "stage",
BlockConstructionStage.PIPELINE_CLOSE);
@SuppressWarnings("unchecked")
LinkedList<DFSPacket> dataQueue = (LinkedList<DFSPacket>)
Whitebox.getInternalState(stream, "dataQueue");
@SuppressWarnings("unchecked")
ArrayList<DatanodeInfo> congestedNodes = (ArrayList<DatanodeInfo>)
Whitebox.getInternalState(stream, "congestedNodes");
congestedNodes.add(mock(DatanodeInfo.class));
DFSPacket packet = mock(DFSPacket.class);
dataQueue.add(packet);
stream.run();
assertTrue(congestedNodes.isEmpty());
}
// Verifies that the streamer does not sleep so long under congestion backoff
// that ACKs pile up: if more than one congested node accumulates while the
// dataQueue lock is released, the delay is considered too long (isDelay stays
// true only when the backoff never released the queue in time).
@Test
@Timeout(value = 60)
public void testCongestionAckDelay() {
DfsClientConf dfsClientConf = mock(DfsClientConf.class);
DFSClient client = mock(DFSClient.class);
Configuration conf = mock(Configuration.class);
when(client.getConfiguration()).thenReturn(conf);
when(client.getConf()).thenReturn(dfsClientConf);
when(client.getTracer()).thenReturn(FsTracer.get(new Configuration()));
client.clientRunning = true;
DataStreamer stream = new DataStreamer(
mock(HdfsFileStatus.class),
mock(ExtendedBlock.class),
client,
"foo", null, null, null, null, null, null);
DataOutputStream blockStream = mock(DataOutputStream.class);
Whitebox.setInternalState(stream, "blockStream", blockStream);
Whitebox.setInternalState(stream, "stage",
BlockConstructionStage.PIPELINE_CLOSE);
@SuppressWarnings("unchecked")
LinkedList<DFSPacket> dataQueue = (LinkedList<DFSPacket>)
Whitebox.getInternalState(stream, "dataQueue");
@SuppressWarnings("unchecked")
ArrayList<DatanodeInfo> congestedNodes = (ArrayList<DatanodeInfo>)
Whitebox.getInternalState(stream, "congestedNodes");
int backOffMaxTime = (int)
Whitebox.getInternalState(stream, "congestionBackOffMaxTimeInMs");
DFSPacket[] packet = new DFSPacket[100];
AtomicBoolean isDelay = new AtomicBoolean(true);
// ResponseProcessor needs the dataQueue for the next step.
new SubjectInheritingThread(() -> {
for (int i = 0; i < 10; i++) {
// In order to ensure that other threads run for a period of time to prevent affecting
// the results.
try {
Thread.sleep(backOffMaxTime / 50);
} catch (InterruptedException e) {
e.printStackTrace();
}
synchronized (dataQueue) {
congestedNodes.add(mock(DatanodeInfo.class));
// The DataStreamer releases the dataQueue before sleeping, and the ResponseProcessor
// has time to hold the dataQueue to continuously accept ACKs and add congestedNodes
// to the list. Therefore, congestedNodes.size() is greater than 1.
if (congestedNodes.size() > 1){
isDelay.set(false);
try {
doThrow(new IOException()).when(blockStream).flush();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
try {
doThrow(new IOException()).when(blockStream).flush();
} catch (Exception e) {
e.printStackTrace();
}
// Prevent the DataStreamer from always waiting because the
// dataQueue may be empty, so that the unit test cannot exit.
DFSPacket endPacket = mock(DFSPacket.class);
dataQueue.add(endPacket);
}).start();
// The purpose of adding packets to the dataQueue is to make the DataStreamer run
// normally and judge whether to enter the sleep state according to the congestion.
new SubjectInheritingThread(() -> {
for (int i = 0; i < 100; i++) {
packet[i] = mock(DFSPacket.class);
dataQueue.add(packet[i]);
try {
Thread.sleep(backOffMaxTime / 100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();
stream.run();
assertFalse(isDelay.get());
}
// With CreateFlag.NO_LOCAL_WRITE and a DatanodeManager that reports one DN as
// the client-local node, exactly one of the three DataNodes should end up
// holding no block data.
@Test
public void testNoLocalWriteFlag() throws IOException {
DistributedFileSystem fs = cluster.getFileSystem();
EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.NO_LOCAL_WRITE,
CreateFlag.CREATE);
BlockManager bm = cluster.getNameNode().getNamesystem().getBlockManager();
DatanodeManager dm = bm.getDatanodeManager();
try(FSDataOutputStream os = fs.create(new Path("/test-no-local"),
FsPermission.getDefault(),
flags, 512, (short)2, 512, null)) {
// Inject a DatanodeManager that returns one DataNode as local node for
// the client.
DatanodeManager spyDm = spy(dm);
DatanodeDescriptor dn1 = dm.getDatanodeListForReport
(HdfsConstants.DatanodeReportType.LIVE).get(0);
doReturn(dn1).when(spyDm).getDatanodeByHost("127.0.0.1");
Whitebox.setInternalState(bm, "datanodeManager", spyDm);
byte[] buf = new byte[512 * 16];
new Random().nextBytes(buf);
os.write(buf);
} finally {
Whitebox.setInternalState(bm, "datanodeManager", dm);
}
cluster.triggerBlockReports();
final String bpid = cluster.getNamesystem().getBlockPoolId();
// Total number of DataNodes is 3.
assertEquals(3, cluster.getAllBlockReports(bpid).size());
int numDataNodesWithData = 0;
for (Map<DatanodeStorage, BlockListAsLongs> dnBlocks :
cluster.getAllBlockReports(bpid)) {
for (BlockListAsLongs blocks : dnBlocks.values()) {
if (blocks.getNumberOfBlocks() > 0) {
numDataNodesWithData++;
break;
}
}
}
// Verify that only one DN has no data.
assertEquals(1, 3 - numDataNodesWithData);
}
// Closing the streamer threads must release the file lease exactly once.
@Test
public void testEndLeaseCall() throws Exception {
Configuration conf = new Configuration();
DFSClient client = new DFSClient(cluster.getNameNode(0)
.getNameNodeAddress(), conf);
DFSClient spyClient = Mockito.spy(client);
DFSOutputStream dfsOutputStream = spyClient.create("/file2",
FsPermission.getFileDefault(),
EnumSet.of(CreateFlag.CREATE), (short) 3, 1024, null , 1024, null);
DFSOutputStream spyDFSOutputStream = Mockito.spy(dfsOutputStream);
spyDFSOutputStream.closeThreads(anyBoolean());
verify(spyClient, times(1)).endFileLease(anyString());
}
@Test
public void testStreamFlush() throws Exception {
FileSystem fs = cluster.getFileSystem();
FSDataOutputStream os = fs.create(new Path("/normal-file"));
// Verify output stream supports hsync() and hflush().
assertTrue(os.hasCapability(StreamCapability.HFLUSH.getValue()),
"DFSOutputStream should support hflush()!");
assertTrue(os.hasCapability(StreamCapability.HSYNC.getValue()),
"DFSOutputStream should support hsync()!");
byte[] bytes = new byte[1024];
InputStream is = new ByteArrayInputStream(bytes);
IOUtils.copyBytes(is, os, bytes.length);
os.hflush();
IOUtils.copyBytes(is, os, bytes.length);
os.hsync();
os.close();
}
// With RECOVER_LEASE_ON_CLOSE_EXCEPTION_KEY enabled, a failing close() must
// trigger lease recovery and eventually leave the file closed.
@Test
public void testExceptionInCloseWithRecoverLease() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean(RECOVER_LEASE_ON_CLOSE_EXCEPTION_KEY, true);
DFSClient client =
new DFSClient(cluster.getNameNode(0).getNameNodeAddress(), conf);
DFSClient spyClient = Mockito.spy(client);
DFSOutputStream dfsOutputStream = spyClient.create(
"/testExceptionInCloseWithRecoverLease", FsPermission.getFileDefault(),
EnumSet.of(CreateFlag.CREATE), (short) 3, 1024, null, 1024, null);
DFSOutputStream spyDFSOutputStream = Mockito.spy(dfsOutputStream);
doThrow(new IOException("Emulated IOException in close"))
.when(spyDFSOutputStream).completeFile();
try {
spyDFSOutputStream.close();
fail();
} catch (IOException ioe) {
assertTrue(spyDFSOutputStream.isLeaseRecovered());
waitForFileClosed("/testExceptionInCloseWithRecoverLease");
assertTrue(isFileClosed("/testExceptionInCloseWithRecoverLease"));
}
}
// Without the recover-lease-on-close option, a failing close() must NOT
// trigger lease recovery and the file stays open.
@Test
public void testExceptionInCloseWithoutRecoverLease() throws Exception {
Configuration conf = new Configuration();
DFSClient client =
new DFSClient(cluster.getNameNode(0).getNameNodeAddress(), conf);
DFSClient spyClient = Mockito.spy(client);
DFSOutputStream dfsOutputStream =
spyClient.create("/testExceptionInCloseWithoutRecoverLease",
FsPermission.getFileDefault(), EnumSet.of(CreateFlag.CREATE),
(short) 3, 1024, null, 1024, null);
DFSOutputStream spyDFSOutputStream = Mockito.spy(dfsOutputStream);
doThrow(new IOException("Emulated IOException in close"))
.when(spyDFSOutputStream).completeFile();
try {
spyDFSOutputStream.close();
fail();
} catch (IOException ioe) {
assertFalse(spyDFSOutputStream.isLeaseRecovered());
try {
waitForFileClosed("/testExceptionInCloseWithoutRecoverLease");
} catch (TimeoutException e) {
assertFalse(isFileClosed("/testExceptionInCloseWithoutRecoverLease"));
}
}
}
// After each hflush that fills a block, the first packet of the next block
// should use the full configured packet content size (not a truncated one).
@Test
@Timeout(value = 60)
public void testFirstPacketSizeInNewBlocks() throws IOException {
final long blockSize = (long) 1024 * 1024;
MiniDFSCluster dfsCluster = cluster;
DistributedFileSystem fs = dfsCluster.getFileSystem();
Configuration dfsConf = fs.getConf();
EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE);
try(FSDataOutputStream fos = fs.create(new Path("/testfile.dat"),
FsPermission.getDefault(),
flags, 512, (short)3, blockSize, null)) {
DataChecksum crc32c = DataChecksum.newDataChecksum(
DataChecksum.Type.CRC32C, 512);
long loop = 0;
Random r = new Random();
byte[] buf = new byte[(int) blockSize];
r.nextBytes(buf);
fos.write(buf);
fos.hflush();
int chunkSize = crc32c.getBytesPerChecksum() + crc32c.getChecksumSize();
int packetContentSize = (dfsConf.getInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY,
DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT) -
PacketHeader.PKT_MAX_HEADER_LEN) / chunkSize * chunkSize;
while (loop < 20) {
r.nextBytes(buf);
fos.write(buf);
fos.hflush();
loop++;
assertEquals(((DFSOutputStream) fos.getWrappedStream()).packetSize,
packetContentSize);
}
}
fs.delete(new Path("/testfile.dat"), true);
}
@AfterAll
public static void tearDown() {
if (cluster != null) {
cluster.shutdown();
}
}
// Returns whether the NameNode considers the given path closed.
private boolean isFileClosed(String path) throws IOException {
return cluster.getFileSystem().isFileClosed(new Path(path));
}
// Polls (1s interval, 5s max) until the given path is reported closed;
// throws TimeoutException if it never closes.
private void waitForFileClosed(String path) throws Exception {
GenericTestUtils.waitFor(() -> {
boolean closed;
try {
closed = isFileClosed(path);
} catch (IOException e) {
return false;
}
return closed;
}, 1000, 5000);
}
}
|
TestDFSOutputStream
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/registrations/AnotherEntity.java
|
{
"start": 552,
"end": 939
}
|
/**
 * Simple persistent entity with an identifier, a mutable name and a
 * {@code Thing} attribute, used by converter-registration tests.
 */
class ____ {

	@Id
	private Integer id;

	private String name;

	private Thing thing;

	/** No-arg constructor required by Hibernate; not for application use. */
	private AnotherEntity() {
	}

	/** Creates an entity with the given identifier and name. */
	public AnotherEntity(Integer id, String name) {
		this.id = id;
		this.name = name;
	}

	/** Returns the identifier. */
	public Integer getId() {
		return this.id;
	}

	/** Returns the current name. */
	public String getName() {
		return this.name;
	}

	/** Replaces the current name with {@code name}. */
	public void setName(String name) {
		this.name = name;
	}
}
|
AnotherEntity
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java
|
{
"start": 1054,
"end": 6673
}
|
/**
 * {@link AggregatorFunction} folding double values into a single
 * (mean, m2, count) state via {@code StdDevDoubleAggregator}, with support
 * for masked input and for merging intermediate states from other workers.
 * Lives under a generated-sources directory — presumably machine-generated;
 * avoid hand edits (confirm against the generator template).
 */
class ____ implements AggregatorFunction {
// Layout of the intermediate representation: one block per component.
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("mean", ElementType.DOUBLE),
new IntermediateStateDesc("m2", ElementType.DOUBLE),
new IntermediateStateDesc("count", ElementType.LONG) );
private final DriverContext driverContext;
private final VarianceStates.SingleState state;
private final List<Integer> channels;
// Forwarded to the state factory in create(); presumably selects
// std-dev vs. variance output — confirm in StdDevDoubleAggregator.
private final boolean stdDev;
public StdDevDoubleAggregatorFunction(DriverContext driverContext, List<Integer> channels,
VarianceStates.SingleState state, boolean stdDev) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
this.stdDev = stdDev;
}
public static StdDevDoubleAggregatorFunction create(DriverContext driverContext,
List<Integer> channels, boolean stdDev) {
return new StdDevDoubleAggregatorFunction(driverContext, channels, StdDevDoubleAggregator.initSingle(stdDev), stdDev);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
// Dispatches on the mask: skip everything, take the unmasked fast path,
// or fall through to the per-position masked path.
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
DoubleBlock valueBlock = page.getBlock(channels.get(0));
DoubleVector valueVector = valueBlock.asVector();
if (valueVector == null) {
addRawBlock(valueBlock, mask);
return;
}
addRawVector(valueVector, mask);
}
private void addRawInputNotMasked(Page page) {
DoubleBlock valueBlock = page.getBlock(channels.get(0));
DoubleVector valueVector = valueBlock.asVector();
if (valueVector == null) {
addRawBlock(valueBlock);
return;
}
addRawVector(valueVector);
}
// Dense input: fold one value per position into the state.
private void addRawVector(DoubleVector valueVector) {
for (int valuesPosition = 0; valuesPosition < valueVector.getPositionCount(); valuesPosition++) {
double valueValue = valueVector.getDouble(valuesPosition);
StdDevDoubleAggregator.combine(state, valueValue);
}
}
// Dense input with mask: positions where the mask is false are skipped.
private void addRawVector(DoubleVector valueVector, BooleanVector mask) {
for (int valuesPosition = 0; valuesPosition < valueVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
double valueValue = valueVector.getDouble(valuesPosition);
StdDevDoubleAggregator.combine(state, valueValue);
}
}
// Block input: positions may hold zero or several values; empty positions
// contribute nothing.
private void addRawBlock(DoubleBlock valueBlock) {
for (int p = 0; p < valueBlock.getPositionCount(); p++) {
int valueValueCount = valueBlock.getValueCount(p);
if (valueValueCount == 0) {
continue;
}
int valueStart = valueBlock.getFirstValueIndex(p);
int valueEnd = valueStart + valueValueCount;
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
double valueValue = valueBlock.getDouble(valueOffset);
StdDevDoubleAggregator.combine(state, valueValue);
}
}
}
// Block input with mask: same as above, skipping masked-out positions.
private void addRawBlock(DoubleBlock valueBlock, BooleanVector mask) {
for (int p = 0; p < valueBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int valueValueCount = valueBlock.getValueCount(p);
if (valueValueCount == 0) {
continue;
}
int valueStart = valueBlock.getFirstValueIndex(p);
int valueEnd = valueStart + valueValueCount;
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
double valueValue = valueBlock.getDouble(valueOffset);
StdDevDoubleAggregator.combine(state, valueValue);
}
}
}
// Folds a serialized (mean, m2, count) partial state into this state.
// All-null component blocks mean "no data" and are ignored.
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block meanUncast = page.getBlock(channels.get(0));
if (meanUncast.areAllValuesNull()) {
return;
}
DoubleVector mean = ((DoubleBlock) meanUncast).asVector();
assert mean.getPositionCount() == 1;
Block m2Uncast = page.getBlock(channels.get(1));
if (m2Uncast.areAllValuesNull()) {
return;
}
DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector();
assert m2.getPositionCount() == 1;
Block countUncast = page.getBlock(channels.get(2));
if (countUncast.areAllValuesNull()) {
return;
}
LongVector count = ((LongBlock) countUncast).asVector();
assert count.getPositionCount() == 1;
StdDevDoubleAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0));
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
blocks[offset] = StdDevDoubleAggregator.evaluateFinal(state, driverContext);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
|
StdDevDoubleAggregatorFunction
|
java
|
micronaut-projects__micronaut-core
|
aop/src/main/java/io/micronaut/aop/internal/InterceptorRegistryBean.java
|
{
"start": 1510,
"end": 3676
}
|
/**
 * Bean definition that registers {@link DefaultInterceptorRegistry} as the
 * singleton {@link InterceptorRegistry}, exposed under both the interface and
 * the concrete type. Serves as its own {@link BeanDefinitionReference}
 * ({@code load()} returns {@code this}).
 */
class ____ implements InstantiatableBeanDefinition<InterceptorRegistry>, BeanDefinitionReference<InterceptorRegistry> {
public static final AnnotationMetadata ANNOTATION_METADATA;
static {
// Mark the bean @BootstrapContextCompatible so it is also available
// during bootstrap-context startup.
MutableAnnotationMetadata metadata = new MutableAnnotationMetadata();
metadata.addDeclaredAnnotation(BootstrapContextCompatible.class.getName(), Collections.emptyMap());
ANNOTATION_METADATA = metadata;
}
@Override
public @NonNull Class<?>[] getIndexes() {
return new Class[]{InterceptorRegistry.class};
}
@Override
public Set<Class<?>> getExposedTypes() {
return Set.of(
InterceptorRegistry.class,
DefaultInterceptorRegistry.class
);
}
@Override
public int getOrder() {
return 0;
}
@Override
public boolean isPrimary() {
return false;
}
@Override
public boolean isParallel() {
return false;
}
// Unconditionally enabled; no configuration toggle.
@Override
public boolean isEnabled(@NonNull BeanContext context, BeanResolutionContext resolutionContext) {
return true;
}
@NonNull
@Override
public Class<InterceptorRegistry> getBeanType() {
return InterceptorRegistry.class;
}
@Override
public String getBeanDefinitionName() {
return InterceptorRegistryBean.class.getName();
}
@Override
public BeanDefinition<InterceptorRegistry> load() {
return this;
}
@Override
public boolean isPresent() {
return true;
}
@Override
public boolean isSingleton() {
return true;
}
@Override
public boolean isConfigurationProperties() {
return false;
}
@Override
public boolean isAbstract() {
return false;
}
// Creates the actual registry instance backing this definition.
@NonNull
@Override
public InterceptorRegistry instantiate(@NonNull BeanResolutionContext resolutionContext, @NonNull BeanContext context) throws BeanInstantiationException {
return new DefaultInterceptorRegistry(context);
}
@NonNull
@Override
public AnnotationMetadata getAnnotationMetadata() {
return ANNOTATION_METADATA;
}
}
|
InterceptorRegistryBean
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/engine/DiskCacheStrategy.java
|
{
"start": 202,
"end": 5606
}
|
/**
 * Set of available disk-caching strategies, each answering four questions:
 * whether to write source data, whether to write transformed resources, and
 * whether to attempt decoding from either cache on load.
 */
class ____ {
/**
* Caches remote data with both {@link #DATA} and {@link #RESOURCE}, and local data with {@link
* #RESOURCE} only.
*/
public static final DiskCacheStrategy ALL =
new DiskCacheStrategy() {
@Override
public boolean isDataCacheable(DataSource dataSource) {
return dataSource == DataSource.REMOTE;
}
@Override
public boolean isResourceCacheable(
boolean isFromAlternateCacheKey, DataSource dataSource, EncodeStrategy encodeStrategy) {
// Never re-write resources that already came from a cache.
return dataSource != DataSource.RESOURCE_DISK_CACHE
&& dataSource != DataSource.MEMORY_CACHE;
}
@Override
public boolean decodeCachedResource() {
return true;
}
@Override
public boolean decodeCachedData() {
return true;
}
};
/** Saves no data to cache. */
public static final DiskCacheStrategy NONE =
new DiskCacheStrategy() {
@Override
public boolean isDataCacheable(DataSource dataSource) {
return false;
}
@Override
public boolean isResourceCacheable(
boolean isFromAlternateCacheKey, DataSource dataSource, EncodeStrategy encodeStrategy) {
return false;
}
@Override
public boolean decodeCachedResource() {
return false;
}
@Override
public boolean decodeCachedData() {
return false;
}
};
/** Writes retrieved data directly to the disk cache before it's decoded. */
public static final DiskCacheStrategy DATA =
new DiskCacheStrategy() {
@Override
public boolean isDataCacheable(DataSource dataSource) {
return dataSource != DataSource.DATA_DISK_CACHE && dataSource != DataSource.MEMORY_CACHE;
}
@Override
public boolean isResourceCacheable(
boolean isFromAlternateCacheKey, DataSource dataSource, EncodeStrategy encodeStrategy) {
return false;
}
@Override
public boolean decodeCachedResource() {
return false;
}
@Override
public boolean decodeCachedData() {
return true;
}
};
/** Writes resources to disk after they've been decoded. */
public static final DiskCacheStrategy RESOURCE =
new DiskCacheStrategy() {
@Override
public boolean isDataCacheable(DataSource dataSource) {
return false;
}
@Override
public boolean isResourceCacheable(
boolean isFromAlternateCacheKey, DataSource dataSource, EncodeStrategy encodeStrategy) {
return dataSource != DataSource.RESOURCE_DISK_CACHE
&& dataSource != DataSource.MEMORY_CACHE;
}
@Override
public boolean decodeCachedResource() {
return true;
}
@Override
public boolean decodeCachedData() {
return false;
}
};
/**
* Tries to intelligently choose a strategy based on the data source of the {@link
* com.bumptech.glide.load.data.DataFetcher} and the {@link
* com.bumptech.glide.load.EncodeStrategy} of the {@link com.bumptech.glide.load.ResourceEncoder}
* (if an {@link com.bumptech.glide.load.ResourceEncoder} is available).
*/
public static final DiskCacheStrategy AUTOMATIC =
new DiskCacheStrategy() {
@Override
public boolean isDataCacheable(DataSource dataSource) {
return dataSource == DataSource.REMOTE;
}
@SuppressWarnings("checkstyle:UnnecessaryParentheses") // Readability
@Override
public boolean isResourceCacheable(
boolean isFromAlternateCacheKey, DataSource dataSource, EncodeStrategy encodeStrategy) {
// Cache only TRANSFORMED encodes of local data, or of data-cache hits
// reached through an alternate key.
return ((isFromAlternateCacheKey && dataSource == DataSource.DATA_DISK_CACHE)
|| dataSource == DataSource.LOCAL)
&& encodeStrategy == EncodeStrategy.TRANSFORMED;
}
@Override
public boolean decodeCachedResource() {
return true;
}
@Override
public boolean decodeCachedData() {
return true;
}
};
/**
* Returns true if this request should cache the original unmodified data.
*
* @param dataSource Indicates where the data was originally retrieved.
*/
public abstract boolean isDataCacheable(DataSource dataSource);
/**
* Returns true if this request should cache the final transformed resource.
*
* @param isFromAlternateCacheKey {@code true} if the resource we've decoded was loaded using an
* alternative, rather than the primary, cache key.
* @param dataSource Indicates where the data used to decode the resource was originally
* retrieved.
* @param encodeStrategy The {@link EncodeStrategy} the {@link
* com.bumptech.glide.load.ResourceEncoder} will use to encode the resource.
*/
public abstract boolean isResourceCacheable(
boolean isFromAlternateCacheKey, DataSource dataSource, EncodeStrategy encodeStrategy);
/** Returns true if this request should attempt to decode cached resource data. */
public abstract boolean decodeCachedResource();
/** Returns true if this request should attempt to decode cached source data. */
public abstract boolean decodeCachedData();
}
|
DiskCacheStrategy
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/ForwardingCollection.java
|
{
"start": 1655,
"end": 2130
}
|
class ____ <i>not</i> forward calls to {@code
* default} methods. Instead, it inherits their default implementations. When those implementations
* invoke methods, they invoke methods on the {@code ForwardingCollection}.
*
* <p>The {@code standard} methods are not guaranteed to be thread-safe, even when all of the
* methods that they depend on are thread-safe.
*
* @author Kevin Bourrillion
* @author Louis Wasserman
* @since 2.0
*/
@GwtCompatible
public abstract
|
does
|
java
|
apache__kafka
|
test-common/test-common-internal-api/src/main/java/org/apache/kafka/common/test/api/ClusterTests.java
|
{
"start": 1295,
"end": 1349
}
|
/**
 * Holds a group of {@link ClusterTest} declarations; by its shape this is
 * the container type for a repeatable {@code ClusterTest} annotation
 * (the {@code value()} array carries the individual declarations).
 */
interface ____ {
ClusterTest[] value();
}
|
ClusterTests
|
java
|
grpc__grpc-java
|
xds/src/main/java/io/grpc/xds/XdsListenerResource.java
|
{
"start": 26766,
"end": 27541
}
|
/**
 * Parsed LDS resource update. Exactly one of the two accessors is non-null,
 * depending on which factory produced the instance: {@code forApiListener}
 * fills only the HTTP connection manager, {@code forTcpListener} fills only
 * the TCP listener.
 */
class ____ implements ResourceUpdate {
// Http level api listener configuration.
@Nullable
abstract io.grpc.xds.HttpConnectionManager httpConnectionManager();
// Tcp level listener configuration.
@Nullable
abstract EnvoyServerProtoData.Listener listener();
// Builds an update carrying only the HTTP connection manager; listener() is null.
static LdsUpdate forApiListener(io.grpc.xds.HttpConnectionManager httpConnectionManager) {
checkNotNull(httpConnectionManager, "httpConnectionManager");
return new io.grpc.xds.AutoValue_XdsListenerResource_LdsUpdate(httpConnectionManager, null);
}
// Builds an update carrying only the TCP listener; httpConnectionManager() is null.
static LdsUpdate forTcpListener(EnvoyServerProtoData.Listener listener) {
checkNotNull(listener, "listener");
return new io.grpc.xds.AutoValue_XdsListenerResource_LdsUpdate(null, listener);
}
}
}
|
LdsUpdate
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/registration/RelyingPartyRegistration.java
|
{
"start": 2636,
"end": 16543
}
|
class ____ implements Serializable {
private static final long serialVersionUID = -2718908121120942813L;
private final String registrationId;
private final String entityId;
private final String assertionConsumerServiceLocation;
private final Saml2MessageBinding assertionConsumerServiceBinding;
private final String singleLogoutServiceLocation;
private final String singleLogoutServiceResponseLocation;
private final Collection<Saml2MessageBinding> singleLogoutServiceBindings;
private final String nameIdFormat;
private final boolean authnRequestsSigned;
private final AssertingPartyMetadata assertingPartyMetadata;
private final Collection<Saml2X509Credential> decryptionX509Credentials;
private final Collection<Saml2X509Credential> signingX509Credentials;
protected RelyingPartyRegistration(String registrationId, String entityId, String assertionConsumerServiceLocation,
Saml2MessageBinding assertionConsumerServiceBinding, String singleLogoutServiceLocation,
String singleLogoutServiceResponseLocation, Collection<Saml2MessageBinding> singleLogoutServiceBindings,
AssertingPartyDetails assertingPartyDetails, String nameIdFormat, boolean authnRequestsSigned,
Collection<Saml2X509Credential> decryptionX509Credentials,
Collection<Saml2X509Credential> signingX509Credentials) {
Assert.hasText(registrationId, "registrationId cannot be empty");
Assert.hasText(entityId, "entityId cannot be empty");
Assert.hasText(assertionConsumerServiceLocation, "assertionConsumerServiceLocation cannot be empty");
Assert.notNull(assertionConsumerServiceBinding, "assertionConsumerServiceBinding cannot be null");
Assert.isTrue(singleLogoutServiceLocation == null || !CollectionUtils.isEmpty(singleLogoutServiceBindings),
"singleLogoutServiceBindings cannot be null or empty when singleLogoutServiceLocation is set");
Assert.notNull(assertingPartyDetails, "assertingPartyDetails cannot be null");
Assert.notNull(decryptionX509Credentials, "decryptionX509Credentials cannot be null");
for (Saml2X509Credential c : decryptionX509Credentials) {
Assert.notNull(c, "decryptionX509Credentials cannot contain null elements");
Assert.isTrue(c.isDecryptionCredential(),
"All decryptionX509Credentials must have a usage of DECRYPTION set");
}
Assert.notNull(signingX509Credentials, "signingX509Credentials cannot be null");
for (Saml2X509Credential c : signingX509Credentials) {
Assert.notNull(c, "signingX509Credentials cannot contain null elements");
Assert.isTrue(c.isSigningCredential(), "All signingX509Credentials must have a usage of SIGNING set");
}
this.registrationId = registrationId;
this.entityId = entityId;
this.assertionConsumerServiceLocation = assertionConsumerServiceLocation;
this.assertionConsumerServiceBinding = assertionConsumerServiceBinding;
this.singleLogoutServiceLocation = singleLogoutServiceLocation;
this.singleLogoutServiceResponseLocation = singleLogoutServiceResponseLocation;
this.singleLogoutServiceBindings = Collections.unmodifiableList(new LinkedList<>(singleLogoutServiceBindings));
this.nameIdFormat = nameIdFormat;
this.authnRequestsSigned = authnRequestsSigned;
this.assertingPartyMetadata = assertingPartyDetails;
this.decryptionX509Credentials = Collections.unmodifiableList(new LinkedList<>(decryptionX509Credentials));
this.signingX509Credentials = Collections.unmodifiableList(new LinkedList<>(signingX509Credentials));
}
private RelyingPartyRegistration(String registrationId, String entityId, String assertionConsumerServiceLocation,
Saml2MessageBinding assertionConsumerServiceBinding, String singleLogoutServiceLocation,
String singleLogoutServiceResponseLocation, Collection<Saml2MessageBinding> singleLogoutServiceBindings,
AssertingPartyMetadata assertingPartyMetadata, String nameIdFormat, boolean authnRequestsSigned,
Collection<Saml2X509Credential> decryptionX509Credentials,
Collection<Saml2X509Credential> signingX509Credentials) {
Assert.hasText(registrationId, "registrationId cannot be empty");
Assert.hasText(entityId, "entityId cannot be empty");
Assert.hasText(assertionConsumerServiceLocation, "assertionConsumerServiceLocation cannot be empty");
Assert.notNull(assertionConsumerServiceBinding, "assertionConsumerServiceBinding cannot be null");
Assert.isTrue(singleLogoutServiceLocation == null || !CollectionUtils.isEmpty(singleLogoutServiceBindings),
"singleLogoutServiceBindings cannot be null or empty when singleLogoutServiceLocation is set");
Assert.notNull(assertingPartyMetadata, "assertingPartyMetadata cannot be null");
Assert.notNull(decryptionX509Credentials, "decryptionX509Credentials cannot be null");
for (Saml2X509Credential c : decryptionX509Credentials) {
Assert.notNull(c, "decryptionX509Credentials cannot contain null elements");
Assert.isTrue(c.isDecryptionCredential(),
"All decryptionX509Credentials must have a usage of DECRYPTION set");
}
Assert.notNull(signingX509Credentials, "signingX509Credentials cannot be null");
for (Saml2X509Credential c : signingX509Credentials) {
Assert.notNull(c, "signingX509Credentials cannot contain null elements");
Assert.isTrue(c.isSigningCredential(), "All signingX509Credentials must have a usage of SIGNING set");
}
this.registrationId = registrationId;
this.entityId = entityId;
this.assertionConsumerServiceLocation = assertionConsumerServiceLocation;
this.assertionConsumerServiceBinding = assertionConsumerServiceBinding;
this.singleLogoutServiceLocation = singleLogoutServiceLocation;
this.singleLogoutServiceResponseLocation = singleLogoutServiceResponseLocation;
this.singleLogoutServiceBindings = Collections.unmodifiableList(new LinkedList<>(singleLogoutServiceBindings));
this.nameIdFormat = nameIdFormat;
this.authnRequestsSigned = authnRequestsSigned;
this.assertingPartyMetadata = assertingPartyMetadata;
this.decryptionX509Credentials = Collections.unmodifiableList(new LinkedList<>(decryptionX509Credentials));
this.signingX509Credentials = Collections.unmodifiableList(new LinkedList<>(signingX509Credentials));
}
/**
* Copy the properties in this {@link RelyingPartyRegistration} into a {@link Builder}
* @return a {@link Builder} based off of the properties in this
* {@link RelyingPartyRegistration}
* @since 6.1
*/
public Builder mutate() {
return new Builder(this.registrationId, this.assertingPartyMetadata.mutate()).entityId(this.entityId)
.signingX509Credentials((c) -> c.addAll(this.signingX509Credentials))
.decryptionX509Credentials((c) -> c.addAll(this.decryptionX509Credentials))
.assertionConsumerServiceLocation(this.assertionConsumerServiceLocation)
.assertionConsumerServiceBinding(this.assertionConsumerServiceBinding)
.singleLogoutServiceLocation(this.singleLogoutServiceLocation)
.singleLogoutServiceResponseLocation(this.singleLogoutServiceResponseLocation)
.singleLogoutServiceBindings((c) -> c.addAll(this.singleLogoutServiceBindings))
.nameIdFormat(this.nameIdFormat)
.authnRequestsSigned(this.authnRequestsSigned);
}
/**
* Get the unique registration id for this RP/AP pair
* @return the unique registration id for this RP/AP pair
*/
public String getRegistrationId() {
return this.registrationId;
}
/**
* Get the relying party's <a href=
* "https://www.oasis-open.org/committees/download.php/51890/SAML%20MD%20simplified%20overview.pdf#2.9%20EntityDescriptor">EntityID</a>.
*
* <p>
* Equivalent to the value found in the relying party's <EntityDescriptor
* EntityID="..."/>
*
* <p>
* This value may contain a number of placeholders, which need to be resolved before
* use. They are {@code baseUrl}, {@code registrationId}, {@code baseScheme},
* {@code baseHost}, and {@code basePort}.
* @return the relying party's EntityID
* @since 5.4
*/
public String getEntityId() {
return this.entityId;
}
/**
* Get the AssertionConsumerService Location. Equivalent to the value found in
* <AssertionConsumerService Location="..."/> in the relying party's
* <SPSSODescriptor>.
*
* This value may contain a number of placeholders, which need to be resolved before
* use. They are {@code baseUrl}, {@code registrationId}, {@code baseScheme},
* {@code baseHost}, and {@code basePort}.
* @return the AssertionConsumerService Location
* @since 5.4
*/
public String getAssertionConsumerServiceLocation() {
return this.assertionConsumerServiceLocation;
}
/**
* Get the AssertionConsumerService Binding. Equivalent to the value found in
* <AssertionConsumerService Binding="..."/> in the relying party's
* <SPSSODescriptor>.
* @return the AssertionConsumerService Binding
* @since 5.4
*/
public Saml2MessageBinding getAssertionConsumerServiceBinding() {
return this.assertionConsumerServiceBinding;
}
/**
* Get the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService
* Binding</a>
*
*
* <p>
* Equivalent to the value found in <SingleLogoutService Binding="..."/> in the
* relying party's <SPSSODescriptor>.
* @return the SingleLogoutService Binding
* @since 5.6
*/
public Saml2MessageBinding getSingleLogoutServiceBinding() {
Assert.state(this.singleLogoutServiceBindings.size() == 1, "Method does not support multiple bindings.");
return this.singleLogoutServiceBindings.iterator().next();
}
/**
* Get the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService
* Binding</a>
* <p>
* Equivalent to the value found in <SingleLogoutService Binding="..."/> in the
* relying party's <SPSSODescriptor>.
* @return the SingleLogoutService Binding
* @since 5.8
*/
public Collection<Saml2MessageBinding> getSingleLogoutServiceBindings() {
return this.singleLogoutServiceBindings;
}
/**
* Get the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService
* Location</a>
*
* <p>
* Equivalent to the value found in <SingleLogoutService Location="..."/> in the
* relying party's <SPSSODescriptor>.
* @return the SingleLogoutService Location
* @since 5.6
*/
public String getSingleLogoutServiceLocation() {
return this.singleLogoutServiceLocation;
}
/**
* Get the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService
* Response Location</a>
*
* <p>
* Equivalent to the value found in <SingleLogoutService
* ResponseLocation="..."/> in the relying party's <SPSSODescriptor>.
* @return the SingleLogoutService Response Location
* @since 5.6
*/
public String getSingleLogoutServiceResponseLocation() {
return this.singleLogoutServiceResponseLocation;
}
/**
* Get the NameID format.
* @return the NameID format
* @since 5.7
*/
public String getNameIdFormat() {
return this.nameIdFormat;
}
/**
* Get the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=18">
* AuthnRequestsSigned</a> setting. If {@code true}, the relying party will sign all
* AuthnRequests, regardless of asserting party preference.
*
* <p>
* Note that Spring Security will sign the request if either
* {@link #isAuthnRequestsSigned()} is {@code true} or
* {@link AssertingPartyDetails#getWantAuthnRequestsSigned()} is {@code true}.
* @return the relying-party preference
* @since 6.1
*/
public boolean isAuthnRequestsSigned() {
return this.authnRequestsSigned;
}
/**
* Get the {@link Collection} of decryption {@link Saml2X509Credential}s associated
* with this relying party
* @return the {@link Collection} of decryption {@link Saml2X509Credential}s
* associated with this relying party
* @since 5.4
*/
public Collection<Saml2X509Credential> getDecryptionX509Credentials() {
return this.decryptionX509Credentials;
}
/**
* Get the {@link Collection} of signing {@link Saml2X509Credential}s associated with
* this relying party
* @return the {@link Collection} of signing {@link Saml2X509Credential}s associated
* with this relying party
* @since 5.4
*/
public Collection<Saml2X509Credential> getSigningX509Credentials() {
return this.signingX509Credentials;
}
/**
* Get the metadata for the Asserting Party
* @return the {@link AssertingPartyMetadata}
* @since 6.4
*/
public AssertingPartyMetadata getAssertingPartyMetadata() {
return this.assertingPartyMetadata;
}
/**
* Creates a {@code RelyingPartyRegistration} {@link Builder} with a known
* {@code registrationId}
* @param registrationId a string identifier for the {@code RelyingPartyRegistration}
* @return {@code Builder} to create a {@code RelyingPartyRegistration} object
*/
public static Builder withRegistrationId(String registrationId) {
Assert.hasText(registrationId, "registrationId cannot be empty");
return new Builder(registrationId, new AssertingPartyDetails.Builder());
}
/**
* Creates a {@code RelyingPartyRegistration} {@link Builder} with a
* {@code registrationId} equivalent to the asserting party entity id. Also
* initializes to the contents of the given {@link AssertingPartyMetadata}.
* @param metadata the metadata used to initialize the
* {@link RelyingPartyRegistration} {@link Builder}
* @return {@link Builder} to create a {@link RelyingPartyRegistration} object
* @since 6.4
*/
public static Builder withAssertingPartyMetadata(AssertingPartyMetadata metadata) {
Assert.notNull(metadata, "assertingPartyMetadata cannot be null");
return new Builder(metadata.getEntityId(), metadata.mutate());
}
/**
* The configuration metadata of the Asserting party
*
* @since 5.4
*/
public static
|
RelyingPartyRegistration
|
java
|
elastic__elasticsearch
|
test/fixtures/gcs-fixture/src/test/java/fixture/gcs/GoogleCloudStorageHttpHandlerTests.java
|
{
"start": 1844,
"end": 36217
}
|
class ____ extends ESTestCase {
private static final String HOST = "http://127.0.0.1:12345";
private static final int RESUME_INCOMPLETE = 308;
private static final Pattern GENERATION_PATTERN = Pattern.compile("\"generation\"\\s*:\\s*\"(\\d+)\"");
public void testRejectsBadUri() {
assertEquals(
RestStatus.NOT_FOUND.getStatus(),
handleRequest(new GoogleCloudStorageHttpHandler("bucket"), randomFrom("GET", "PUT", "POST", "DELETE", "HEAD"), "/not-in-bucket")
.status()
);
}
public void testCheckEndpoint() {
final var handler = new GoogleCloudStorageHttpHandler("bucket");
assertEquals(
RestStatus.OK,
handleRequest(handler, "GET", "/", BytesArray.EMPTY, Headers.of("Metadata-Flavor", "Google")).restStatus()
);
}
public void testSimpleObjectOperations() {
final var bucket = randomAlphaOfLength(10);
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final var blobName = "path/" + randomAlphaOfLength(10);
assertEquals(RestStatus.NOT_FOUND, getBlobContents(handler, bucket, blobName, null, null).restStatus());
assertEquals(
new TestHttpResponse(RestStatus.OK, "{\"kind\":\"storage#objects\",\"items\":[],\"prefixes\":[]}"),
listBlobs(handler, bucket, null, null)
);
final var body = randomAlphaOfLength(50);
assertEquals(
RestStatus.OK,
executeUpload(handler, bucket, blobName, new BytesArray(body.getBytes(StandardCharsets.UTF_8)), null).restStatus()
);
assertEquals(new TestHttpResponse(RestStatus.OK, body), getBlobContents(handler, bucket, blobName, null, null));
assertEquals(new TestHttpResponse(RestStatus.OK, Strings.format("""
{"kind":"storage#objects","items":[{"kind":"storage#object","bucket":"%s","name":"%s","id":"%s","size":"50",\
"generation":"1"}],"prefixes":[]}""", bucket, blobName, blobName)), listBlobs(handler, bucket, null, null));
assertEquals(new TestHttpResponse(RestStatus.OK, Strings.format("""
{"kind":"storage#objects","items":[{"kind":"storage#object","bucket":"%s","name":"%s","id":"%s","size":"50",\
"generation":"1"}],"prefixes":[]}""", bucket, blobName, blobName)), listBlobs(handler, bucket, "path/", null));
assertEquals(new TestHttpResponse(RestStatus.OK, """
{"kind":"storage#objects","items":[],"prefixes":[]}"""), listBlobs(handler, bucket, "some/other/path", null));
var boundary = newMultipartBoundary();
assertEquals(
new TestHttpResponse(RestStatus.OK, """
--$boundary
Content-Length: 168
Content-Type: application/http
content-id: 1
content-transfer-encoding: binary
HTTP/1.1 204 NO_CONTENT
--$boundary--
""".replace("\n", "\r\n").replace("$boundary", boundary)),
handleRequest(
handler,
"POST",
"/batch/storage/v1",
createBatchDeleteRequest(bucket, boundary, blobName),
Headers.of("Content-Type", "mixed/multipart")
)
);
assertEquals(
RestStatus.OK,
handleRequest(
handler,
"POST",
"/batch/storage/v1",
createBatchDeleteRequest(bucket, boundary, blobName),
Headers.of("Content-Type", "mixed/multipart")
).restStatus()
);
assertEquals(new TestHttpResponse(RestStatus.OK, """
{"kind":"storage#objects","items":[],"prefixes":[]}"""), listBlobs(handler, bucket, "path/", null));
}
public void testGetWithBytesRange() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final var blobName = "blob_name_" + randomIdentifier();
final var blobBytes = randomBytesReference(256);
assertEquals(RestStatus.OK, executeUpload(handler, bucket, blobName, blobBytes, 0L).restStatus());
assertEquals(
"No Range",
new TestHttpResponse(RestStatus.OK, blobBytes, TestHttpExchange.EMPTY_HEADERS),
getBlobContents(handler, bucket, blobName, null, null)
);
var end = blobBytes.length() - 1;
assertEquals(
"Exact Range: bytes=0-" + end,
new TestHttpResponse(RestStatus.PARTIAL_CONTENT, blobBytes, TestHttpExchange.EMPTY_HEADERS),
getBlobContents(handler, bucket, blobName, null, new HttpHeaderParser.Range(0, end))
);
end = randomIntBetween(blobBytes.length() - 1, Integer.MAX_VALUE);
assertEquals(
"Larger Range: bytes=0-" + end,
new TestHttpResponse(RestStatus.PARTIAL_CONTENT, blobBytes, TestHttpExchange.EMPTY_HEADERS),
getBlobContents(handler, bucket, blobName, null, new HttpHeaderParser.Range(0, end))
);
var start = randomIntBetween(blobBytes.length(), Integer.MAX_VALUE - 1);
end = randomIntBetween(start, Integer.MAX_VALUE);
assertEquals(
"Invalid Range: bytes=" + start + '-' + end,
new TestHttpResponse(RestStatus.REQUESTED_RANGE_NOT_SATISFIED, BytesArray.EMPTY, TestHttpExchange.EMPTY_HEADERS),
getBlobContents(handler, bucket, blobName, null, new HttpHeaderParser.Range(start, end))
);
start = randomIntBetween(0, blobBytes.length() - 1);
var length = randomIntBetween(1, blobBytes.length() - start);
end = start + length - 1;
assertEquals(
"Range: bytes=" + start + '-' + end,
new TestHttpResponse(RestStatus.PARTIAL_CONTENT, blobBytes.slice(start, length), TestHttpExchange.EMPTY_HEADERS),
getBlobContents(handler, bucket, blobName, null, new HttpHeaderParser.Range(start, end))
);
}
public void testZeroLengthObjectGets() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final var blobName = "blob_name_" + randomIdentifier();
final var blobBytes = BytesArray.EMPTY;
assertEquals(RestStatus.OK, executeMultipartUpload(handler, bucket, blobName, blobBytes, 0L).restStatus());
assertEquals(
"No Range",
new TestHttpResponse(RestStatus.OK, blobBytes, TestHttpExchange.EMPTY_HEADERS),
getBlobContents(handler, bucket, blobName, null, null)
);
assertEquals(
"Range 0-0",
new TestHttpResponse(RestStatus.REQUESTED_RANGE_NOT_SATISFIED, BytesArray.EMPTY, TestHttpExchange.EMPTY_HEADERS),
getBlobContents(handler, bucket, blobName, null, new HttpHeaderParser.Range(0, 0))
);
assertEquals(
"Random range x-y",
new TestHttpResponse(RestStatus.REQUESTED_RANGE_NOT_SATISFIED, BytesArray.EMPTY, TestHttpExchange.EMPTY_HEADERS),
getBlobContents(handler, bucket, blobName, null, new HttpHeaderParser.Range(randomIntBetween(0, 30), randomIntBetween(31, 100)))
);
}
public void testResumableUpload() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final var blobName = "blob_name_" + randomIdentifier();
final var createUploadResponse = handleRequest(
handler,
"POST",
"/upload/storage/v1/b/" + bucket + "/?uploadType=resumable&name=" + blobName
);
final var locationHeader = createUploadResponse.headers.getFirst("Location");
final var sessionURI = locationHeader.substring(locationHeader.indexOf(HOST) + HOST.length());
assertEquals(RestStatus.OK, createUploadResponse.restStatus());
// status check
assertEquals(
new TestHttpResponse(RESUME_INCOMPLETE, TestHttpExchange.EMPTY_HEADERS),
handleRequest(handler, "PUT", sessionURI, BytesArray.EMPTY, contentRangeHeader(null, null, null))
);
final var part1 = randomAlphaOfLength(50);
final var uploadPart1Response = handleRequest(handler, "PUT", sessionURI, part1, contentRangeHeader(0, 50, null));
assertEquals(new TestHttpResponse(RESUME_INCOMPLETE, rangeHeader(0, 49)), uploadPart1Response);
// status check
assertEquals(
new TestHttpResponse(RESUME_INCOMPLETE, rangeHeader(0, 49)),
handleRequest(handler, "PUT", sessionURI, BytesArray.EMPTY, contentRangeHeader(null, null, null))
);
final var part2 = randomAlphaOfLength(50);
final var uploadPart2Response = handleRequest(handler, "PUT", sessionURI, part2, contentRangeHeader(50, 99, null));
assertEquals(new TestHttpResponse(RESUME_INCOMPLETE, rangeHeader(0, 99)), uploadPart2Response);
// incomplete upload should not be visible yet
assertEquals(RestStatus.NOT_FOUND, getBlobContents(handler, bucket, blobName, null, null).restStatus());
final var part3 = randomAlphaOfLength(30);
final var uploadPart3Response = handleRequest(handler, "PUT", sessionURI, part3, contentRangeHeader(100, 129, 130));
assertEquals(new TestHttpResponse(RestStatus.OK, rangeHeader(0, 129)), uploadPart3Response);
// status check
assertEquals(
new TestHttpResponse(RestStatus.OK, rangeHeader(0, 129)),
handleRequest(handler, "PUT", sessionURI, BytesArray.EMPTY, contentRangeHeader(null, null, null))
);
// complete upload should be visible now
// can download contents
assertEquals(
new TestHttpResponse(RestStatus.OK, part1 + part2 + part3),
handleRequest(handler, "GET", "/download/storage/v1/b/" + bucket + "/o/" + blobName)
);
// can see in listing
assertEquals(
new TestHttpResponse(RestStatus.OK, Strings.format("""
{"kind":"storage#objects","items":[{"kind":"storage#object","bucket":"%s","name":"%s","id":"%s","size":"130",\
"generation":"1"}],"prefixes":[]}""", bucket, blobName, blobName)),
handleRequest(handler, "GET", "/storage/v1/b/" + bucket + "/o")
);
// can get metadata
assertEquals(
new TestHttpResponse(
RestStatus.OK,
Strings.format(
"""
{"kind":"storage#object","bucket":"%s","name":"%s","id":"%s","size":"130","generation":"1"}""",
bucket,
blobName,
blobName
)
),
handleRequest(handler, "GET", "/storage/v1/b/" + bucket + "/o/" + blobName)
);
}
public void testIfGenerationMatch_MultipartUpload() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final var blobName = "blob_name_" + randomIdentifier();
assertEquals(
RestStatus.OK,
executeUpload(handler, bucket, blobName, randomBytesReference(randomIntBetween(100, 5_000)), null).restStatus()
);
// update, matched generation
assertEquals(
RestStatus.OK,
executeMultipartUpload(
handler,
bucket,
blobName,
randomBytesReference(randomIntBetween(100, 5_000)),
getCurrentGeneration(handler, bucket, blobName)
).restStatus()
);
// update, mismatched generation
assertEquals(
RestStatus.PRECONDITION_FAILED,
executeMultipartUpload(
handler,
bucket,
blobName,
randomBytesReference(randomIntBetween(100, 5_000)),
randomValueOtherThan(getCurrentGeneration(handler, bucket, blobName), ESTestCase::randomNonNegativeLong)
).restStatus()
);
// update, no generation
assertEquals(
RestStatus.OK,
executeMultipartUpload(handler, bucket, blobName, randomBytesReference(randomIntBetween(100, 5_000)), null).restStatus()
);
// update, zero generation
assertEquals(
RestStatus.PRECONDITION_FAILED,
executeMultipartUpload(handler, bucket, blobName, randomBytesReference(randomIntBetween(100, 5_000)), 0L).restStatus()
);
// new file, zero generation
assertEquals(
RestStatus.OK,
executeMultipartUpload(handler, bucket, blobName + "/new/1", randomBytesReference(randomIntBetween(100, 5_000)), 0L)
.restStatus()
);
// new file, non-zero generation
assertEquals(
RestStatus.PRECONDITION_FAILED,
executeMultipartUpload(
handler,
bucket,
blobName + "/new/2",
randomBytesReference(randomIntBetween(100, 5_000)),
randomLongBetween(1, Long.MAX_VALUE)
).restStatus()
);
}
public void testIfGenerationMatch_ResumableUpload() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final var blobName = "blob_name_" + randomIdentifier();
assertEquals(
RestStatus.OK,
executeUpload(handler, bucket, blobName, randomBytesReference(randomIntBetween(100, 5_000)), null).restStatus()
);
// update, matched generation
assertEquals(
RestStatus.OK,
executeResumableUpload(
handler,
bucket,
blobName,
randomBytesReference(randomIntBetween(100, 5_000)),
getCurrentGeneration(handler, bucket, blobName)
).restStatus()
);
// update, mismatched generation
assertEquals(
RestStatus.PRECONDITION_FAILED,
executeResumableUpload(
handler,
bucket,
blobName,
randomBytesReference(randomIntBetween(100, 5_000)),
randomValueOtherThan(getCurrentGeneration(handler, bucket, blobName), ESTestCase::randomNonNegativeLong)
).restStatus()
);
// update, no generation
assertEquals(
RestStatus.OK,
executeResumableUpload(handler, bucket, blobName, randomBytesReference(randomIntBetween(100, 5_000)), null).restStatus()
);
// update, zero generation
assertEquals(
RestStatus.PRECONDITION_FAILED,
executeResumableUpload(handler, bucket, blobName, randomBytesReference(randomIntBetween(100, 5_000)), 0L).restStatus()
);
// new file, zero generation
assertEquals(
RestStatus.OK,
executeResumableUpload(handler, bucket, blobName + "/new/1", randomBytesReference(randomIntBetween(100, 5_000)), 0L)
.restStatus()
);
// new file, non-zero generation
assertEquals(
RestStatus.PRECONDITION_FAILED,
executeResumableUpload(
handler,
bucket,
blobName + "/new/2",
randomBytesReference(randomIntBetween(100, 5_000)),
randomLongBetween(1, Long.MAX_VALUE)
).restStatus()
);
}
public void testIfGenerationMatch_GetObject() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final var blobName = "blob_name_" + randomIdentifier();
assertEquals(
RestStatus.OK,
executeUpload(handler, bucket, blobName, randomBytesReference(randomIntBetween(100, 5_000)), null).restStatus()
);
final long currentGeneration = getCurrentGeneration(handler, bucket, blobName);
// Get contents, matching generation
assertEquals(RestStatus.OK, getBlobContents(handler, bucket, blobName, currentGeneration, null).restStatus());
// Get contents, mismatched generation
assertEquals(
RestStatus.PRECONDITION_FAILED,
getBlobContents(handler, bucket, blobName, randomValueOtherThan(currentGeneration, ESTestCase::randomNonNegativeLong), null)
.restStatus()
);
// Get metadata, matching generation
assertEquals(RestStatus.OK, getBlobMetadata(handler, bucket, blobName, currentGeneration).restStatus());
// Get metadata, mismatched generation
assertEquals(
RestStatus.PRECONDITION_FAILED,
getBlobMetadata(handler, bucket, blobName, randomValueOtherThan(currentGeneration, ESTestCase::randomNonNegativeLong))
.restStatus()
);
}
public void testListObjectsWithPrefix() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final int numberOfFiles = randomIntBetween(1, 100);
final int numberWithMatchingPrefix = randomIntBetween(0, numberOfFiles);
final String prefix = randomIdentifier();
// Create expected state
for (int i = 0; i < numberOfFiles; i++) {
final String blobName;
if (i < numberWithMatchingPrefix) {
blobName = prefix + "blob_name_" + i;
} else {
final String nonMatchingPrefix = randomValueOtherThan(prefix, ESTestCase::randomIdentifier);
blobName = nonMatchingPrefix + "blob_name_" + i;
}
assertEquals(
RestStatus.OK,
executeUpload(handler, bucket, blobName, randomBytesReference(randomIntBetween(100, 5_000)), null).restStatus()
);
}
TestHttpResponse response = listBlobs(handler, bucket, prefix, null);
assertEquals(RestStatus.OK, response.restStatus());
XContentTestUtils.JsonMapView jsonMapView = XContentTestUtils.createJsonMapView(
new ByteArrayInputStream(BytesReference.toBytes(response.body()))
);
assertEquals(numberWithMatchingPrefix, ((List<?>) jsonMapView.get("items")).size());
}
public void testListObjectsWithPrefixAndDelimiter() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
final var delimiter = randomFrom("/", ".", "+", "\\");
final var prefix = randomBoolean() ? "" : randomIdentifier() + delimiter;
final int numberOfFiles = randomIntBetween(1, 100);
final int numberWithDelimiter = randomIntBetween(0, numberOfFiles);
// Create expected state
final Set<String> topLevelDirectories = new HashSet<>();
for (int i = 0; i < numberOfFiles; i++) {
final String blobName;
if (i < numberWithDelimiter) {
final String directory = randomAlphaOfLength(3);
blobName = directory + delimiter + "blob_name_" + i;
topLevelDirectories.add(directory + delimiter);
} else {
blobName = randomIdentifier() + "_blob_name_" + i;
}
assertEquals(
RestStatus.OK,
executeUpload(handler, bucket, prefix + blobName, randomBytesReference(randomIntBetween(100, 5_000)), null).restStatus()
);
}
final TestHttpResponse response = listBlobs(handler, bucket, prefix, delimiter);
assertEquals(RestStatus.OK, response.restStatus());
XContentTestUtils.JsonMapView jsonMapView = XContentTestUtils.createJsonMapView(
new ByteArrayInputStream(BytesReference.toBytes(response.body()))
);
assertEquals(numberOfFiles - numberWithDelimiter, ((List<?>) jsonMapView.get("items")).size());
assertEquals(
topLevelDirectories.stream().map(d -> prefix + d).collect(Collectors.toSet()),
new HashSet<>(jsonMapView.get("prefixes"))
);
}
/**
* Tests the example from <a href="https://cloud.google.com/storage/docs/json_api/v1/objects/list">The docs</a>
*/
public void testListObjectsExampleFromDocumentation() {
final var bucket = randomIdentifier();
final var handler = new GoogleCloudStorageHttpHandler(bucket);
Stream.of("a/b", "a/c", "d", "e", "e/f", "e/g/h")
.forEach(
path -> assertEquals(
RestStatus.OK,
executeUpload(handler, bucket, path, randomBytesReference(randomIntBetween(100, 5_000)), null).restStatus()
)
);
TestHttpResponse response = listBlobs(handler, bucket, null, "/");
assertEquals(RestStatus.OK, response.restStatus());
XContentTestUtils.JsonMapView jsonMapView = XContentTestUtils.createJsonMapView(
new ByteArrayInputStream(BytesReference.toBytes(response.body()))
);
assertEquals(
Set.of("d", "e"),
((List<?>) jsonMapView.get("items")).stream().map(i -> ((Map<?, ?>) i).get("name")).collect(Collectors.toSet())
);
assertEquals(Set.of("a/", "e/"), new HashSet<>(jsonMapView.get("prefixes")));
response = listBlobs(handler, bucket, "e/", "/");
assertEquals(RestStatus.OK, response.restStatus());
jsonMapView = XContentTestUtils.createJsonMapView(new ByteArrayInputStream(BytesReference.toBytes(response.body())));
assertEquals(
Set.of("e/f"),
((List<?>) jsonMapView.get("items")).stream().map(i -> ((Map<?, ?>) i).get("name")).collect(Collectors.toSet())
);
// note this differs from the example, but third party test indicates this is what we get back
assertEquals(Set.of("e/g/"), new HashSet<>(jsonMapView.get("prefixes")));
}
private static TestHttpResponse executeUpload(
GoogleCloudStorageHttpHandler handler,
String bucket,
String blobName,
BytesReference bytes,
Long ifGenerationMatch
) {
if (randomBoolean()) {
return executeResumableUpload(handler, bucket, blobName, bytes, ifGenerationMatch);
} else {
return executeMultipartUpload(handler, bucket, blobName, bytes, ifGenerationMatch);
}
}
    /**
     * Uploads {@code bytes} via the two-phase resumable upload protocol: first create an upload
     * session, then PUT the payload in two chunks split at a random boundary.
     *
     * @return the response to the final PUT, which completes the upload
     */
    private static TestHttpResponse executeResumableUpload(
        GoogleCloudStorageHttpHandler handler,
        String bucket,
        String blobName,
        BytesReference bytes,
        Long ifGenerationMatch
    ) {
        assert bytes.length() >= 2 : "We can't split anything smaller than two";
        // Phase 1: create the resumable upload session.
        final var createUploadResponse = handleRequest(
            handler,
            "POST",
            "/upload/storage/v1/b/"
                + bucket
                + "/"
                + generateQueryString("uploadType", "resumable", "name", blobName, "ifGenerationMatch", ifGenerationMatch)
        );
        // The session URI is returned in the Location header; strip the scheme/host part so it can
        // be fed back to the handler as a request path.
        final var locationHeader = createUploadResponse.headers.getFirst("Location");
        final var sessionURI = locationHeader.substring(locationHeader.indexOf(HOST) + HOST.length());
        assertEquals(RestStatus.OK, createUploadResponse.restStatus());
        // Phase 2: PUT the payload in two chunks. The first chunk omits the total size ("*") and
        // must be acknowledged with RESUME_INCOMPLETE.
        final int partBoundary = randomIntBetween(1, bytes.length() - 1);
        final var part1 = bytes.slice(0, partBoundary);
        final var uploadPart1Response = handleRequest(handler, "PUT", sessionURI, part1, contentRangeHeader(0, partBoundary - 1, null));
        assertEquals(RESUME_INCOMPLETE, uploadPart1Response.status());
        // The final chunk declares the total size, which completes the upload.
        final var part2 = bytes.slice(partBoundary, bytes.length() - partBoundary);
        return handleRequest(handler, "PUT", sessionURI, part2, contentRangeHeader(partBoundary, bytes.length() - 1, bytes.length()));
    }
    /**
     * Uploads {@code bytes} in a single multipart/related POST (a JSON metadata part followed by
     * the content part), gzip-compressed in the same way as the real client.
     */
    private static TestHttpResponse executeMultipartUpload(
        GoogleCloudStorageHttpHandler handler,
        String bucket,
        String blobName,
        BytesReference bytes,
        Long ifGenerationMatch
    ) {
        var headers = new Headers();
        // multipart upload is required to provide boundary header
        var boundary = newMultipartBoundary();
        headers.put("Content-Type", List.of("multipart/related; boundary=" + boundary));
        return handleRequest(
            handler,
            "POST",
            "/upload/storage/v1/b/" + bucket + "/" + generateQueryString("uploadType", "multipart", "ifGenerationMatch", ifGenerationMatch),
            createGzipCompressedMultipartUploadBody(bucket, blobName, bytes, boundary),
            headers
        );
    }
    /**
     * Downloads the contents of a blob, optionally restricted to a byte {@code range} and/or
     * conditioned on {@code ifGenerationMatch}.
     */
    private static TestHttpResponse getBlobContents(
        GoogleCloudStorageHttpHandler handler,
        String bucket,
        String blobName,
        @Nullable Long ifGenerationMatch,
        @Nullable HttpHeaderParser.Range range
    ) {
        return handleRequest(
            handler,
            "GET",
            "/download/storage/v1/b/" + bucket + "/o/" + blobName + generateQueryString("ifGenerationMatch", ifGenerationMatch),
            BytesArray.EMPTY,
            // Only attach a Range header when the caller asked for a partial download.
            range != null ? rangeHeader(range.start(), range.end()) : TestHttpExchange.EMPTY_HEADERS
        );
    }
    /**
     * Fetches a blob's metadata (not its contents), optionally conditioned on
     * {@code ifGenerationMatch}.
     */
    private static TestHttpResponse getBlobMetadata(
        GoogleCloudStorageHttpHandler handler,
        String bucket,
        String blobName,
        @Nullable Long ifGenerationMatch
    ) {
        return handleRequest(
            handler,
            "GET",
            "/storage/v1/b/" + bucket + "/o/" + blobName + generateQueryString("ifGenerationMatch", ifGenerationMatch)
        );
    }
    /**
     * Returns the blob's current generation number by fetching its metadata and extracting the
     * generation via {@code GENERATION_PATTERN}. Fails the test if the metadata request fails or
     * the pattern does not match.
     */
    private static long getCurrentGeneration(GoogleCloudStorageHttpHandler handler, String bucket, String blobName) {
        final TestHttpResponse blobMetadata = getBlobMetadata(handler, bucket, blobName, null);
        assertEquals(RestStatus.OK, blobMetadata.restStatus());
        final Matcher matcher = GENERATION_PATTERN.matcher(blobMetadata.body.utf8ToString());
        assertTrue(matcher.find());
        return Long.parseLong(matcher.group(1));
    }
    /**
     * Lists the objects in {@code bucket}, optionally filtered by {@code prefix} and grouped by
     * {@code delimiter} (both may be null, in which case the parameter is omitted).
     */
    private static TestHttpResponse listBlobs(GoogleCloudStorageHttpHandler handler, String bucket, String prefix, String delimiter) {
        return handleRequest(
            handler,
            "GET",
            "/storage/v1/b/" + bucket + "/o" + generateQueryString("prefix", prefix, "delimiter", delimiter)
        );
    }
private record TestHttpResponse(int status, BytesReference body, Headers headers) {
TestHttpResponse(RestStatus status, BytesReference body, Headers headers) {
this(status.getStatus(), body, headers);
}
TestHttpResponse(RestStatus status, String body) {
this(status.getStatus(), new BytesArray(body.getBytes(StandardCharsets.UTF_8)), TestHttpExchange.EMPTY_HEADERS);
}
TestHttpResponse(RestStatus status, Headers headers) {
this(status.getStatus(), BytesArray.EMPTY, headers);
}
TestHttpResponse(int statusCode, Headers headers) {
this(statusCode, BytesArray.EMPTY, headers);
}
RestStatus restStatus() {
return requireNonNull(RestStatus.fromCode(status));
}
@Override
public String toString() {
return "TestHttpResponse{" + "status=" + status + ", body={size=" + body.utf8ToString() + "}, headers=" + headers + '}';
}
}
    /** Convenience overload: request with an empty body and no extra headers. */
    private static TestHttpResponse handleRequest(GoogleCloudStorageHttpHandler handler, String method, String uri) {
        return handleRequest(handler, method, uri, "");
    }
    /** Convenience overload: UTF-8 encodes a String body; no extra headers. */
    private static TestHttpResponse handleRequest(GoogleCloudStorageHttpHandler handler, String method, String uri, String requestBody) {
        return handleRequest(handler, method, uri, new BytesArray(requestBody.getBytes(StandardCharsets.UTF_8)));
    }
    /** Convenience overload: UTF-8 encodes a String body and forwards the given request headers. */
    private static TestHttpResponse handleRequest(
        GoogleCloudStorageHttpHandler handler,
        String method,
        String uri,
        String requestBody,
        Headers headers
    ) {
        return handleRequest(handler, method, uri, new BytesArray(requestBody.getBytes(StandardCharsets.UTF_8)), headers);
    }
    /** Convenience overload: raw bytes body with no extra headers. */
    private static TestHttpResponse handleRequest(
        GoogleCloudStorageHttpHandler handler,
        String method,
        String uri,
        BytesReference requestBody
    ) {
        return handleRequest(handler, method, uri, requestBody, TestHttpExchange.EMPTY_HEADERS);
    }
    /**
     * Runs a single request through the handler via a synthetic {@code TestHttpExchange} and
     * captures the status, body, and the subset of response headers the tests assert on.
     */
    private static TestHttpResponse handleRequest(
        GoogleCloudStorageHttpHandler handler,
        String method,
        String uri,
        BytesReference requestBody,
        Headers requestHeaders
    ) {
        final var httpExchange = new TestHttpExchange(method, uri, requestBody, requestHeaders);
        try {
            handler.handle(httpExchange);
        } catch (IOException e) {
            fail(e);
        }
        // A response code of 0 would mean the handler never sent a response at all.
        assertNotEquals(0, httpExchange.getResponseCode());
        final var responseHeaders = new Headers();
        httpExchange.getResponseHeaders().forEach((header, values) -> {
            // com.sun.net.httpserver.Headers.Headers() normalize keys
            if ("Range".equals(header) || "Content-range".equals(header) || "Location".equals(header)) {
                responseHeaders.put(header, List.copyOf(values));
            }
        });
        return new TestHttpResponse(httpExchange.getResponseCode(), httpExchange.getResponseBodyContents(), responseHeaders);
    }
/**
* Generate a query string for the given parameters
*
* @param parameters The query parameters as alternating key, value pairs
* @return The query string including all parameters with a non-null value (e.g.
*/
public static String generateQueryString(Object... parameters) {
if (parameters.length % 2 != 0) {
final String message = "Parameters must be represented as alternating key, value pairs";
assert false : message;
throw new IllegalArgumentException(message);
}
final StringBuilder builder = new StringBuilder();
for (int i = 0; i < parameters.length; i += 2) {
final String key = String.valueOf(requireNonNull(parameters[i], "Parameter names must be non-null strings"));
final Object value = parameters[i + 1];
if (value != null) {
if (builder.isEmpty() == false) {
builder.append("&");
}
builder.append(key).append("=").append(URLEncoder.encode(String.valueOf(value), StandardCharsets.UTF_8));
}
}
if (builder.isEmpty() == false) {
return "?" + builder;
}
return "";
}
private static Headers contentRangeHeader(@Nullable Integer startInclusive, @Nullable Integer endInclusive, @Nullable Integer limit) {
final String rangeString = startInclusive != null && endInclusive != null ? startInclusive + "-" + endInclusive : "*";
final String limitString = limit == null ? "*" : limit.toString();
return Headers.of("Content-Range", "bytes " + rangeString + "/" + limitString);
}
private static Headers rangeHeader(long start, long end) {
return Headers.of("Range", Strings.format("bytes=%d-%d", start, end));
}
    /**
     * Returns a fresh multipart boundary; the distinctive prefix plus a random UUID makes a
     * collision with payload bytes very unlikely.
     */
    private static String newMultipartBoundary() {
        return "__END_OF_PART__" + randomUUID();
    }
    /**
     * Builds the gzip-compressed multipart/related request body for a multipart upload: a JSON
     * metadata part (bucket + object name) followed by the raw content part, delimited by
     * {@code boundary} and terminated by the closing boundary marker.
     */
    private static BytesReference createGzipCompressedMultipartUploadBody(
        String bucketName,
        String path,
        BytesReference content,
        String boundary
    ) {
        final String metadataString = Strings.format("{\"bucket\":\"%s\", \"name\":\"%s\"}", bucketName, path);
        // Multipart parts use CRLF line endings; the template placeholders are substituted below.
        // NOTE(review): Content-Length uses metadataString.length() (chars, not UTF-8 bytes) —
        // fine for ASCII bucket/object names, but confirm if non-ASCII names are ever used here.
        final String headerStr = """
            --$boundary
            Content-Length: $metadata-length
            Content-Type: application/json; charset=UTF-8
            content-transfer-encoding: binary

            $metadata
            --$boundary
            Content-Type: application/octet-stream
            content-transfer-encoding: binary

            """.replace("\n", "\r\n")
            .replace("$boundary", boundary)
            .replace("$metadata-length", Integer.toString(metadataString.length()))
            .replace("$metadata", metadataString);
        final BytesReference header = new BytesArray(headerStr.getBytes(StandardCharsets.UTF_8));
        final BytesReference footer = new BytesArray("""
            --$boundary--
            """.replace("\n", "\r\n").replace("$boundary", boundary));
        // Concatenate header + content + footer and gzip the whole body, as the real client does.
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(out)) {
            gzipOutputStream.write(BytesReference.toBytes(CompositeBytesReference.of(header, content, footer)));
        } catch (IOException e) {
            fail(e);
        }
        return new BytesArray(out.toByteArray());
    }
    /**
     * Builds a multipart/mixed batch request body containing one embedded DELETE request per path,
     * each wrapped in an application/http part with an incrementing content-id, terminated by the
     * closing boundary marker.
     */
    private static String createBatchDeleteRequest(String bucketName, String boundary, String... paths) {
        final String deleteRequestTemplate = """
            DELETE %s/storage/v1/b/%s/o/%s HTTP/1.1
            Authorization: Bearer foo
            x-goog-api-client: gl-java/23.0.0 gdcl/2.1.1 mac-os-x/15.2

            """;
        final String partTemplate = """
            --$boundary
            Content-Length: %d
            Content-Type: application/http
            content-id: %d
            content-transfer-encoding: binary

            %s
            """.replace("$boundary", boundary);
        StringBuilder builder = new StringBuilder();
        // content-id values are 1-based and assigned in path order.
        AtomicInteger contentId = new AtomicInteger();
        Arrays.stream(paths).forEach(p -> {
            final String deleteRequest = Strings.format(deleteRequestTemplate, HOST, bucketName, p);
            final String part = Strings.format(partTemplate, deleteRequest.length(), contentId.incrementAndGet(), deleteRequest);
            builder.append(part);
        });
        builder.append("--").append(boundary).append("--");
        return builder.toString();
    }
private static
|
GoogleCloudStorageHttpHandlerTests
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/healthcheck/heartbeat/AbstractBeatCheckInterceptor.java
|
{
"start": 827,
"end": 1067
}
|
/**
 * Base class for interceptors that apply to instance beat-check tasks: it fixes the
 * {@link NacosNamingInterceptor#isInterceptType(Class)} check to accept
 * {@link InstanceBeatCheckTask} (and subtypes), leaving the actual intercept logic to subclasses.
 */
class ____ implements NacosNamingInterceptor<InstanceBeatCheckTask> {

    @Override
    public boolean isInterceptType(Class<?> type) {
        // Accept InstanceBeatCheckTask and any subtype.
        return InstanceBeatCheckTask.class.isAssignableFrom(type);
    }
}
|
AbstractBeatCheckInterceptor
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/services/AddingCompositeService.java
|
{
"start": 1586,
"end": 1914
}
|
/**
 * A {@link CompositeService} that re-exposes {@code addService}/{@code removeService} as public
 * methods — presumably to let external callers (e.g. tests or registry wiring) mutate the child
 * service list, which the base class does not allow directly. TODO(review): confirm the base
 * methods' visibility is the reason for these overrides.
 */
class ____ extends CompositeService {

    public AddingCompositeService(String name) {
        super(name);
    }

    @Override
    public void addService(Service service) {
        super.addService(service);
    }

    @Override
    public boolean removeService(Service service) {
        return super.removeService(service);
    }
}
|
AddingCompositeService
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/engine/cache/MemoryCacheAdapter.java
|
{
"start": 287,
"end": 1164
}
|
/**
 * A no-op {@link MemoryCache}: nothing is ever stored, the reported sizes are always zero, and any
 * resource passed to {@link #put} is immediately handed to the removal listener (so it can be
 * recycled) instead of being cached.
 */
class ____ implements MemoryCache {
  // Listener notified when a resource is "evicted" (i.e. immediately, since nothing is cached).
  private ResourceRemovedListener listener;

  @Override
  public long getCurrentSize() {
    return 0;
  }

  @Override
  public long getMaxSize() {
    return 0;
  }

  @Override
  public void setSizeMultiplier(float multiplier) {
    // Do nothing.
  }

  @Nullable
  @Override
  public Resource<?> remove(@NonNull Key key) {
    return null;
  }

  @Nullable
  @Override
  public Resource<?> put(@NonNull Key key, @Nullable Resource<?> resource) {
    // Nothing is cached, so the incoming resource is immediately treated as evicted and reported
    // to the listener. Guard against put() being called before a listener has been registered,
    // which previously threw a NullPointerException.
    if (resource != null && listener != null) {
      listener.onResourceRemoved(resource);
    }
    return null;
  }

  @Override
  public void setResourceRemovedListener(@NonNull ResourceRemovedListener listener) {
    this.listener = listener;
  }

  @Override
  public void clearMemory() {
    // Do nothing.
  }

  @Override
  public void trimMemory(int level) {
    // Do nothing.
  }
}
|
MemoryCacheAdapter
|
java
|
quarkusio__quarkus
|
extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/HideCheckedExceptionMessageTest.java
|
{
"start": 670,
"end": 2928
}
|
class ____ extends AbstractGraphQLTest {
private static final String IOEXCEPTION_MESSAGE = "Something went wrong";
private static final String INTERRUPTED_EXCEPTION_MESSAGE = "Something else went wrong";
private static final String SQL_EXCEPTION_MESSAGE = "Something went really wrong, but should expect a message";
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(TestApi.class)
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml")
.addAsResource(
new StringAsset(
"quarkus.smallrye-graphql.hide-checked-exception-message=" +
"java.io.IOException," +
"java.lang.InterruptedException"),
"application.properties"));
    // Verifies that messages of checked exceptions listed in
    // quarkus.smallrye-graphql.hide-checked-exception-message (IOException, InterruptedException)
    // are hidden from GraphQL responses, while an unlisted checked exception (SQLException) keeps
    // its message visible.
    // NOTE(review): the method name mentions null fields but the assertions check exception
    // message hiding — consider renaming.
    @Test
    void testExcludeNullFieldsInResponse() {
        // IOException is on the hide list: its message must not leak.
        given()
                .when()
                .accept(MEDIATYPE_JSON)
                .contentType(MEDIATYPE_JSON)
                .body(getPayload("{ something }"))
                .post("/graphql")
                .then()
                .assertThat()
                .statusCode(OK)
                .and()
                .body(not(containsString(IOEXCEPTION_MESSAGE)));
        // InterruptedException is also on the hide list.
        given()
                .when()
                .accept(MEDIATYPE_JSON)
                .contentType(MEDIATYPE_JSON)
                .body(getPayload("{ somethingElse }"))
                .post("/graphql")
                .then()
                .assertThat()
                .statusCode(OK)
                .and()
                .body(not(containsString(INTERRUPTED_EXCEPTION_MESSAGE)));
        // SQLException is not configured, so its message should still appear.
        given()
                .when()
                .accept(MEDIATYPE_JSON)
                .contentType(MEDIATYPE_JSON)
                .body(getPayload("{ somethingElseElse }"))
                .post("/graphql")
                .then()
                .assertThat()
                .statusCode(OK)
                .and()
                .body(containsString(SQL_EXCEPTION_MESSAGE));
    }
@GraphQLApi
public static
|
HideCheckedExceptionMessageTest
|
java
|
elastic__elasticsearch
|
libs/core/src/test/java/org/elasticsearch/core/internal/provider/EmbeddedImplClassLoaderTests.java
|
{
"start": 4830,
"end": 5486
}
|
class ____ loaded, when the multi-release attribute
* is present and the versioned entry is less than or equal to the runtime version.
*/
    public void testLoadWithMultiReleaseEnabled11() throws Exception {
        assumeTrue("JDK version not greater than or equal to 11", Runtime.version().feature() >= 11);
        Object foobar = newFooBar(true, 11);
        // expect 11 version of FooBar to be loaded
        assertThat(foobar.toString(), equalTo("FooBar " + 11));
        // Adding lower versioned entries must not change the selection: the highest eligible
        // version (11) still wins.
        foobar = newFooBar(true, 11, 10, 9, 8);
        assertThat(foobar.toString(), equalTo("FooBar " + 11));
    }
/*
* Tests that the specific, 17, version of a
|
is
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java
|
{
"start": 1529,
"end": 9315
}
|
/**
 * Integration tests for basic index/create semantics: auto-generated-ID uniqueness under repeated
 * index creation, the CREATED/UPDATED result flag across delete/flush/bulk/external-versioning,
 * and index-name validation (length limits and reserved names).
 */
class ____ extends ESIntegTestCase {
    /**
     * This test tries to simulate load while creating an index and indexing documents
     * while the index is being created.
     */
    public void testAutoGenerateIdNoDuplicates() throws Exception {
        int numberOfIterations = scaledRandomIntBetween(10, 50);
        for (int i = 0; i < numberOfIterations; i++) {
            createIndex("test");
            int numOfDocs = randomIntBetween(10, 100);
            logger.info("indexing [{}] docs", numOfDocs);
            List<IndexRequestBuilder> builders = new ArrayList<>(numOfDocs);
            for (int j = 0; j < numOfDocs; j++) {
                builders.add(prepareIndex("test").setSource("field", "value_" + j));
            }
            indexRandom(true, builders);
            logger.info("verifying indexed content");
            // Repeated hit-count checks guard against transient duplicates from auto-generated IDs.
            int numOfChecks = randomIntBetween(16, 24);
            for (int j = 0; j < numOfChecks; j++) {
                assertHitCount(prepareSearch("test"), numOfDocs);
            }
            internalCluster().wipeIndices("test");
        }
    }

    // Result flag: CREATED for a new doc, UPDATED for an overwrite, CREATED again after a delete.
    public void testCreatedFlag() throws Exception {
        createIndex("test");
        ensureGreen();

        DocWriteResponse indexResponse = prepareIndex("test").setId("1").setSource("field1", "value1_1").get();
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

        indexResponse = prepareIndex("test").setId("1").setSource("field1", "value1_2").get();
        assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult());

        client().prepareDelete("test", "1").get();

        indexResponse = prepareIndex("test").setId("1").setSource("field1", "value1_2").get();
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
    }

    // A flush between delete and re-index must not affect the CREATED flag.
    public void testCreatedFlagWithFlush() throws Exception {
        createIndex("test");
        ensureGreen();

        DocWriteResponse indexResponse = prepareIndex("test").setId("1").setSource("field1", "value1_1").get();
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

        client().prepareDelete("test", "1").get();

        flush();

        indexResponse = prepareIndex("test").setId("1").setSource("field1", "value1_2").get();
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
    }

    // Under concurrent indexing of random IDs, each distinct ID must report CREATED at most once.
    public void testCreatedFlagParallelExecution() throws Exception {
        createIndex("test");
        ensureGreen();

        int threadCount = 20;
        final int docCount = 300;
        int taskCount = docCount * threadCount;

        final AtomicIntegerArray createdCounts = new AtomicIntegerArray(docCount);
        ExecutorService threadPool = Executors.newFixedThreadPool(threadCount);
        List<Callable<Void>> tasks = new ArrayList<>(taskCount);
        final Random random = random();
        for (int i = 0; i < taskCount; i++) {
            tasks.add(() -> {
                int docId = random.nextInt(docCount);
                DocWriteResponse indexResponse = indexDoc("test", Integer.toString(docId), "field1", "value");
                if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
                    createdCounts.incrementAndGet(docId);
                }
                return null;
            });
        }

        threadPool.invokeAll(tasks);

        for (int i = 0; i < docCount; i++) {
            assertThat(createdCounts.get(i), lessThanOrEqualTo(1));
        }
        terminate(threadPool);
    }

    // External versioning still reports CREATED on the first write.
    public void testCreatedFlagWithExternalVersioning() throws Exception {
        createIndex("test");
        ensureGreen();

        DocWriteResponse indexResponse = prepareIndex("test").setId("1")
            .setSource("field1", "value1_1")
            .setVersion(123)
            .setVersionType(VersionType.EXTERNAL)
            .get();
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
    }

    // The CREATED flag is also reported per-item for bulk requests.
    public void testCreateFlagWithBulk() {
        createIndex("test");
        ensureGreen();

        BulkResponse bulkResponse = client().prepareBulk().add(prepareIndex("test").setId("1").setSource("field1", "value1_1")).get();
        assertThat(bulkResponse.hasFailures(), equalTo(false));
        assertThat(bulkResponse.getItems().length, equalTo(1));
        IndexResponse indexResponse = bulkResponse.getItems()[0].getResponse();
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
    }

    // Index names longer than MAX_INDEX_NAME_BYTES (in bytes, not chars) must be rejected;
    // a name of exactly the maximum length is allowed.
    public void testCreateIndexWithLongName() {
        int min = MetadataCreateIndexService.MAX_INDEX_NAME_BYTES + 1;
        int max = MetadataCreateIndexService.MAX_INDEX_NAME_BYTES * 2;
        try {
            createIndex(randomAlphaOfLengthBetween(min, max).toLowerCase(Locale.ROOT));
            fail("exception should have been thrown on too-long index name");
        } catch (InvalidIndexNameException e) {
            assertThat(
                "exception contains message about index name too long: " + e.getMessage(),
                e.getMessage().contains("index name is too long,"),
                equalTo(true)
            );
        }

        try {
            prepareIndex(randomAlphaOfLengthBetween(min, max).toLowerCase(Locale.ROOT)).setSource("foo", "bar").get();
            fail("exception should have been thrown on too-long index name");
        } catch (InvalidIndexNameException e) {
            assertThat(
                "exception contains message about index name too long: " + e.getMessage(),
                e.getMessage().contains("index name is too long,"),
                equalTo(true)
            );
        }

        try {
            // Catch chars that are more than a single byte
            prepareIndex(
                randomAlphaOfLength(MetadataCreateIndexService.MAX_INDEX_NAME_BYTES - 1).toLowerCase(Locale.ROOT) + "Ϟ".toLowerCase(
                    Locale.ROOT
                )
            ).setSource("foo", "bar").get();
            fail("exception should have been thrown on too-long index name");
        } catch (InvalidIndexNameException e) {
            assertThat(
                "exception contains message about index name too long: " + e.getMessage(),
                e.getMessage().contains("index name is too long,"),
                equalTo(true)
            );
        }

        // we can create an index of max length
        createIndex(randomAlphaOfLength(MetadataCreateIndexService.MAX_INDEX_NAME_BYTES).toLowerCase(Locale.ROOT));
    }

    // "." and ".." are reserved and must be rejected as index names.
    public void testInvalidIndexName() {
        try {
            createIndex(".");
            fail("exception should have been thrown on dot index name");
        } catch (InvalidIndexNameException e) {
            assertThat(
                "exception contains message about index name is dot " + e.getMessage(),
                e.getMessage().contains("Invalid index name [.], must not be \'.\' or '..'"),
                equalTo(true)
            );
        }

        try {
            createIndex("..");
            fail("exception should have been thrown on dot index name");
        } catch (InvalidIndexNameException e) {
            assertThat(
                "exception contains message about index name is dot " + e.getMessage(),
                e.getMessage().contains("Invalid index name [..], must not be \'.\' or '..'"),
                equalTo(true)
            );
        }
    }

    // An empty-string field name must fail document parsing with a clear message.
    public void testDocumentWithBlankFieldName() {
        Exception e = expectThrows(DocumentParsingException.class, prepareIndex("test").setId("1").setSource("", "value1_2"));
        assertThat(e.getMessage(), containsString("failed to parse"));
        assertThat(e.getCause().getMessage(), containsString("field name cannot be an empty string"));
    }
}
|
IndexActionIT
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/text/StrTokenizerTest.java
|
{
"start": 1506,
"end": 29007
}
|
class ____ extends AbstractLangTest {
private static final String CSV_SIMPLE_FIXTURE = "A,b,c";
private static final String TSV_SIMPLE_FIXTURE = "A\tb\tc";
    // NOTE(review): despite the name, this only asserts that the given tokenizer is not the shared
    // CSV/TSV factory instance — it does not verify any cloned state. Confirm intent with callers.
    private void checkClone(final StrTokenizer tokenizer) {
        assertNotSame(StrTokenizer.getCSVInstance(), tokenizer);
        assertNotSame(StrTokenizer.getTSVInstance(), tokenizer);
    }
    // test1..test8 exercise combinations of delimiter/quote/ignored/trimmer matchers and the
    // empty-token handling flags against small semicolon- or space-separated inputs, comparing the
    // full token array against an expected array element by element.
    @Test
    void test1() {
        final String input = "a;b;c;\"d;\"\"e\";f; ; ; ";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c", "d;\"e", "f", "", "", ""};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }
    }

    @Test
    void test2() {
        final String input = "a;b;c ;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c ", "d;\"e", "f", " ", " ", ""};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }
    }

    @Test
    void test3() {
        final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", " c", "d;\"e", "f", " ", " ", ""};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }
    }

    @Test
    void test4() {
        final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(true);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c", "d;\"e", "f"};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }
    }

    @Test
    void test5() {
        final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c", "d;\"e", "f", null, null, null};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }
    }

    // test6 additionally verifies forward/backward iteration via hasNext/next and
    // hasPrevious/previous.
    // NOTE(review): unlike the other tests, the individual token values are never compared here —
    // only the array length and the iteration counts are asserted.
    @Test
    void test6() {
        final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        // tok.setTreatingEmptyAsNull(true);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", " c", "d;\"e", "f", null, null, null};

        int nextCount = 0;
        while (tok.hasNext()) {
            tok.next();
            nextCount++;
        }

        int prevCount = 0;
        while (tok.hasPrevious()) {
            tok.previous();
            prevCount++;
        }

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));

        assertEquals(nextCount, expected.length, "could not cycle through entire token list using the 'hasNext' and 'next' methods");

        assertEquals(prevCount, expected.length, "could not cycle through entire token list using the 'hasPrevious' and 'previous' methods");
    }

    @Test
    void test7() {
        final String input = "a b c \"d e\" f ";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterMatcher(StrMatcher.spaceMatcher());
        tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "", "", "b", "c", "d e", "f", ""};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }
    }

    @Test
    void test8() {
        final String input = "a b c \"d e\" f ";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterMatcher(StrMatcher.spaceMatcher());
        tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(true);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c", "d e", "f"};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }
    }
    // testBasic* cover the default whitespace tokenizer, explicit delimiter/quote chars, the
    // empty-token flags, and ignored-matcher + trimmer interactions.
    @Test
    void testBasic1() {
        final String input = "a  b c";
        final StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasic2() {
        final String input = "a \nb\fc";
        final StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    // A non-whitespace control char (\u0001) is part of the token, not a delimiter.
    @Test
    void testBasic3() {
        final String input = "a \nb\u0001\fc";
        final StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b\u0001", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    // Without a quote matcher configured, quote characters are kept verbatim.
    @Test
    void testBasic4() {
        final String input = "a \"b\" c";
        final StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("\"b\"", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasic5() {
        final String input = "a:b':c";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        assertEquals("a", tok.next());
        assertEquals("b'", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicDelim1() {
        final String input = "a:b:c";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    // A delimiter absent from the input yields the whole input as one token.
    @Test
    void testBasicDelim2() {
        final String input = "a:b:c";
        final StrTokenizer tok = new StrTokenizer(input, ',');
        assertEquals("a:b:c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicEmpty1() {
        final String input = "a  b c";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setIgnoreEmptyTokens(false);
        assertEquals("a", tok.next());
        assertEquals("", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicEmpty2() {
        final String input = "a  b c";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertNull(tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicIgnoreTrimmed1() {
        final String input = "a: bIGNOREc : ";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bc", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicIgnoreTrimmed2() {
        final String input = "IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bc", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    // Without a trimmer, surrounding whitespace survives even though "IGNORE" is removed.
    @Test
    void testBasicIgnoreTrimmed3() {
        final String input = "IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("  b c  ", tok.next());
        assertEquals("  ", tok.next());
        assertFalse(tok.hasNext());
    }

    // Inside quotes the ignored matcher does not apply, so "IGNORE" is preserved there.
    @Test
    void testBasicIgnoreTrimmed4() {
        final String input = "IGNOREaIGNORE: IGNORE 'bIGNOREc'IGNORE'd' IGNORE : IGNORE ";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bIGNOREcd", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }
    // testBasicQuoted* cover quote handling: quote stripping, doubled quotes as escapes, adjacent
    // quoted segments merging into one token, and quotes interacting with trimming.
    @Test
    void testBasicQuoted1() {
        final String input = "a 'b' c";
        final StrTokenizer tok = new StrTokenizer(input, ' ', '\'');
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicQuoted2() {
        final String input = "a:'b':";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    // A doubled quote inside a quoted region is an escaped literal quote.
    @Test
    void testBasicQuoted3() {
        final String input = "a:'b''c'";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b'c", tok.next());
        assertFalse(tok.hasNext());
    }

    // Adjacent quoted segments within one field are concatenated into a single token.
    @Test
    void testBasicQuoted4() {
        final String input = "a: 'b' 'c' :d";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b c", tok.next());
        assertEquals("d", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicQuoted5() {
        final String input = "a: 'b'x'c' :d";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bxc", tok.next());
        assertEquals("d", tok.next());
        assertFalse(tok.hasNext());
    }

    // With quoteMatcher() both ' and " act as quotes; an unmatched " stays literal here.
    @Test
    void testBasicQuoted6() {
        final String input = "a:'b'\"c':d";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setQuoteMatcher(StrMatcher.quoteMatcher());
        assertEquals("a", tok.next());
        assertEquals("b\"c:d", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicQuoted7() {
        final String input = "a:\"There's a reason here\":b";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setQuoteMatcher(StrMatcher.quoteMatcher());
        assertEquals("a", tok.next());
        assertEquals("There's a reason here", tok.next());
        assertEquals("b", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicQuotedTrimmed1() {
        final String input = "a: 'b' :";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicTrimmed1() {
        final String input = "a: b :  ";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    void testBasicTrimmed2() {
        final String input = "a:  b  :";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setTrimmerMatcher(StrMatcher.stringMatcher("  "));
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    // Every fluent setter / reset overload must return the tokenizer itself for chaining.
    @Test
    void testChaining() {
        final StrTokenizer tok = new StrTokenizer();
        assertEquals(tok, tok.reset());
        assertEquals(tok, tok.reset(""));
        assertEquals(tok, tok.reset(new char[0]));
        assertEquals(tok, tok.setDelimiterChar(' '));
        assertEquals(tok, tok.setDelimiterString(" "));
        assertEquals(tok, tok.setDelimiterMatcher(null));
        assertEquals(tok, tok.setQuoteChar(' '));
        assertEquals(tok, tok.setQuoteMatcher(null));
        assertEquals(tok, tok.setIgnoredChar(' '));
        assertEquals(tok, tok.setIgnoredMatcher(null));
        assertEquals(tok, tok.setTrimmerMatcher(null));
        assertEquals(tok, tok.setEmptyTokenAsNull(false));
        assertEquals(tok, tok.setIgnoreEmptyTokens(false));
    }
/**
* Tests that the {@link StrTokenizer#clone()} clone method catches {@link CloneNotSupportedException} and returns
* {@code null}.
*/
@Test
void testCloneNotSupportedException() {
final Object notCloned = new StrTokenizer() {
@Override
Object cloneReset() throws CloneNotSupportedException {
throw new CloneNotSupportedException("test");
}
}.clone();
assertNull(notCloned);
}
@Test
void testCloneNull() {
final StrTokenizer tokenizer = new StrTokenizer((char[]) null);
// Start sanity check
assertNull(tokenizer.nextToken());
tokenizer.reset();
assertNull(tokenizer.nextToken());
// End sanity check
final StrTokenizer clonedTokenizer = (StrTokenizer) tokenizer.clone();
tokenizer.reset();
assertNull(tokenizer.nextToken());
assertNull(clonedTokenizer.nextToken());
}
@Test
void testCloneReset() {
final char[] input = {'a'};
final StrTokenizer tokenizer = new StrTokenizer(input);
// Start sanity check
assertEquals("a", tokenizer.nextToken());
tokenizer.reset(input);
assertEquals("a", tokenizer.nextToken());
// End sanity check
final StrTokenizer clonedTokenizer = (StrTokenizer) tokenizer.clone();
input[0] = 'b';
tokenizer.reset(input);
assertEquals("b", tokenizer.nextToken());
assertEquals("a", clonedTokenizer.nextToken());
}
@Test
void testConstructor_charArray() {
StrTokenizer tok = new StrTokenizer("a b".toCharArray());
assertEquals("a", tok.next());
assertEquals("b", tok.next());
assertFalse(tok.hasNext());
tok = new StrTokenizer(new char[0]);
assertFalse(tok.hasNext());
tok = new StrTokenizer((char[]) null);
assertFalse(tok.hasNext());
}
@Test
void testConstructor_charArray_char() {
StrTokenizer tok = new StrTokenizer("a b".toCharArray(), ' ');
assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
assertEquals("a", tok.next());
assertEquals("b", tok.next());
assertFalse(tok.hasNext());
tok = new StrTokenizer(new char[0], ' ');
assertFalse(tok.hasNext());
tok = new StrTokenizer((char[]) null, ' ');
assertFalse(tok.hasNext());
}
@Test
void testConstructor_charArray_char_char() {
StrTokenizer tok = new StrTokenizer("a b".toCharArray(), ' ', '"');
assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
assertEquals(1, tok.getQuoteMatcher().isMatch("\"".toCharArray(), 0, 0, 1));
assertEquals("a", tok.next());
assertEquals("b", tok.next());
assertFalse(tok.hasNext());
tok = new StrTokenizer(new char[0], ' ', '"');
assertFalse(tok.hasNext());
tok = new StrTokenizer((char[]) null, ' ', '"');
assertFalse(tok.hasNext());
}
@Test
void testConstructor_String() {
StrTokenizer tok = new StrTokenizer("a b");
assertEquals("a", tok.next());
assertEquals("b", tok.next());
assertFalse(tok.hasNext());
tok = new StrTokenizer("");
assertFalse(tok.hasNext());
tok = new StrTokenizer((String) null);
assertFalse(tok.hasNext());
}
@Test
void testConstructor_String_char() {
StrTokenizer tok = new StrTokenizer("a b", ' ');
assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
assertEquals("a", tok.next());
assertEquals("b", tok.next());
assertFalse(tok.hasNext());
tok = new StrTokenizer("", ' ');
assertFalse(tok.hasNext());
tok = new StrTokenizer((String) null, ' ');
assertFalse(tok.hasNext());
}
@Test
void testConstructor_String_char_char() {
StrTokenizer tok = new StrTokenizer("a b", ' ', '"');
assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
assertEquals(1, tok.getQuoteMatcher().isMatch("\"".toCharArray(), 0, 0, 1));
assertEquals("a", tok.next());
assertEquals("b", tok.next());
assertFalse(tok.hasNext());
tok = new StrTokenizer("", ' ', '"');
assertFalse(tok.hasNext());
tok = new StrTokenizer((String) null, ' ', '"');
assertFalse(tok.hasNext());
}
private void testCSV(final String data) {
testXSVAbc(StrTokenizer.getCSVInstance(data));
testXSVAbc(StrTokenizer.getCSVInstance(data.toCharArray()));
}
@Test
void testCSVEmpty() {
testEmpty(StrTokenizer.getCSVInstance());
testEmpty(StrTokenizer.getCSVInstance(""));
}
@Test
void testCSVSimple() {
testCSV(CSV_SIMPLE_FIXTURE);
}
@Test
void testCSVSimpleNeedsTrim() {
testCSV(" " + CSV_SIMPLE_FIXTURE);
testCSV(" \n\t " + CSV_SIMPLE_FIXTURE);
testCSV(" \n " + CSV_SIMPLE_FIXTURE + "\n\n\r");
}
void testEmpty(final StrTokenizer tokenizer) {
checkClone(tokenizer);
assertFalse(tokenizer.hasNext());
assertFalse(tokenizer.hasPrevious());
assertNull(tokenizer.nextToken());
assertEquals(0, tokenizer.size());
assertThrows(NoSuchElementException.class, tokenizer::next);
}
@Test
void testGetContent() {
final String input = "a b c \"d e\" f ";
StrTokenizer tok = new StrTokenizer(input);
assertEquals(input, tok.getContent());
tok = new StrTokenizer(input.toCharArray());
assertEquals(input, tok.getContent());
tok = new StrTokenizer();
assertNull(tok.getContent());
}
@Test
void testIteration() {
final StrTokenizer tkn = new StrTokenizer("a b c");
assertFalse(tkn.hasPrevious());
assertThrows(NoSuchElementException.class, tkn::previous);
assertTrue(tkn.hasNext());
assertEquals("a", tkn.next());
assertThrows(UnsupportedOperationException.class, tkn::remove);
assertThrows(UnsupportedOperationException.class, () -> tkn.set("x"));
assertThrows(UnsupportedOperationException.class, () -> tkn.add("y"));
assertTrue(tkn.hasPrevious());
assertTrue(tkn.hasNext());
assertEquals("b", tkn.next());
assertTrue(tkn.hasPrevious());
assertTrue(tkn.hasNext());
assertEquals("c", tkn.next());
assertTrue(tkn.hasPrevious());
assertFalse(tkn.hasNext());
assertThrows(NoSuchElementException.class, tkn::next);
assertTrue(tkn.hasPrevious());
assertFalse(tkn.hasNext());
}
@Test
void testListArray() {
final String input = "a b c";
final StrTokenizer tok = new StrTokenizer(input);
final String[] array = tok.getTokenArray();
final List<?> list = tok.getTokenList();
assertEquals(Arrays.asList(array), list);
assertEquals(3, list.size());
}
@Test
void testReset() {
final StrTokenizer tok = new StrTokenizer("a b c");
assertEquals("a", tok.next());
assertEquals("b", tok.next());
assertEquals("c", tok.next());
assertFalse(tok.hasNext());
tok.reset();
assertEquals("a", tok.next());
assertEquals("b", tok.next());
assertEquals("c", tok.next());
assertFalse(tok.hasNext());
}
@Test
void testReset_charArray() {
final StrTokenizer tok = new StrTokenizer("x x x");
final char[] array = {'a', 'b', 'c'};
tok.reset(array);
assertEquals("abc", tok.next());
assertFalse(tok.hasNext());
tok.reset((char[]) null);
assertFalse(tok.hasNext());
}
@Test
void testReset_String() {
final StrTokenizer tok = new StrTokenizer("x x x");
tok.reset("d e");
assertEquals("d", tok.next());
assertEquals("e", tok.next());
assertFalse(tok.hasNext());
tok.reset((String) null);
assertFalse(tok.hasNext());
}
@Test
void testTokenizeSubclassInputChange() {
final StrTokenizer tkn = new StrTokenizer("a b c d e") {
@Override
protected List<String> tokenize(final char[] chars, final int offset, final int count) {
return super.tokenize("w x y z".toCharArray(), 2, 5);
}
};
assertEquals("x", tkn.next());
assertEquals("y", tkn.next());
}
@Test
void testTokenizeSubclassOutputChange() {
final StrTokenizer tkn = new StrTokenizer("a b c") {
@Override
protected List<String> tokenize(final char[] chars, final int offset, final int count) {
final List<String> list = super.tokenize(chars, offset, count);
Collections.reverse(list);
return list;
}
};
assertEquals("c", tkn.next());
assertEquals("b", tkn.next());
assertEquals("a", tkn.next());
}
@Test
void testToString() {
final StrTokenizer tkn = new StrTokenizer("a b c d e");
assertEquals("StrTokenizer[not tokenized yet]", tkn.toString());
tkn.next();
assertEquals("StrTokenizer[a, b, c, d, e]", tkn.toString());
}
@Test
void testTSV() {
testXSVAbc(StrTokenizer.getTSVInstance(TSV_SIMPLE_FIXTURE));
testXSVAbc(StrTokenizer.getTSVInstance(TSV_SIMPLE_FIXTURE.toCharArray()));
}
@Test
void testTSVEmpty() {
testEmpty(StrTokenizer.getTSVInstance());
testEmpty(StrTokenizer.getTSVInstance(""));
}
void testXSVAbc(final StrTokenizer tokenizer) {
checkClone(tokenizer);
assertEquals(-1, tokenizer.previousIndex());
assertEquals(0, tokenizer.nextIndex());
assertNull(tokenizer.previousToken());
assertEquals("A", tokenizer.nextToken());
assertEquals(1, tokenizer.nextIndex());
assertEquals("b", tokenizer.nextToken());
assertEquals(2, tokenizer.nextIndex());
assertEquals("c", tokenizer.nextToken());
assertEquals(3, tokenizer.nextIndex());
assertNull(tokenizer.nextToken());
assertEquals(3, tokenizer.nextIndex());
assertEquals("c", tokenizer.previousToken());
assertEquals(2, tokenizer.nextIndex());
assertEquals("b", tokenizer.previousToken());
assertEquals(1, tokenizer.nextIndex());
assertEquals("A", tokenizer.previousToken());
assertEquals(0, tokenizer.nextIndex());
assertNull(tokenizer.previousToken());
assertEquals(0, tokenizer.nextIndex());
assertEquals(-1, tokenizer.previousIndex());
assertEquals(3, tokenizer.size());
}
}
|
StrTokenizerTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_filteredOn_notIn_Test.java
|
{
"start": 1115,
"end": 4401
}
|
class ____ extends AtomicReferenceArrayAssert_filtered_baseTest {
@Test
void should_apply_notIn_filter() {
assertThat(employees).filteredOn("age", notIn(800, 10)).containsOnly(luke);
assertThat(employees).filteredOn("age", notIn(800)).containsOnly(luke, noname);
}
@Test
void should_filter_object_array_under_test_on_property_not_backed_by_a_field_values() {
assertThat(employees).filteredOn("adult", notIn(false)).containsOnly(yoda, obiwan, luke);
assertThat(employees).filteredOn("adult", notIn(true)).containsOnly(noname);
assertThat(employees).filteredOn("adult", notIn(true, false)).isEmpty();
}
@Test
void should_filter_object_array_under_test_on_public_field_values() {
assertThat(employees).filteredOn("id", notIn(2L, 3L, 4L)).containsOnly(yoda);
}
@Test
void should_filter_object_array_under_test_on_private_field_values() {
assertThat(employees).filteredOn("city", notIn("Paris")).containsOnly(yoda, obiwan, luke, noname);
assertThat(employees).filteredOn("city", notIn("New York")).isEmpty();
assertThat(employees).filteredOn("city", notIn("New York", "Paris")).isEmpty();
}
@Test
void should_fail_if_filter_is_on_private_field_and_reading_private_field_is_disabled() {
setAllowExtractingPrivateFields(false);
try {
assertThatExceptionOfType(IntrospectionError.class).isThrownBy(() -> {
assertThat(employees).filteredOn("city", notIn("New York")).isEmpty();
});
} finally {
setAllowExtractingPrivateFields(true);
}
}
@Test
void should_filter_object_array_under_test_on_nested_property_values() {
assertThat(employees).filteredOn("name.first", notIn("Luke")).containsOnly(yoda, obiwan, noname);
}
@Test
void should_filter_object_array_under_test_on_nested_mixed_property_and_field_values() {
assertThat(employees).filteredOn("name.last", notIn("Skywalker")).containsOnly(yoda, obiwan, noname);
assertThat(employees).filteredOn("name.last", notIn("Skywalker", null)).isEmpty();
assertThat(employees).filteredOn("name.last", notIn("Vader")).containsOnly(yoda, obiwan, noname, luke);
}
@Test
void should_fail_if_given_property_or_field_name_is_null() {
assertThatIllegalArgumentException().isThrownBy(() -> assertThat(employees).filteredOn((String) null, notIn(800)))
.withMessage("The property/field name to filter on should not be null or empty");
}
@Test
void should_fail_if_given_property_or_field_name_is_empty() {
assertThatIllegalArgumentException().isThrownBy(() -> assertThat(employees).filteredOn("", notIn(800)))
.withMessage("The property/field name to filter on should not be null or empty");
}
@Test
void should_fail_if_on_of_the_object_array_element_does_not_have_given_property_or_field() {
assertThatExceptionOfType(IntrospectionError.class).isThrownBy(() -> assertThat(employees).filteredOn("secret",
notIn("???")))
.withMessageContaining("Can't find any field or property with name 'secret'");
}
}
|
AtomicReferenceArrayAssert_filteredOn_notIn_Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java
|
{
"start": 468,
"end": 1879
}
|
class ____ extends AbstractVectorBuilder implements DoubleVector.Builder {
private double[] values;
DoubleVectorBuilder(int estimatedSize, BlockFactory blockFactory) {
super(blockFactory);
int initialSize = Math.max(estimatedSize, 2);
adjustBreaker(initialSize);
values = new double[Math.max(estimatedSize, 2)];
}
@Override
public DoubleVectorBuilder appendDouble(double value) {
ensureCapacity();
values[valueCount] = value;
valueCount++;
return this;
}
@Override
protected int elementSize() {
return Double.BYTES;
}
@Override
protected int valuesLength() {
return values.length;
}
@Override
protected void growValuesArray(int newSize) {
values = Arrays.copyOf(values, newSize);
}
@Override
public DoubleVector build() {
finish();
DoubleVector vector;
if (valueCount == 1) {
vector = blockFactory.newConstantDoubleBlockWith(values[0], 1, estimatedBytes).asVector();
} else {
if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) {
values = Arrays.copyOf(values, valueCount);
}
vector = blockFactory.newDoubleArrayVector(values, valueCount, estimatedBytes);
}
built();
return vector;
}
}
|
DoubleVectorBuilder
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/IncompatibleModifiersCheckerTest.java
|
{
"start": 5062,
"end": 5136
}
|
interface ____ {
Foo[] value();
}
@
|
Foos
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/annotation/AutowiredAnnotationBeanPostProcessor.java
|
{
"start": 7808,
"end": 22679
}
|
class ____ implements SmartInstantiationAwareBeanPostProcessor,
MergedBeanDefinitionPostProcessor, BeanRegistrationAotProcessor, PriorityOrdered, BeanFactoryAware {
private static final Constructor<?>[] EMPTY_CONSTRUCTOR_ARRAY = new Constructor<?>[0];
protected final Log logger = LogFactory.getLog(getClass());
private final Set<Class<? extends Annotation>> autowiredAnnotationTypes = CollectionUtils.newLinkedHashSet(4);
private String requiredParameterName = "required";
private boolean requiredParameterValue = true;
private int order = Ordered.LOWEST_PRECEDENCE - 2;
private @Nullable ConfigurableListableBeanFactory beanFactory;
private @Nullable MetadataReaderFactory metadataReaderFactory;
private final Set<String> lookupMethodsChecked = ConcurrentHashMap.newKeySet(256);
private final Map<Class<?>, Constructor<?>[]> candidateConstructorsCache = new ConcurrentHashMap<>(256);
private final Map<String, InjectionMetadata> injectionMetadataCache = new ConcurrentHashMap<>(256);
/**
* Create a new {@code AutowiredAnnotationBeanPostProcessor} for Spring's
* standard {@link Autowired @Autowired} and {@link Value @Value} annotations.
* <p>Also supports the common {@link jakarta.inject.Inject @Inject} annotation
* if available.
*/
@SuppressWarnings("unchecked")
public AutowiredAnnotationBeanPostProcessor() {
this.autowiredAnnotationTypes.add(Autowired.class);
this.autowiredAnnotationTypes.add(Value.class);
ClassLoader classLoader = AutowiredAnnotationBeanPostProcessor.class.getClassLoader();
try {
this.autowiredAnnotationTypes.add((Class<? extends Annotation>)
ClassUtils.forName("jakarta.inject.Inject", classLoader));
logger.trace("'jakarta.inject.Inject' annotation found and supported for autowiring");
}
catch (ClassNotFoundException ex) {
// jakarta.inject API not available - simply skip.
}
}
/**
* Set the 'autowired' annotation type, to be used on constructors, fields,
* setter methods, and arbitrary config methods.
* <p>The default autowired annotation types are the Spring-provided
* {@link Autowired @Autowired} and {@link Value @Value} annotations as well
* as the common {@code @Inject} annotation, if available.
* <p>This setter property exists so that developers can provide their own
* (non-Spring-specific) annotation type to indicate that a member is supposed
* to be autowired.
*/
public void setAutowiredAnnotationType(Class<? extends Annotation> autowiredAnnotationType) {
Assert.notNull(autowiredAnnotationType, "'autowiredAnnotationType' must not be null");
this.autowiredAnnotationTypes.clear();
this.autowiredAnnotationTypes.add(autowiredAnnotationType);
}
/**
* Set the 'autowired' annotation types, to be used on constructors, fields,
* setter methods, and arbitrary config methods.
* <p>The default autowired annotation types are the Spring-provided
* {@link Autowired @Autowired} and {@link Value @Value} annotations as well
* as the common {@code @Inject} annotation, if available.
* <p>This setter property exists so that developers can provide their own
* (non-Spring-specific) annotation types to indicate that a member is supposed
* to be autowired.
*/
public void setAutowiredAnnotationTypes(Set<Class<? extends Annotation>> autowiredAnnotationTypes) {
Assert.notEmpty(autowiredAnnotationTypes, "'autowiredAnnotationTypes' must not be empty");
this.autowiredAnnotationTypes.clear();
this.autowiredAnnotationTypes.addAll(autowiredAnnotationTypes);
}
/**
* Set the name of an attribute of the annotation that specifies whether it is required.
* @see #setRequiredParameterValue(boolean)
*/
public void setRequiredParameterName(String requiredParameterName) {
this.requiredParameterName = requiredParameterName;
}
/**
* Set the boolean value that marks a dependency as required.
* <p>For example if using 'required=true' (the default), this value should be
* {@code true}; but if using 'optional=false', this value should be {@code false}.
* @see #setRequiredParameterName(String)
*/
public void setRequiredParameterValue(boolean requiredParameterValue) {
this.requiredParameterValue = requiredParameterValue;
}
public void setOrder(int order) {
this.order = order;
}
@Override
public int getOrder() {
return this.order;
}
@Override
public void setBeanFactory(BeanFactory beanFactory) {
if (!(beanFactory instanceof ConfigurableListableBeanFactory clbf)) {
throw new IllegalArgumentException(
"AutowiredAnnotationBeanPostProcessor requires a ConfigurableListableBeanFactory: " + beanFactory);
}
this.beanFactory = clbf;
this.metadataReaderFactory = MetadataReaderFactory.create(clbf.getBeanClassLoader());
}
@Override
public void postProcessMergedBeanDefinition(RootBeanDefinition beanDefinition, Class<?> beanType, String beanName) {
// Register externally managed config members on bean definition.
findInjectionMetadata(beanName, beanType, beanDefinition);
// Use opportunity to clear caches which are not needed after singleton instantiation.
// The injectionMetadataCache itself is left intact since it cannot be reliably
// reconstructed in terms of externally managed config members otherwise.
if (beanDefinition.isSingleton()) {
this.candidateConstructorsCache.remove(beanType);
// With actual lookup overrides, keep it intact along with bean definition.
if (!beanDefinition.hasMethodOverrides()) {
this.lookupMethodsChecked.remove(beanName);
}
}
}
@Override
public void resetBeanDefinition(String beanName) {
this.lookupMethodsChecked.remove(beanName);
this.injectionMetadataCache.remove(beanName);
}
@Override
public @Nullable BeanRegistrationAotContribution processAheadOfTime(RegisteredBean registeredBean) {
Class<?> beanClass = registeredBean.getBeanClass();
String beanName = registeredBean.getBeanName();
RootBeanDefinition beanDefinition = registeredBean.getMergedBeanDefinition();
InjectionMetadata metadata = findInjectionMetadata(beanName, beanClass, beanDefinition);
Collection<AutowiredElement> autowiredElements = getAutowiredElements(metadata,
beanDefinition.getPropertyValues());
if (!ObjectUtils.isEmpty(autowiredElements)) {
return new AotContribution(beanClass, autowiredElements, getAutowireCandidateResolver());
}
return null;
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private Collection<AutowiredElement> getAutowiredElements(InjectionMetadata metadata, PropertyValues propertyValues) {
return (Collection) metadata.getInjectedElements(propertyValues);
}
private @Nullable AutowireCandidateResolver getAutowireCandidateResolver() {
if (this.beanFactory instanceof DefaultListableBeanFactory lbf) {
return lbf.getAutowireCandidateResolver();
}
return null;
}
private InjectionMetadata findInjectionMetadata(String beanName, Class<?> beanType, RootBeanDefinition beanDefinition) {
InjectionMetadata metadata = findAutowiringMetadata(beanName, beanType, null);
metadata.checkConfigMembers(beanDefinition);
return metadata;
}
@Override
public Class<?> determineBeanType(Class<?> beanClass, String beanName) throws BeanCreationException {
checkLookupMethods(beanClass, beanName);
// Pick up subclass with fresh lookup method override from above
if (this.beanFactory instanceof AbstractAutowireCapableBeanFactory aacBeanFactory) {
RootBeanDefinition mbd = (RootBeanDefinition) this.beanFactory.getMergedBeanDefinition(beanName);
if (mbd.getFactoryMethodName() == null && mbd.hasBeanClass()) {
return aacBeanFactory.getInstantiationStrategy().getActualBeanClass(mbd, beanName, aacBeanFactory);
}
}
return beanClass;
}
@Override
public Constructor<?> @Nullable [] determineCandidateConstructors(Class<?> beanClass, final String beanName)
throws BeanCreationException {
checkLookupMethods(beanClass, beanName);
// Quick check on the concurrent map first, with minimal locking.
Constructor<?>[] candidateConstructors = this.candidateConstructorsCache.get(beanClass);
if (candidateConstructors == null) {
// Fully synchronized resolution now...
synchronized (this.candidateConstructorsCache) {
candidateConstructors = this.candidateConstructorsCache.get(beanClass);
if (candidateConstructors == null) {
Constructor<?>[] rawCandidates;
try {
rawCandidates = beanClass.getDeclaredConstructors();
}
catch (Throwable ex) {
throw new BeanCreationException(beanName,
"Resolution of declared constructors on bean Class [" + beanClass.getName() +
"] from ClassLoader [" + beanClass.getClassLoader() + "] failed", ex);
}
List<Constructor<?>> candidates = new ArrayList<>(rawCandidates.length);
Constructor<?> requiredConstructor = null;
Constructor<?> defaultConstructor = null;
Constructor<?> primaryConstructor = BeanUtils.findPrimaryConstructor(beanClass);
int nonSyntheticConstructors = 0;
for (Constructor<?> candidate : rawCandidates) {
if (!candidate.isSynthetic()) {
nonSyntheticConstructors++;
}
else if (primaryConstructor != null) {
continue;
}
MergedAnnotation<?> ann = findAutowiredAnnotation(candidate);
if (ann == null) {
Class<?> userClass = ClassUtils.getUserClass(beanClass);
if (userClass != beanClass) {
try {
Constructor<?> superCtor =
userClass.getDeclaredConstructor(candidate.getParameterTypes());
ann = findAutowiredAnnotation(superCtor);
}
catch (NoSuchMethodException ex) {
// Simply proceed, no equivalent superclass constructor found...
}
}
}
if (ann != null) {
if (requiredConstructor != null) {
throw new BeanCreationException(beanName,
"Invalid autowire-marked constructor: " + candidate +
". Found constructor with 'required' Autowired annotation already: " +
requiredConstructor);
}
boolean required = determineRequiredStatus(ann);
if (required) {
if (!candidates.isEmpty()) {
throw new BeanCreationException(beanName,
"Invalid autowire-marked constructors: " + candidates +
". Found constructor with 'required' Autowired annotation: " +
candidate);
}
requiredConstructor = candidate;
}
candidates.add(candidate);
}
else if (candidate.getParameterCount() == 0) {
defaultConstructor = candidate;
}
}
if (!candidates.isEmpty()) {
// Add default constructor to list of optional constructors, as fallback.
if (requiredConstructor == null) {
if (defaultConstructor != null) {
candidates.add(defaultConstructor);
}
else if (candidates.size() == 1 && logger.isInfoEnabled()) {
logger.info("Inconsistent constructor declaration on bean with name '" + beanName +
"': single autowire-marked constructor flagged as optional - " +
"this constructor is effectively required since there is no " +
"default constructor to fall back to: " + candidates.get(0));
}
}
candidateConstructors = candidates.toArray(EMPTY_CONSTRUCTOR_ARRAY);
}
else if (rawCandidates.length == 1 && rawCandidates[0].getParameterCount() > 0) {
candidateConstructors = new Constructor<?>[] {rawCandidates[0]};
}
else if (nonSyntheticConstructors == 2 && primaryConstructor != null &&
defaultConstructor != null && !primaryConstructor.equals(defaultConstructor)) {
candidateConstructors = new Constructor<?>[] {primaryConstructor, defaultConstructor};
}
else if (nonSyntheticConstructors == 1 && primaryConstructor != null) {
candidateConstructors = new Constructor<?>[] {primaryConstructor};
}
else {
candidateConstructors = EMPTY_CONSTRUCTOR_ARRAY;
}
this.candidateConstructorsCache.put(beanClass, candidateConstructors);
}
}
}
return (candidateConstructors.length > 0 ? candidateConstructors : null);
}
private void checkLookupMethods(Class<?> beanClass, final String beanName) throws BeanCreationException {
if (!this.lookupMethodsChecked.contains(beanName)) {
if (AnnotationUtils.isCandidateClass(beanClass, Lookup.class)) {
try {
Class<?> targetClass = beanClass;
do {
ReflectionUtils.doWithLocalMethods(targetClass, method -> {
Lookup lookup = method.getAnnotation(Lookup.class);
if (lookup != null) {
Assert.state(this.beanFactory != null, "No BeanFactory available");
LookupOverride override = new LookupOverride(method, lookup.value());
try {
RootBeanDefinition mbd = (RootBeanDefinition)
this.beanFactory.getMergedBeanDefinition(beanName);
mbd.getMethodOverrides().addOverride(override);
}
catch (NoSuchBeanDefinitionException ex) {
throw new BeanCreationException(beanName,
"Cannot apply @Lookup to beans without corresponding bean definition");
}
}
});
targetClass = targetClass.getSuperclass();
}
while (targetClass != null && targetClass != Object.class);
}
catch (IllegalStateException ex) {
throw new BeanCreationException(beanName, "Lookup method resolution failed", ex);
}
}
this.lookupMethodsChecked.add(beanName);
}
}
@Override
public PropertyValues postProcessProperties(PropertyValues pvs, Object bean, String beanName) {
InjectionMetadata metadata = findAutowiringMetadata(beanName, bean.getClass(), pvs);
try {
metadata.inject(bean, beanName, pvs);
}
catch (BeanCreationException ex) {
throw ex;
}
catch (Throwable ex) {
throw new BeanCreationException(beanName, "Injection of autowired dependencies failed", ex);
}
return pvs;
}
/**
* <em>Native</em> processing method for direct calls with an arbitrary target
* instance, resolving all of its fields and methods which are annotated with
* one of the configured 'autowired' annotation types.
* @param bean the target instance to process
* @throws BeanCreationException if autowiring failed
* @see #setAutowiredAnnotationTypes(Set)
*/
public void processInjection(Object bean) throws BeanCreationException {
Class<?> clazz = bean.getClass();
InjectionMetadata metadata = findAutowiringMetadata(clazz.getName(), clazz, null);
try {
metadata.inject(bean, null, null);
}
catch (BeanCreationException ex) {
throw ex;
}
catch (Throwable ex) {
throw new BeanCreationException(
"Injection of autowired dependencies failed for class [" + clazz + "]", ex);
}
}
private InjectionMetadata findAutowiringMetadata(String beanName, Class<?> clazz, @Nullable PropertyValues pvs) {
// Fall back to
|
AutowiredAnnotationBeanPostProcessor
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/records/GetSubClusterPolicyConfigurationRequest.java
|
{
"start": 1318,
"end": 2096
}
|
class ____ {
@Private
@Unstable
public static GetSubClusterPolicyConfigurationRequest newInstance(
String queueName) {
GetSubClusterPolicyConfigurationRequest request =
Records.newRecord(GetSubClusterPolicyConfigurationRequest.class);
request.setQueue(queueName);
return request;
}
/**
* Get the name of the queue for which we are requesting a policy
* configuration.
*
* @return the name of the queue
*/
@Public
@Unstable
public abstract String getQueue();
/**
* Sets the name of the queue for which we are requesting a policy
* configuration.
*
* @param queueName the name of the queue
*/
@Private
@Unstable
public abstract void setQueue(String queueName);
}
|
GetSubClusterPolicyConfigurationRequest
|
java
|
apache__camel
|
components/camel-saxon/src/test/java/org/apache/camel/builder/saxon/BeanWithXQueryInjectionTest.java
|
{
"start": 1193,
"end": 2081
}
|
class ____ extends CamelTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(BeanWithXQueryInjectionTest.class);
protected MyBean myBean = new MyBean();
@Test
public void testSendMessage() {
String expectedBody = "<foo id='bar'>hellow</foo>";
template.sendBodyAndHeader("direct:in", expectedBody, "foo", "bar");
assertEquals(expectedBody, myBean.body, "bean body: " + myBean);
assertEquals("bar", myBean.foo, "bean foo: " + myBean);
}
@Override
protected void bindToRegistry(Registry registry) {
registry.bind("myBean", myBean);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:in").bean("myBean");
}
};
}
public static
|
BeanWithXQueryInjectionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/CollectionClassificationXmlAnnotation.java
|
{
"start": 552,
"end": 1625
}
|
class ____ implements CollectionClassification {
private LimitedCollectionClassification value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public CollectionClassificationXmlAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public CollectionClassificationXmlAnnotation(
CollectionClassification annotation,
ModelsContext modelContext) {
this.value = annotation.value();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public CollectionClassificationXmlAnnotation(
Map<String, Object> attributeValues,
ModelsContext modelContext) {
this.value = (LimitedCollectionClassification) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return CollectionClassification.class;
}
@Override
public LimitedCollectionClassification value() {
return value;
}
public void value(LimitedCollectionClassification value) {
this.value = value;
}
}
|
CollectionClassificationXmlAnnotation
|
java
|
apache__camel
|
components/camel-irc/src/test/java/org/apache/camel/component/irc/it/IrcRouteIT.java
|
{
"start": 1394,
"end": 3456
}
|
class ____ extends IrcIntegrationITSupport {
private static final Logger LOGGER = LoggerFactory.getLogger(IrcRouteIT.class);
protected String body1 = "Message One";
protected String body2 = "Message Two";
private boolean sentMessages;
@Test
public void testIrcMessages() throws Exception {
resultEndpoint.expectedBodiesReceivedInAnyOrder(body1, body2);
resultEndpoint.assertIsSatisfied();
List<Exchange> list = resultEndpoint.getReceivedExchanges();
for (Exchange exchange : list) {
LOGGER.info("Received exchange: " + exchange + " headers: " + exchange.getIn().getHeaders());
}
}
@Override
protected String sendUri() {
return "irc://{{camelTo}}@{{non.ssl.server}}?channels={{channel1}}";
}
@Override
protected String fromUri() {
return "irc://{{camelFrom}}@{{non.ssl.server}}?&channels={{channel1}}";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(fromUri()).choice().when(header(IrcConstants.IRC_MESSAGE_TYPE).isEqualTo("PRIVMSG"))
.to("direct:mock").when(header(IrcConstants.IRC_MESSAGE_TYPE).isEqualTo("JOIN"))
.to("seda:consumerJoined");
from("seda:consumerJoined").process(new Processor() {
public void process(Exchange exchange) {
sendMessages();
}
});
from("direct:mock").filter(e -> !e.getIn().getBody(String.class).contains("VERSION")).to(resultEndpoint);
}
};
}
/**
* Lets send messages once the consumer has joined
*/
protected void sendMessages() {
if (!sentMessages) {
sentMessages = true;
// now the consumer has joined, lets send some messages
template.sendBody(sendUri(), body1);
template.sendBody(sendUri(), body2);
}
}
}
|
IrcRouteIT
|
java
|
google__dagger
|
javatests/dagger/functional/names/ComponentFactoryNameConflictsTest.java
|
{
"start": 2229,
"end": 2322
}
|
interface ____ {
BuilderUsage getBuilderUsage();
@Component.Builder
|
BuilderComponent
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/InheritanceState.java
|
{
"start": 1470,
"end": 7799
}
|
class ____ {
private ClassDetails classDetails;
/**
* Has sibling (either mappedsuperclass entity)
*/
private boolean hasSiblings = false;
/**
* a mother entity is available
*/
private boolean hasParents = false;
private InheritanceType type;
private boolean isEmbeddableSuperclass = false;
private final Map<ClassDetails, InheritanceState> inheritanceStatePerClass;
private final List<ClassDetails> classesToProcessForMappedSuperclass = new ArrayList<>();
private final MetadataBuildingContext buildingContext;
private AccessType accessType;
private ElementsToProcess elementsToProcess;
private Boolean hasIdClassOrEmbeddedId;
public InheritanceState(
ClassDetails classDetails,
Map<ClassDetails, InheritanceState> inheritanceStatePerClass,
MetadataBuildingContext buildingContext) {
this.setClassDetails( classDetails );
this.buildingContext = buildingContext;
this.inheritanceStatePerClass = inheritanceStatePerClass;
extractInheritanceType( classDetails );
}
private void extractInheritanceType(ClassDetails classDetails) {
final var inheritance = classDetails.getDirectAnnotationUsage( Inheritance.class );
final var mappedSuperclass = classDetails.getDirectAnnotationUsage( MappedSuperclass.class );
if ( mappedSuperclass != null ) {
setEmbeddableSuperclass( true );
setType( inheritance == null ? null : inheritance.strategy() );
}
else {
setType( inheritance == null ? SINGLE_TABLE : inheritance.strategy() );
}
}
public boolean hasTable() {
return !hasParents() || SINGLE_TABLE != getType();
}
public boolean hasDenormalizedTable() {
return hasParents() && TABLE_PER_CLASS == getType();
}
public static InheritanceState getInheritanceStateOfSuperEntity(
ClassDetails classDetails,
Map<ClassDetails, InheritanceState> states) {
ClassDetails candidate = classDetails;
do {
candidate = candidate.getSuperClass();
final var currentState = states.get( candidate );
if ( currentState != null && !currentState.isEmbeddableSuperclass() ) {
return currentState;
}
}
while ( candidate != null && !OBJECT_CLASS_NAME.equals( candidate.getName() ) );
return null;
}
public static InheritanceState getSuperclassInheritanceState(
ClassDetails classDetails,
Map<ClassDetails, InheritanceState> states) {
ClassDetails superclass = classDetails;
do {
superclass = superclass.getSuperClass();
final var currentState = states.get( superclass );
if ( currentState != null ) {
return currentState;
}
}
while ( superclass != null && !OBJECT_CLASS_NAME.equals( superclass.getName() ) );
return null;
}
public ClassDetails getClassDetails() {
return classDetails;
}
public void setClassDetails(ClassDetails classDetails) {
this.classDetails = classDetails;
}
public boolean hasSiblings() {
return hasSiblings;
}
public void setHasSiblings(boolean hasSiblings) {
this.hasSiblings = hasSiblings;
}
public boolean hasParents() {
return hasParents;
}
public void setHasParents(boolean hasParents) {
this.hasParents = hasParents;
}
public InheritanceType getType() {
return type;
}
public void setType(InheritanceType type) {
this.type = type;
}
public boolean isEmbeddableSuperclass() {
return isEmbeddableSuperclass;
}
public void setEmbeddableSuperclass(boolean embeddableSuperclass) {
isEmbeddableSuperclass = embeddableSuperclass;
}
public ElementsToProcess postProcess(PersistentClass persistenceClass, EntityBinder entityBinder) {
//make sure we run elements to process
getElementsToProcess();
addMappedSuperClassInMetadata( persistenceClass );
entityBinder.setPropertyAccessType( accessType );
return elementsToProcess;
}
public void postProcess(Component component) {
if ( classesToProcessForMappedSuperclass.isEmpty() ) {
// Component classes might be processed more than once,
// so only do this the first time we encounter them
getMappedSuperclassesTillNextEntityOrdered();
}
addMappedSuperClassInMetadata( component );
}
public ClassDetails getClassWithIdClass(boolean evenIfSubclass) {
if ( !evenIfSubclass && hasParents() ) {
return null;
}
else if ( classDetails.hasDirectAnnotationUsage( IdClass.class ) ) {
return classDetails;
}
else {
final long count =
Stream.concat( classDetails.getFields().stream(), classDetails.getMethods().stream() )
.filter( member -> member.hasDirectAnnotationUsage( Id.class ) )
.count();
if ( count > 1 ) {
return classDetails;
}
else {
final var state = getSuperclassInheritanceState( classDetails, inheritanceStatePerClass );
return state == null ? null : state.getClassWithIdClass( true );
}
}
}
public Boolean hasIdClassOrEmbeddedId() {
if ( hasIdClassOrEmbeddedId == null ) {
hasIdClassOrEmbeddedId = false;
if ( getClassWithIdClass( true ) != null ) {
hasIdClassOrEmbeddedId = true;
}
else {
for ( PropertyData property : getElementsToProcess().getElements() ) {
if ( property.getAttributeMember().hasDirectAnnotationUsage( EmbeddedId.class ) ) {
hasIdClassOrEmbeddedId = true;
break;
}
}
}
}
return hasIdClassOrEmbeddedId;
}
/*
* Get the annotated elements and determine access type from hierarchy,
* guessing from @Id or @EmbeddedId presence if not specified.
* Change EntityBinder by side effect
*/
private ElementsToProcess getElementsToProcess() {
if ( elementsToProcess == null ) {
final var inheritanceState = inheritanceStatePerClass.get( classDetails );
assert !inheritanceState.isEmbeddableSuperclass();
getMappedSuperclassesTillNextEntityOrdered();
accessType = determineDefaultAccessType();
final ArrayList<PropertyData> elements = new ArrayList<>();
int idPropertyCount = 0;
for ( ClassDetails classToProcessForMappedSuperclass : classesToProcessForMappedSuperclass ) {
final var container =
new PropertyContainer( classToProcessForMappedSuperclass, classDetails, accessType );
idPropertyCount = addElementsOfClass( elements, container, buildingContext, idPropertyCount );
}
if ( idPropertyCount == 0 && !inheritanceState.hasParents() ) {
throw new AnnotationException( "Entity '" + classDetails.getName() + "' has no identifier"
+ " (every '@Entity'
|
InheritanceState
|
java
|
apache__camel
|
components/camel-servicenow/camel-servicenow-maven-plugin/src/test/java/org/apache/camel/maven/CamelServiceNowMojoTestSupport.java
|
{
"start": 1026,
"end": 2295
}
|
class ____ {
protected CamelServiceNowGenerateMojo createMojo() throws IOException {
CamelServiceNowGenerateMojo mojo = new CamelServiceNowGenerateMojo();
mojo.setLog(new SystemStreamLog());
// set defaults
mojo.instanceName = getSystemPropertyOrEnvVar("servicenow.instance");
mojo.userName = getSystemPropertyOrEnvVar("servicenow.username");
mojo.userPassword = getSystemPropertyOrEnvVar("servicenow.password");
mojo.oauthClientId = getSystemPropertyOrEnvVar("servicenow.oauth2.client.id");
mojo.oauthClientSecret = getSystemPropertyOrEnvVar("servicenow.oauth2.client.secret");
mojo.outputDirectory = new File("target/generated-sources/camel-servicenow");
mojo.packageName = "org.apache.camel.servicenow.dto";
FileUtils.deleteDirectory(mojo.outputDirectory);
return mojo;
}
public static String getSystemPropertyOrEnvVar(String systemProperty) {
String answer = System.getProperty(systemProperty);
if (ObjectHelper.isEmpty(answer)) {
String envProperty = systemProperty.toUpperCase().replaceAll("[.-]", "_");
answer = System.getenv(envProperty);
}
return answer;
}
}
|
CamelServiceNowMojoTestSupport
|
java
|
quarkusio__quarkus
|
integration-tests/oidc-client-wiremock/src/test/java/io/quarkus/it/keycloak/KeycloakRealmResourceManager.java
|
{
"start": 831,
"end": 14417
}
|
class ____ implements QuarkusTestResourceLifecycleManager {
private static final Logger LOG = Logger.getLogger(KeycloakRealmResourceManager.class);
private WireMockServer server;
@Override
public Map<String, String> start() {
server = new WireMockServer(wireMockConfig().dynamicPort().useChunkedTransferEncoding(ChunkedEncodingPolicy.NEVER));
server.start();
server.stubFor(WireMock.post("/tokens")
.withRequestBody(matching("grant_type=password&username=alice&password=alice"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_1\", \"expires_in\":6, \"refresh_token\":\"refresh_token_1\"}")));
server.stubFor(WireMock.post("/tokens-exchange")
.withRequestBody(containing("grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Atoken-exchange"))
.withRequestBody(containing("subject_token=token_to_be_exchanged"))
.withRequestBody(containing("subject_token_type=urn%3Aietf%3Aparams%3Aoauth%3Atoken-type%3Aaccess_token"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_exchanged\", \"expires_in\":4}")));
server.stubFor(WireMock.post("/tokens-jwtbearer")
.withRequestBody(matching("grant_type=client_credentials&"
+ "client_assertion_type=urn%3Aietf%3Aparams%3Aoauth%3Aclient-assertion-type%3Ajwt-bearer&"
+ "client_assertion=123456"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_jwt_bearer\", \"expires_in\":4, \"refresh_token\":\"refresh_token_jwt_bearer\"}")));
server.stubFor(WireMock.post("/tokens-jwtbearer-forcenewtoken")
.withRequestBody(matching("grant_type=client_credentials&"
+ "client_assertion_type=urn%3Aietf%3Aparams%3Aoauth%3Aclient-assertion-type%3Ajwt-bearer&"
+ "client_assertion=123456"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_jwt_bearer_always_new\", \"expires_in\":4, \"refresh_token\":\"refresh_token_jwt_bearer\"}")));
server.stubFor(WireMock.post("/tokens-jwtbearer-grant")
.withRequestBody(containing("grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&"
+ "assertion="))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_jwt_bearer_grant\", \"expires_in\":4, \"refresh_token\":\"refresh_token_jwt_bearer\"}")));
String jwtBearerToken = Jwt.preferredUserName("Arnold")
.issuer("https://server.example.com")
.audience("https://service.example.com")
.expiresIn(Duration.ofMinutes(30))
.signWithSecret("43".repeat(20));
var jwtBearerTokenPath = Path.of("target").resolve("bearer-token-client-assertion.json");
try {
Files.writeString(jwtBearerTokenPath, jwtBearerToken);
} catch (IOException e) {
throw new RuntimeException("Failed to prepare file with a client assertion", e);
}
server.stubFor(WireMock.post("/tokens-jwtbearer-file")
.withRequestBody(matching("grant_type=client_credentials&"
+ "client_assertion=" + jwtBearerToken
+ "&client_assertion_type=urn%3Aietf%3Aparams%3Aoauth%3Aclient-assertion-type%3Ajwt-bearer"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_jwt_bearer\", \"expires_in\":4, \"refresh_token\":\"refresh_token_jwt_bearer\"}")));
server.stubFor(WireMock.post("/tokens_public_client")
.withRequestBody(matching("grant_type=password&username=alice&password=alice&client_id=quarkus-app"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_public_client\", \"expires_in\":20}")));
server.stubFor(WireMock.post("/non-standard-tokens")
.withHeader("X-Custom", matching("XCustomHeaderValue"))
.withHeader("GrantType", matching("password"))
.withHeader("client-id", containing("non-standard-response"))
.withRequestBody(matching(
"grant_type=password&audience=audience1&username=alice&password=alice&extra_param=extra_param_value&custom_prop=custom_value"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"accessToken\":\"access_token_n\", \"expiresIn\":\"4\", \"refreshToken\":\"refresh_token_n\"}")));
server.stubFor(WireMock.post("/tokens")
.withRequestBody(matching("grant_type=refresh_token&refresh_token=refresh_token_1"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_2\", \"expires_in\":6, \"refresh_token\":\"refresh_token_2\", \"refresh_expires_in\":1}")));
server.stubFor(WireMock.post("/tokens-without-expires-in")
.withRequestBody(matching("grant_type=client_credentials&client_id=quarkus-app&client_secret=secret"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_without_expires_in\"}")));
server.stubFor(WireMock.post("/refresh-token-only")
.withRequestBody(
matching("grant_type=refresh_token&refresh_token=shared_refresh_token&extra_param=extra_param_value"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"temp_access_token\", \"expires_in\":4}")));
server.stubFor(WireMock.post("/ciba-token")
.withRequestBody(matching(
"grant_type=urn%3Aopenid%3Aparams%3Agrant-type%3Aciba&client_id=quarkus-app&client_secret=secret&auth_req_id=16cdaa49-9591-4b63-b188-703fa3b25031"))
.willReturn(WireMock
.badRequest()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"error\":\"expired_token\"}")));
server.stubFor(WireMock.post("/ciba-token")
.withRequestBody(matching(
"grant_type=urn%3Aopenid%3Aparams%3Agrant-type%3Aciba&client_id=quarkus-app&client_secret=secret&auth_req_id=b1493f2f-c25c-40f5-8d69-94e2ad4b06df"))
.inScenario("auth-device-approval")
.whenScenarioStateIs(CibaAuthDeviceApprovalState.PENDING.name())
.willReturn(WireMock
.badRequest()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"error\":\"authorization_pending\"}")));
server.stubFor(WireMock.post("/ciba-token")
.withRequestBody(matching(
"grant_type=urn%3Aopenid%3Aparams%3Agrant-type%3Aciba&client_id=quarkus-app&client_secret=secret&auth_req_id=b1493f2f-c25c-40f5-8d69-94e2ad4b06df"))
.inScenario("auth-device-approval")
.whenScenarioStateIs(CibaAuthDeviceApprovalState.DENIED.name())
.willReturn(WireMock
.badRequest()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"error\":\"access_denied\"}")));
server.stubFor(WireMock.post("/ciba-token")
.withRequestBody(matching(
"grant_type=urn%3Aopenid%3Aparams%3Agrant-type%3Aciba&client_id=quarkus-app&client_secret=secret&auth_req_id=b1493f2f-c25c-40f5-8d69-94e2ad4b06df"))
.inScenario("auth-device-approval")
.whenScenarioStateIs(CibaAuthDeviceApprovalState.APPROVED.name())
.willReturn(WireMock
.ok()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"ciba_access_token\", \"expires_in\":4, \"refresh_token\":\"ciba_refresh_token\"}")));
server.stubFor(WireMock.post("/device-token")
.withRequestBody(matching(
"grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Adevice_code&client_id=quarkus-app&client_secret=secret&device_code=123456789"))
.willReturn(WireMock
.ok()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"device_code_access_token\", \"expires_in\":4}")));
// delay to expand the gap for concurrency tests
server.stubFor(WireMock.post("/tokens-with-delay")
.withRequestBody(matching("grant_type=password&username=alice&password=alice"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_1\", \"expires_in\":1, \"refresh_token\":\"refresh_token_1\"}")
.withFixedDelay(50)));
server.stubFor(WireMock.post("/tokens-with-delay")
.withRequestBody(matching("grant_type=refresh_token&refresh_token=refresh_token_1"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody(
"{\"access_token\":\"access_token_2\", \"expires_in\":1, \"refresh_token\":\"refresh_token_2\", \"refresh_expires_in\":1}")
.withFixedDelay(50)));
server.stubFor(WireMock.post("/tokens-refresh-test")
.withRequestBody(matching("grant_type=password&username=alice&password=alice"))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody("{\"access_token\":\"access_token_1\", \"expires_in\":3, " +
"\"refresh_token\":\"refresh_token_1\", \"refresh_expires_in\": 100}")));
IntStream.range(0, 20).forEach(i -> {
int nextIndex = i + 1;
server.stubFor(WireMock.post("/tokens-refresh-test")
.withRequestBody(matching("grant_type=refresh_token&refresh_token=refresh_token_" + i))
.willReturn(WireMock
.aResponse()
.withHeader("Content-Type", MediaType.APPLICATION_JSON)
.withBody("{\"access_token\":\"access_token_" + nextIndex
+ "\", \"expires_in\":3, \"refresh_token\":\"refresh_token_"
+ nextIndex + "\", \"refresh_expires_in\":100}")));
});
LOG.infof("Keycloak started in mock mode: %s", server.baseUrl());
Map<String, String> conf = new HashMap<>();
conf.put("keycloak.url", server.baseUrl());
conf.put("token-path", jwtBearerTokenPath.toString());
return conf;
}
@Override
public synchronized void stop() {
if (server != null) {
server.stop();
LOG.info("Keycloak was shut down");
server = null;
}
}
@Override
public void inject(TestInjector testInjector) {
testInjector.injectIntoFields(server,
new TestInjector.AnnotatedAndMatchesType(InjectWireMock.class, WireMockServer.class));
}
}
|
KeycloakRealmResourceManager
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/SuggesterTest.java
|
{
"start": 4020,
"end": 4555
}
|
class ____ {
@InlineMe(replacement = "Duration.ofMillis(millis)", imports = "java.time.Duration")
@Deprecated
public Duration fromMillis(long millis) {
return Duration.ofMillis(millis);
}
}
""")
.doTest();
}
@Test
public void unqualifiedStaticFieldReference() {
refactoringTestHelper
.addInputLines(
"Client.java",
"""
package com.google.frobber;
public final
|
Client
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/script/ScriptMetadata.java
|
{
"start": 2186,
"end": 3839
}
|
class ____ {
private final Map<String, StoredScriptSource> scripts;
/**
* @param previous The current {@link ScriptMetadata} or {@code null} if there
* is no existing {@link ScriptMetadata}.
*/
public Builder(ScriptMetadata previous) {
this.scripts = previous == null ? new HashMap<>() : new HashMap<>(previous.scripts);
}
/**
* Add a new script to the existing stored scripts based on a user-specified id. If
* a script with the same id already exists it will be overwritten.
* @param id The user-specified id to use for the look up.
* @param source The user-specified stored script data held in {@link StoredScriptSource}.
*/
public Builder storeScript(String id, StoredScriptSource source) {
scripts.put(id, source);
return this;
}
/**
* Delete a script from the existing stored scripts based on a user-specified id.
* @param id The user-specified id to use for the look up.
*/
public Builder deleteScript(String id) {
StoredScriptSource deleted = scripts.remove(id);
if (deleted == null) {
throw new ResourceNotFoundException("stored script [" + id + "] does not exist and cannot be deleted");
}
return this;
}
/**
* @return A {@link ScriptMetadata} with the updated {@link Map} of scripts.
*/
public ScriptMetadata build() {
return new ScriptMetadata(scripts);
}
}
static final
|
Builder
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/FindIdentifiersTest.java
|
{
"start": 38416,
"end": 38698
}
|
class ____ {}
}
""")
.doTest();
}
@Test
public void aVisibleOutsideClass() {
CompilationTestHelper.newInstance(IsAVisible.class, getClass())
.addSourceLines(
"A.java",
"""
package pkg;
|
A
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/annotation/DependsOn.java
|
{
"start": 1916,
"end": 2210
}
|
class ____ declared via XML,
* {@link DependsOn} annotation metadata is ignored, and
* {@code <bean depends-on="..."/>} is respected instead.
*
* @author Juergen Hoeller
* @since 3.0
*/
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @
|
is
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/ExceptionInWriterTest.java
|
{
"start": 371,
"end": 954
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(Cheese.class, CheeseEndpoint.class);
}
});
@Test
public void test() {
RestAssured.with().header("Accept", "text/plain", "application/json").get("/cheese")
.then().statusCode(500);
}
}
|
ExceptionInWriterTest
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-redshift/src/main/java/org/apache/camel/component/aws2/redshift/data/RedshiftData2Operations.java
|
{
"start": 866,
"end": 1108
}
|
enum ____ {
listDatabases,
listSchemas,
listStatements,
listTables,
describeTable,
executeStatement,
batchExecuteStatement,
cancelStatement,
describeStatement,
getStatementResult
}
|
RedshiftData2Operations
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/tck/PublishSelectorTckTest.java
|
{
"start": 878,
"end": 1185
}
|
class ____ extends BaseTck<Integer> {
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public Publisher<Integer> createPublisher(long elements) {
return
Flowable.range(0, (int)elements).publish((Function)Functions.identity())
;
}
}
|
PublishSelectorTckTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java
|
{
"start": 1432,
"end": 4172
}
|
class ____<V> implements Future<V> {
private V result = null;
private Exception ex = null;
DirectFuture(Callable<V> c) {
try {
result = c.call();
} catch (Exception e) {
ex = e;
}
}
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return true;
}
@Override
public V get() throws InterruptedException, ExecutionException {
if (ex != null) {
throw new ExecutionException(ex);
}
return result;
}
@Override
public V get(long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
return get();
}
}
private boolean isShutdown = false;
@Override
synchronized public void shutdown() {
isShutdown = true;
}
@Override
public List<Runnable> shutdownNow() {
throw new UnsupportedOperationException();
}
@Override
public boolean isShutdown() {
return isShutdown;
}
@Override
synchronized public boolean isTerminated() {
return isShutdown;
}
@Override
public boolean awaitTermination(long timeout, TimeUnit unit)
throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
synchronized public <T> Future<T> submit(Callable<T> task) {
if (isShutdown) {
throw new RejectedExecutionException("ExecutorService was shutdown");
}
return new DirectFuture<>(task);
}
@Override
public <T> Future<T> submit(Runnable task, T result) {
throw new UnsupportedOperationException();
}
@Override
public Future<?> submit(Runnable task) {
throw new UnsupportedOperationException();
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks,
long timeout, TimeUnit unit) throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
throws InterruptedException, ExecutionException {
throw new UnsupportedOperationException();
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout,
TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
throw new UnsupportedOperationException();
}
@Override
synchronized public void execute(Runnable command) {
command.run();
}
}
|
DirectFuture
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
|
{
"start": 3985,
"end": 30104
}
|
class ____ extends AbstractIndicesClusterStateServiceTestCase {
private ThreadPool threadPool;
private ClusterStateChanges cluster;
@Override
public void setUp() throws Exception {
super.setUp();
threadPool = new TestThreadPool(getClass().getName());
cluster = new ClusterStateChanges(xContentRegistry(), threadPool);
}
@Override
public void tearDown() throws Exception {
super.tearDown();
terminate(threadPool);
}
public void testRandomClusterStateUpdates() {
// we have an IndicesClusterStateService per node in the cluster
final Map<DiscoveryNode, IndicesClusterStateService> clusterStateServiceMap = new HashMap<>();
ClusterState state = randomInitialClusterState(clusterStateServiceMap, MockIndicesService::new);
// each of the following iterations represents a new cluster state update processed on all nodes
for (int i = 0; i < 30; i++) {
logger.info("Iteration {}", i);
final ClusterState previousState = state;
// calculate new cluster state
for (int j = 0; j < randomInt(3); j++) { // multiple iterations to simulate batching of cluster states
try {
state = randomlyUpdateClusterState(state, clusterStateServiceMap, MockIndicesService::new);
} catch (AssertionError error) {
ClusterState finalState = state;
logger.error(() -> format("failed to random change state. last good state: \n%s", finalState), error);
throw error;
}
}
// apply cluster state to nodes (incl. master)
for (DiscoveryNode node : state.nodes()) {
IndicesClusterStateService indicesClusterStateService = clusterStateServiceMap.get(node);
ClusterState localState = adaptClusterStateToLocalNode(state, node);
ClusterState previousLocalState = adaptClusterStateToLocalNode(previousState, node);
final ClusterChangedEvent event = new ClusterChangedEvent("simulated change " + i, localState, previousLocalState);
try {
indicesClusterStateService.applyClusterState(event);
} catch (AssertionError error) {
logger.error(
() -> format(
"failed to apply change on [%s].\n *** Previous state ***\n%s\n *** New state ***\n%s",
node,
event.previousState(),
event.state()
),
error
);
throw error;
}
// check that cluster state has been properly applied to node
assertClusterStateMatchesNodeState(localState, indicesClusterStateService);
}
}
// TODO: check if we can go to green by starting all shards and finishing all iterations
logger.info("Final cluster state: {}", state);
}
/**
* This test ensures that when a node joins a brand new cluster (different cluster UUID),
* different from the cluster it was previously a part of, the in-memory index data structures
* are all removed but the on disk contents of those indices remain so that they can later be
* imported as dangling indices. Normally, the first cluster state update that the node
* receives from the new cluster would contain a cluster block that would cause all in-memory
* structures to be removed (see {@link IndicesClusterStateService#applyClusterState(ClusterChangedEvent)}),
* but in the case where the node joined and was a few cluster state updates behind, it would
* not have received the cluster block, in which case we still need to remove the in-memory
* structures while ensuring the data remains on disk. This test executes this particular
* scenario.
*/
public void testJoiningNewClusterOnlyRemovesInMemoryIndexStructures() {
// a cluster state derived from the initial state that includes a created index
String name = "index_" + randomAlphaOfLength(8).toLowerCase(Locale.ROOT);
ShardRoutingState[] replicaStates = new ShardRoutingState[randomIntBetween(0, 3)];
Arrays.fill(replicaStates, ShardRoutingState.UNASSIGNED);
ClusterState stateWithIndex = ClusterStateCreationUtils.state(name, randomBoolean(), ShardRoutingState.INITIALIZING, replicaStates);
// the initial state which is derived from the newly created cluster state but doesn't contain the index
ClusterState initialState = ClusterState.builder(stateWithIndex)
.metadata(Metadata.builder(stateWithIndex.metadata()).remove(name))
.routingTable(RoutingTable.builder().build())
.build();
// pick a data node to simulate the adding an index cluster state change event on, that has shards assigned to it
DiscoveryNode node = stateWithIndex.nodes()
.get(randomFrom(stateWithIndex.routingTable().index(name).shardsWithState(INITIALIZING)).currentNodeId());
// simulate the cluster state change on the node
ClusterState localState = adaptClusterStateToLocalNode(stateWithIndex, node);
ClusterState previousLocalState = adaptClusterStateToLocalNode(initialState, node);
IndicesClusterStateService indicesCSSvc = createIndicesClusterStateService(node, RecordingIndicesService::new);
indicesCSSvc.start();
indicesCSSvc.applyClusterState(new ClusterChangedEvent("cluster state change that adds the index", localState, previousLocalState));
// create a new empty cluster state with a brand new cluster UUID
ClusterState newClusterState = ClusterState.builder(initialState)
.metadata(Metadata.builder(initialState.metadata()).clusterUUID(UUIDs.randomBase64UUID()))
.build();
// simulate the cluster state change on the node
localState = adaptClusterStateToLocalNode(newClusterState, node);
previousLocalState = adaptClusterStateToLocalNode(stateWithIndex, node);
indicesCSSvc.applyClusterState(
new ClusterChangedEvent(
"cluster state change with a new cluster UUID (and doesn't contain the index)",
localState,
previousLocalState
)
);
// check that in memory data structures have been removed once the new cluster state is applied,
// but the persistent data is still there
RecordingIndicesService indicesService = (RecordingIndicesService) indicesCSSvc.indicesService;
for (IndexMetadata indexMetadata : stateWithIndex.metadata().getProject()) {
Index index = indexMetadata.getIndex();
assertNull(indicesService.indexService(index));
assertFalse(indicesService.isDeleted(index));
}
}
/**
* In rare cases it is possible that a nodes gets an instruction to replace a replica
* shard that's in POST_RECOVERY with a new initializing primary with the same allocation id.
* This can happen by batching cluster states that include the starting of the replica, with
* closing of the indices, opening it up again and allocating the primary shard to the node in
* question. The node should then clean it's initializing replica and replace it with a new
* initializing primary.
*/
public void testInitializingPrimaryRemovesInitializingReplicaWithSameAID() {
disableRandomFailures();
String index = "index_" + randomAlphaOfLength(8).toLowerCase(Locale.ROOT);
ClusterState state = ClusterStateCreationUtils.state(
index,
randomBoolean(),
ShardRoutingState.STARTED,
ShardRoutingState.INITIALIZING
);
state = ClusterState.builder(state).nodes(state.nodes().withMasterNodeId(state.nodes().getLocalNodeId())).build();
// the initial state which is derived from the newly created cluster state but doesn't contain the index
ClusterState previousState = ClusterState.builder(state)
.metadata(Metadata.builder(state.metadata()).remove(index))
.routingTable(RoutingTable.builder().build())
.build();
// pick a data node to simulate the adding an index cluster state change event on, that has shards assigned to it
final ShardRouting shardRouting = state.routingTable().index(index).shard(0).replicaShards().get(0);
final ShardId shardId = shardRouting.shardId();
DiscoveryNode node = state.nodes().get(shardRouting.currentNodeId());
// simulate the cluster state change on the node
ClusterState localState = adaptClusterStateToLocalNode(state, node);
ClusterState previousLocalState = adaptClusterStateToLocalNode(previousState, node);
IndicesClusterStateService indicesCSSvc = createIndicesClusterStateService(node, RecordingIndicesService::new);
indicesCSSvc.start();
indicesCSSvc.applyClusterState(new ClusterChangedEvent("cluster state change that adds the index", localState, previousLocalState));
previousState = state;
// start the replica
state = cluster.applyStartedShards(state, state.routingTable().index(index).shard(0).replicaShards());
// close the index and open it up again (this will sometimes swap roles between primary and replica)
CloseIndexRequest closeIndexRequest = new CloseIndexRequest(state.metadata().getProject().index(index).getIndex().getName());
state = cluster.closeIndices(state, closeIndexRequest);
OpenIndexRequest openIndexRequest = new OpenIndexRequest(state.metadata().getProject().index(index).getIndex().getName());
openIndexRequest.waitForActiveShards(ActiveShardCount.NONE);
state = cluster.openIndices(state, openIndexRequest);
localState = adaptClusterStateToLocalNode(state, node);
previousLocalState = adaptClusterStateToLocalNode(previousState, node);
indicesCSSvc.applyClusterState(new ClusterChangedEvent("new cluster state", localState, previousLocalState));
final MockIndexShard shardOrNull = ((RecordingIndicesService) indicesCSSvc.indicesService).getShardOrNull(shardId);
assertThat(
shardOrNull == null ? null : shardOrNull.routingEntry(),
equalTo(state.getRoutingNodes().node(node.getId()).getByShardId(shardId))
);
}
public void testRecoveryFailures() {
disableRandomFailures();
String index = "index_" + randomAlphaOfLength(8).toLowerCase(Locale.ROOT);
ClusterState state = ClusterStateCreationUtils.state(
index,
randomBoolean(),
ShardRoutingState.STARTED,
ShardRoutingState.INITIALIZING
);
// the initial state which is derived from the newly created cluster state but doesn't contain the index
ClusterState previousState = ClusterState.builder(state)
.metadata(Metadata.builder(state.metadata()).remove(index))
.routingTable(RoutingTable.builder().build())
.build();
// pick a data node to simulate the adding an index cluster state change event on, that has shards assigned to it
final ShardRouting shardRouting = state.routingTable().index(index).shard(0).replicaShards().get(0);
final ShardId shardId = shardRouting.shardId();
DiscoveryNode node = state.nodes().get(shardRouting.currentNodeId());
// simulate the cluster state change on the node
ClusterState localState = adaptClusterStateToLocalNode(state, node);
ClusterState previousLocalState = adaptClusterStateToLocalNode(previousState, node);
IndicesClusterStateService indicesCSSvc = createIndicesClusterStateService(node, RecordingIndicesService::new);
indicesCSSvc.start();
indicesCSSvc.applyClusterState(new ClusterChangedEvent("cluster state change that adds the index", localState, previousLocalState));
assertNotNull(indicesCSSvc.indicesService.getShardOrNull(shardId));
// check that failing unrelated allocation does not remove shard
indicesCSSvc.handleRecoveryFailure(shardRouting.reinitializeReplicaShard(), false, new Exception("dummy"));
assertNotNull(indicesCSSvc.indicesService.getShardOrNull(shardId));
indicesCSSvc.handleRecoveryFailure(shardRouting, false, new Exception("dummy"));
assertNull(indicesCSSvc.indicesService.getShardOrNull(shardId));
}
public ClusterState randomInitialClusterState(
Map<DiscoveryNode, IndicesClusterStateService> clusterStateServiceMap,
Supplier<MockIndicesService> indicesServiceSupplier
) {
List<DiscoveryNode> allNodes = new ArrayList<>();
DiscoveryNode localNode = createNode(DiscoveryNodeRole.MASTER_ROLE); // local node is the master
allNodes.add(localNode);
// at least two nodes that have the data role so that we can allocate shards
allNodes.add(createNode(DiscoveryNodeRole.DATA_ROLE));
allNodes.add(createNode(DiscoveryNodeRole.DATA_ROLE));
for (int i = 0; i < randomIntBetween(2, 5); i++) {
allNodes.add(createNode());
}
ClusterState state = ClusterStateCreationUtils.state(localNode, localNode, allNodes.toArray(new DiscoveryNode[allNodes.size()]));
// add nodes to clusterStateServiceMap
updateNodes(state, clusterStateServiceMap, indicesServiceSupplier);
return state;
}
private void updateNodes(
ClusterState state,
Map<DiscoveryNode, IndicesClusterStateService> clusterStateServiceMap,
Supplier<MockIndicesService> indicesServiceSupplier
) {
for (DiscoveryNode node : state.nodes()) {
clusterStateServiceMap.computeIfAbsent(node, discoveryNode -> {
IndicesClusterStateService ics = createIndicesClusterStateService(discoveryNode, indicesServiceSupplier);
ics.start();
return ics;
});
}
for (Iterator<Entry<DiscoveryNode, IndicesClusterStateService>> it = clusterStateServiceMap.entrySet().iterator(); it.hasNext();) {
DiscoveryNode node = it.next().getKey();
if (state.nodes().nodeExists(node) == false) {
it.remove();
}
}
}
public ClusterState randomlyUpdateClusterState(
ClusterState state,
Map<DiscoveryNode, IndicesClusterStateService> clusterStateServiceMap,
Supplier<MockIndicesService> indicesServiceSupplier
) {
// randomly remove no_master blocks
if (randomBoolean() && state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID)) {
state = ClusterState.builder(state)
.blocks(ClusterBlocks.builder().blocks(state.blocks()).removeGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_ID))
.build();
}
// randomly add no_master blocks
if (rarely() && state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID) == false) {
ClusterBlock block = randomBoolean() ? NoMasterBlockService.NO_MASTER_BLOCK_ALL : NoMasterBlockService.NO_MASTER_BLOCK_WRITES;
state = ClusterState.builder(state).blocks(ClusterBlocks.builder().blocks(state.blocks()).addGlobalBlock(block)).build();
}
// if no_master block is in place, make no other cluster state changes
if (state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID)) {
return state;
}
// randomly create new indices (until we have 200 max)
for (int i = 0; i < randomInt(5); i++) {
if (state.metadata().getProject().indices().size() > 200) {
break;
}
String name = "index_" + randomAlphaOfLength(15).toLowerCase(Locale.ROOT);
Settings.Builder settingsBuilder = Settings.builder().put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 3));
if (randomBoolean()) {
int min = randomInt(2);
int max = min + randomInt(3);
settingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, randomBoolean() ? min + "-" + max : min + "-all");
} else {
settingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, randomInt(2));
}
CreateIndexRequest request = new CreateIndexRequest(name, settingsBuilder.build()).waitForActiveShards(ActiveShardCount.NONE);
state = cluster.createIndex(state, request);
assertTrue(state.metadata().getProject().hasIndex(name));
}
// randomly delete indices
Set<String> indicesToDelete = new HashSet<>();
int numberOfIndicesToDelete = randomInt(Math.min(2, state.metadata().getProject().indices().size()));
for (String index : randomSubsetOf(
numberOfIndicesToDelete,
state.metadata().getProject().indices().keySet().toArray(new String[0])
)) {
indicesToDelete.add(state.metadata().getProject().index(index).getIndex().getName());
}
if (indicesToDelete.isEmpty() == false) {
DeleteIndexRequest deleteRequest = new DeleteIndexRequest(indicesToDelete.toArray(new String[indicesToDelete.size()]));
state = cluster.deleteIndices(state, deleteRequest);
for (String index : indicesToDelete) {
assertFalse(state.metadata().getProject().hasIndex(index));
}
}
// randomly close indices
int numberOfIndicesToClose = randomInt(Math.min(1, state.metadata().getProject().indices().size()));
for (String index : randomSubsetOf(
numberOfIndicesToClose,
state.metadata().getProject().indices().keySet().toArray(new String[0])
)) {
CloseIndexRequest closeIndexRequest = new CloseIndexRequest(state.metadata().getProject().index(index).getIndex().getName());
state = cluster.closeIndices(state, closeIndexRequest);
}
// randomly open indices
int numberOfIndicesToOpen = randomInt(Math.min(1, state.metadata().getProject().indices().size()));
for (String index : randomSubsetOf(
numberOfIndicesToOpen,
state.metadata().getProject().indices().keySet().toArray(new String[0])
)) {
OpenIndexRequest openIndexRequest = new OpenIndexRequest(state.metadata().getProject().index(index).getIndex().getName());
openIndexRequest.waitForActiveShards(ActiveShardCount.NONE);
state = cluster.openIndices(state, openIndexRequest);
}
// randomly update settings
Set<String> indicesToUpdate = new HashSet<>();
boolean containsClosedIndex = false;
int numberOfIndicesToUpdate = randomInt(Math.min(2, state.metadata().getProject().indices().size()));
for (String index : randomSubsetOf(
numberOfIndicesToUpdate,
state.metadata().getProject().indices().keySet().toArray(new String[0])
)) {
indicesToUpdate.add(state.metadata().getProject().index(index).getIndex().getName());
if (state.metadata().getProject().index(index).getState() == IndexMetadata.State.CLOSE) {
containsClosedIndex = true;
}
}
if (indicesToUpdate.isEmpty() == false) {
UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(
indicesToUpdate.toArray(new String[indicesToUpdate.size()])
);
Settings.Builder settings = Settings.builder();
if (containsClosedIndex == false) {
settings.put(SETTING_NUMBER_OF_REPLICAS, randomInt(2));
}
settings.put("index.refresh_interval", randomIntBetween(1, 5) + "s");
updateSettingsRequest.settings(settings.build());
state = cluster.updateSettings(state, updateSettingsRequest);
}
// randomly reroute
if (rarely()) {
state = cluster.reroute(state, new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT));
}
// randomly start and fail allocated shards
final Map<ShardRouting, Long> startedShards = new HashMap<>();
List<FailedShard> failedShards = new ArrayList<>();
for (DiscoveryNode node : state.nodes()) {
IndicesClusterStateService indicesClusterStateService = clusterStateServiceMap.get(node);
MockIndicesService indicesService = (MockIndicesService) indicesClusterStateService.indicesService;
for (MockIndexService indexService : indicesService) {
for (MockIndexShard indexShard : indexService) {
ShardRouting persistedShardRouting = indexShard.routingEntry();
if (persistedShardRouting.initializing() && randomBoolean()) {
startedShards.put(persistedShardRouting, indexShard.term());
} else if (rarely()) {
failedShards.add(new FailedShard(persistedShardRouting, "fake shard failure", new Exception(), randomBoolean()));
}
}
}
}
state = cluster.applyFailedShards(state, failedShards);
state = cluster.applyStartedShards(state, startedShards);
// randomly add and remove nodes (except current master)
if (rarely()) {
if (randomBoolean()) {
// add node
if (state.nodes().getSize() < 10) {
state = cluster.addNode(state, createNode(), TransportVersion.current());
updateNodes(state, clusterStateServiceMap, indicesServiceSupplier);
}
} else {
// remove node
if (state.nodes().getDataNodes().size() > 3) {
DiscoveryNode discoveryNode = randomFrom(state.nodes().getNodes().values());
if (discoveryNode.equals(state.nodes().getMasterNode()) == false) {
state = cluster.removeNodes(state, Collections.singletonList(discoveryNode));
updateNodes(state, clusterStateServiceMap, indicesServiceSupplier);
}
if (randomBoolean()) {
// and add it back
state = cluster.addNode(state, discoveryNode, TransportVersion.current());
updateNodes(state, clusterStateServiceMap, indicesServiceSupplier);
}
}
}
}
// TODO: go masterless?
return state;
}
private static final AtomicInteger nodeIdGenerator = new AtomicInteger();
protected DiscoveryNode createNode(DiscoveryNodeRole... mustHaveRoles) {
Set<DiscoveryNodeRole> roles = new HashSet<>(randomSubsetOf(DiscoveryNodeRole.roles()));
Collections.addAll(roles, mustHaveRoles);
final String id = format("node_%03d", nodeIdGenerator.incrementAndGet());
return DiscoveryNodeUtils.builder(id).name(id).roles(roles).build();
}
private static ClusterState adaptClusterStateToLocalNode(ClusterState state, DiscoveryNode node) {
return ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).add(node).localNodeId(node.getId())).build();
}
private IndicesClusterStateService createIndicesClusterStateService(
DiscoveryNode discoveryNode,
final Supplier<MockIndicesService> indicesServiceSupplier
) {
final ThreadPool threadPool = mock(ThreadPool.class);
when(threadPool.generic()).thenReturn(mock(ExecutorService.class));
final MockIndicesService indicesService = indicesServiceSupplier.get();
final Settings settings = Settings.builder().put("node.name", discoveryNode.getName()).build();
final TransportService transportService = new TransportService(
settings,
mock(Transport.class),
threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR,
boundAddress -> DiscoveryNodeUtils.builder(UUIDs.randomBase64UUID())
.applySettings(settings)
.address(boundAddress.publishAddress())
.build(),
null,
Collections.emptySet()
);
final ClusterService clusterService = mock(ClusterService.class);
final NodeClient client = mock(NodeClient.class);
final RepositoriesService repositoriesService = new RepositoriesService(
settings,
clusterService,
Collections.emptyMap(),
Collections.emptyMap(),
threadPool,
client,
List.of(),
SnapshotMetrics.NOOP
);
final PeerRecoveryTargetService recoveryTargetService = new PeerRecoveryTargetService(
client,
threadPool,
transportService,
null,
clusterService,
mock(SnapshotFilesProvider.class)
);
final ShardStateAction shardStateAction = mock(ShardStateAction.class);
final PrimaryReplicaSyncer primaryReplicaSyncer = mock(PrimaryReplicaSyncer.class);
return new IndicesClusterStateService(
settings,
indicesService,
clusterService,
threadPool,
recoveryTargetService,
shardStateAction,
repositoriesService,
null,
null,
null,
primaryReplicaSyncer,
RetentionLeaseSyncer.EMPTY,
client
) {
@Override
protected void updateGlobalCheckpointForShard(final ShardId shardId) {}
};
}
private
|
IndicesClusterStateServiceRandomUpdatesTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/BlobTest.java
|
{
"start": 801,
"end": 2213
}
|
class ____ {
@Test
public void test(EntityManagerFactoryScope scope) {
Integer productId = scope.fromTransaction( entityManager -> {
//tag::basic-blob-persist-example[]
byte[] image = new byte[] {1, 2, 3};
final Product product = new Product();
product.setId(1);
product.setName("Mobile phone");
product.setImage(BlobProxy.generateProxy(image));
entityManager.persist(product);
//end::basic-blob-persist-example[]
return product.getId();
});
scope.inTransaction( entityManager -> {
try {
//tag::basic-blob-find-example[]
Product product = entityManager.find(Product.class, productId);
try (InputStream inputStream = product.getImage().getBinaryStream()) {
assertArrayEquals(new byte[] {1, 2, 3}, toBytes(inputStream));
}
//end::basic-blob-find-example[]
}
catch (Exception e) {
fail(e.getMessage());
}
});
}
private byte[] toBytes(InputStream inputStream) throws IOException {
BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
int result = bufferedInputStream.read();
while(result != -1) {
byteArrayOutputStream.write((byte) result);
result = bufferedInputStream.read();
}
return byteArrayOutputStream.toByteArray();
}
//tag::basic-blob-example[]
@Entity(name = "Product")
public static
|
BlobTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/asyncprocessing/AbstractStateIterator.java
|
{
"start": 2283,
"end": 4360
}
|
class ____<T> implements InternalStateIterator<T> {
/** The state this iterator iterates on. */
final State originalState;
/** The request type that create this iterator. */
final StateRequestType requestType;
/** The controller that can receive further requests. */
final StateRequestHandler stateHandler;
/** The already loaded partial elements. */
final Collection<T> cache;
public AbstractStateIterator(
State originalState,
StateRequestType requestType,
StateRequestHandler stateHandler,
Collection<T> partialResult) {
this.originalState = originalState;
this.requestType = requestType;
this.stateHandler = stateHandler;
this.cache = partialResult;
}
/** Return whether this iterator has more elements to load besides current cache. */
public abstract boolean hasNextLoading();
/**
* To perform following loading, build and get next payload for the next request. This will put
* into {@link StateRequest#getPayload()}.
*
* @return the packed payload for next loading.
*/
protected abstract Object nextPayloadForContinuousLoading();
public Iterable<T> getCurrentCache() {
return cache == null ? Collections.emptyList() : cache;
}
protected StateRequestType getRequestType() {
return requestType;
}
private InternalAsyncFuture<StateIterator<T>> asyncNextLoad() {
return stateHandler.handleRequest(
originalState,
StateRequestType.ITERATOR_LOADING,
nextPayloadForContinuousLoading());
}
private StateIterator<T> syncNextLoad() {
return stateHandler.handleRequestSync(
originalState,
StateRequestType.ITERATOR_LOADING,
nextPayloadForContinuousLoading());
}
@Override
public <U> StateFuture<Collection<U>> onNext(
FunctionWithException<T, StateFuture<? extends U>, Exception> iterating) {
// Public
|
AbstractStateIterator
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/PropertyAnnotationsTest.java
|
{
"start": 2505,
"end": 2608
}
|
interface ____ {
int foo() default 123;
String bar() default "bar";
}
public @
|
OtherAnnotation
|
java
|
netty__netty
|
codec-socks/src/main/java/io/netty/handler/codec/socksx/SocksMessage.java
|
{
"start": 805,
"end": 960
}
|
interface ____ extends DecoderResultProvider {
/**
* Returns the protocol version of this message.
*/
SocksVersion version();
}
|
SocksMessage
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/webjar/WebJarBuildItem.java
|
{
"start": 197,
"end": 1715
}
|
class ____ extends MultiBuildItem {
/**
* ArtifactKey pointing to the web jar. Has to be one of the applications dependencies.
*/
private final GACT artifactKey;
/**
* Root inside the webJar starting from which resources are unpacked.
*/
private final String root;
/**
* Only copy resources of the webjar which are either user overridden, or contain variables.
*/
private final boolean onlyCopyNonArtifactFiles;
/**
* Defines whether Quarkus can override resources of the webjar with Quarkus internal files.
*/
private final boolean useDefaultQuarkusBranding;
private final WebJarResourcesFilter filter;
private WebJarBuildItem(Builder builder) {
this.artifactKey = builder.artifactKey;
this.root = builder.root;
this.useDefaultQuarkusBranding = builder.useDefaultQuarkusBranding;
this.onlyCopyNonArtifactFiles = builder.onlyCopyNonArtifactFiles;
this.filter = builder.filter;
}
public GACT getArtifactKey() {
return artifactKey;
}
public String getRoot() {
return root;
}
public boolean getUseDefaultQuarkusBranding() {
return useDefaultQuarkusBranding;
}
public boolean getOnlyCopyNonArtifactFiles() {
return onlyCopyNonArtifactFiles;
}
public WebJarResourcesFilter getFilter() {
return filter;
}
public static Builder builder() {
return new Builder();
}
public static
|
WebJarBuildItem
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/injection/assignability/generics/AssignabilityWithGenericsTest.java
|
{
"start": 835,
"end": 3575
}
|
class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(Car.class, Engine.class, PetrolEngine.class,
Vehicle.class,
StringListConsumer.class, ListConsumer.class, ProducerBean.class, DefinitelyNotBar.class,
Bar.class, GenericInterface.class, AlmostCompleteBean.class, ActualBean.class,
BetaFace.class, GammaFace.class, GammaImpl.class, AbstractAlpha.class, AlphaImpl.class,
BeanInjectingActualType.class, FooTyped.class);
@Test
public void testSelectingInstanceOfCar() {
InstanceHandle<Car> instance = Arc.container().instance(Car.class);
assertTrue(instance.isAvailable());
assertNotNull(instance.get().getEngine());
}
@Test
public void testParameterizedTypeWithTypeVariable() {
InstanceHandle<StringListConsumer> instance = Arc.container().instance(StringListConsumer.class);
assertTrue(instance.isAvailable());
StringListConsumer obj = instance.get();
assertNotNull(obj.getList());
assertEquals(2, obj.getList().size());
assertEquals("qux", obj.getList().get(0));
assertEquals("quux", obj.getList().get(1));
assertNotNull(obj.getArray());
assertEquals(2, obj.getArray().length);
assertEquals("bar", obj.getArray()[0]);
assertEquals("baz", obj.getArray()[1]);
}
@Test
public void testHierarchyWithInterfacesAndMap() {
InstanceHandle<ActualBean> instance = Arc.container().instance(ActualBean.class);
assertTrue(instance.isAvailable());
assertNotNull(instance.get().getInjectedMap());
}
@Test
public void testProxiedBeanWithGenericMethodParams() {
InstanceHandle<AlphaImpl> alphaInstance = Arc.container().instance(AlphaImpl.class);
InstanceHandle<GammaImpl> gammaInstance = Arc.container().instance(GammaImpl.class);
assertTrue(alphaInstance.isAvailable());
assertTrue(gammaInstance.isAvailable());
AlphaImpl alpha = alphaInstance.get();
assertEquals(GammaImpl.class.getSimpleName(), alpha.ping(alpha.getParam()));
}
@SuppressWarnings("serial")
@Test
public void testRequiredTypeIsActualTypeAndBeanHasObject() {
InstanceHandle<FooTyped<Object>> fooTypedInstance = Arc.container().instance(new TypeLiteral<FooTyped<Object>>() {
});
assertTrue(fooTypedInstance.isAvailable());
InstanceHandle<BeanInjectingActualType> beanInjectingActualTypeInstance = Arc.container()
.instance(BeanInjectingActualType.class);
assertTrue(beanInjectingActualTypeInstance.isAvailable());
}
@ApplicationScoped
static
|
AssignabilityWithGenericsTest
|
java
|
apache__camel
|
components/camel-google/camel-google-bigquery/src/test/java/org/apache/camel/component/google/bigquery/unit/sql/GoogleBigQuerySQLComponentTest.java
|
{
"start": 1187,
"end": 2265
}
|
class ____ extends CamelTestSupport {
@Test
public void testQuerySet() throws Exception {
String uri = "google-bigquery-sql:myproject:insert into testDatasetId.testTableId(id) values(1)";
GoogleBigQuerySQLEndpoint endpoint
= (GoogleBigQuerySQLEndpoint) new GoogleBigQuerySQLComponent(context).createEndpoint(uri);
assertEquals("myproject", endpoint.getConfiguration().getProjectId());
assertEquals("insert into testDatasetId.testTableId(id) values(1)", endpoint.getConfiguration().getQueryString());
}
@Test
public void testQueryFromResourceSet() throws Exception {
String uri = "google-bigquery-sql:myproject:classpath:sql/delete.sql";
GoogleBigQuerySQLEndpoint endpoint
= (GoogleBigQuerySQLEndpoint) new GoogleBigQuerySQLComponent(context).createEndpoint(uri);
assertEquals("myproject", endpoint.getConfiguration().getProjectId());
assertEquals("classpath:sql/delete.sql", endpoint.getConfiguration().getQueryString());
}
}
|
GoogleBigQuerySQLComponentTest
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/ArrayPatternConverter.java
|
{
"start": 967,
"end": 1255
}
|
interface ____ extends PatternConverter {
/**
* Formats an array of Objects.
* @param toAppendTo The StringBuilder to add the content to.
* @param objects The Object array.
*/
void format(final StringBuilder toAppendTo, Object... objects);
}
|
ArrayPatternConverter
|
java
|
quarkusio__quarkus
|
integration-tests/spring-data-jpa/src/test/java/io/quarkus/it/spring/data/jpa/CountryResourceTest.java
|
{
"start": 845,
"end": 3950
}
|
class ____ {
private static final Set<String> NOT_ADDED_OR_REMOVED = new HashSet<>(
Arrays.asList("Greece", "France", "Czechia"));
@Test
@Order(1)
void testAll() {
List<Country> countries = when().get("/country/all").then()
.statusCode(200)
.extract().body().jsonPath().getList(".", Country.class);
// make sure /all contains the elements we know that are never removed
assertThat(countries).extracting("name").filteredOn(NOT_ADDED_OR_REMOVED::contains)
.containsExactlyInAnyOrder(new ArrayList<>(NOT_ADDED_OR_REMOVED).toArray(new String[0]));
}
@Test
@Order(2)
void testPage() {
when().get("/country/page/1/0").then()
.statusCode(200)
.body(is("false - true / 1"));
when().get("/country/page/1/1").then()
.statusCode(200)
.body(is("true - true / 1"));
when().get("/country/page/10/0").then()
.statusCode(200)
.body(startsWith("false - false / "));
when().get("/country/page/10/1").then()
.statusCode(200)
.body(is("true - false / 0"));
}
@Test
@Order(3)
void testPageSorted() {
String response = when().get("/country/page-sorted/2/0").then()
.statusCode(200)
.extract().response().asString();
assertThat(Arrays.stream(response.split(",")).map(Long::parseLong).collect(Collectors.toList()))
.isSortedAccordingTo(Comparator.reverseOrder());
}
@Test
@Order(4)
void testGetOne() {
when().get("/country/getOne/1").then()
.statusCode(200)
.body(containsString("Greece"));
when().get("/country/getOne/100").then()
.statusCode(400);
}
@Test
@Order(5)
void testNewAndEditIso() {
when().get("/country/all").then()
.statusCode(200)
.body(not(containsString("Germany")));
when().get("/country/new/Germany/GER").then()
.statusCode(200)
.body(containsString("Germany"));
when().get("/country/all").then()
.statusCode(200)
.body(containsString("Germany"))
.body(containsString("GER"));
when().get("/country/editIso3/4/DEU").then()
.statusCode(200)
.body(containsString("Germany"));
when().get("/country/all").then()
.statusCode(200)
.body(containsString("Germany"))
.body(containsString("DEU"))
.body(not(containsString("GER")));
when().get("/country/editIso3/100/ZZZ").then()
.statusCode(500);
}
@Test
@Order(6)
void testDeleteAllInBatch() {
when().delete("/country").then()
.statusCode(204);
when().get("/country/all").then()
.statusCode(200)
.body("size()", equalTo(0));
}
}
|
CountryResourceTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreationClient.java
|
{
"start": 1590,
"end": 1636
}
|
class ____ client lease recovery.
*/
public
|
tests
|
java
|
quarkusio__quarkus
|
integration-tests/grpc-vertx/src/test/java/io/quarkus/grpc/examples/hello/HelloWorldN2OServiceIT.java
|
{
"start": 331,
"end": 647
}
|
class ____ extends HelloWorldNewServiceTestBase {
@Override
protected int port() {
return 9001;
}
@Override
protected Vertx vertx() {
return Vertx.vertx();
}
@Override
protected void close(Vertx vertx) {
GRPCTestUtils.close(vertx);
}
}
|
HelloWorldN2OServiceIT
|
java
|
apache__avro
|
lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/DoubleTest.java
|
{
"start": 1314,
"end": 2187
}
|
class ____ {
@Benchmark
@OperationsPerInvocation(BasicState.BATCH_SIZE)
public void encode(final TestStateEncode state) throws Exception {
final Encoder e = state.encoder;
for (int i = 0; i < state.getBatchSize(); i += 4) {
e.writeDouble(state.testData[i + 0]);
e.writeDouble(state.testData[i + 1]);
e.writeDouble(state.testData[i + 2]);
e.writeDouble(state.testData[i + 3]);
}
}
@Benchmark
@OperationsPerInvocation(BasicState.BATCH_SIZE)
public double decode(final TestStateDecode state) throws Exception {
final Decoder d = state.decoder;
double total = 0;
for (int i = 0; i < state.getBatchSize(); i += 4) {
total += d.readDouble();
total += d.readDouble();
total += d.readDouble();
total += d.readDouble();
}
return total;
}
@State(Scope.Thread)
public static
|
DoubleTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicySecurityClient.java
|
{
"start": 1103,
"end": 1889
}
|
class ____ extends AbstractClient {
private final Client client;
private final Map<String, String> headers;
private final String origin;
public LifecyclePolicySecurityClient(Client client, String origin, Map<String, String> headers) {
super(client.settings(), client.threadPool(), client.projectResolver());
this.client = client;
this.origin = origin;
this.headers = headers;
}
@Override
protected <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
ActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
ClientHelper.executeWithHeadersAsync(headers, origin, client, action, request, listener);
}
}
|
LifecyclePolicySecurityClient
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/converter/custom/MyBean.java
|
{
"start": 908,
"end": 1883
}
|
class ____ {
private String a;
private String b;
public MyBean() {
}
public MyBean(String a, String b) {
this.a = a;
this.b = b;
}
@Override
public boolean equals(Object o) {
if (o instanceof MyBean that) {
return ObjectHelper.equal(this.a, that.a) && ObjectHelper.equal(this.b, that.b);
}
return false;
}
@Override
public int hashCode() {
int answer = 1;
if (a != null) {
answer += a.hashCode() * 37;
}
if (b != null) {
answer += b.hashCode() * 37;
}
return answer;
}
@Override
public String toString() {
return "MyBean[a=" + a + " b=" + b + "]";
}
public String getA() {
return a;
}
public void setA(String a) {
this.a = a;
}
public String getB() {
return b;
}
public void setB(String b) {
this.b = b;
}
}
|
MyBean
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/core/Completable.java
|
{
"start": 2772,
"end": 2994
}
|
class ____ <em>cold</em>
* and there is a standard <em>hot</em> implementation in the form of a subject:
* {@link io.reactivex.rxjava3.subjects.CompletableSubject CompletableSubject}.
* <p>
* The documentation for this
|
are
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/PrePersistJpaAnnotation.java
|
{
"start": 463,
"end": 1091
}
|
class ____ implements PrePersist {
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public PrePersistJpaAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public PrePersistJpaAnnotation(PrePersist annotation, ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from Jandex variant
*/
public PrePersistJpaAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
}
@Override
public Class<? extends Annotation> annotationType() {
return PrePersist.class;
}
}
|
PrePersistJpaAnnotation
|
java
|
resilience4j__resilience4j
|
resilience4j-circuitbreaker/src/test/java/io/github/resilience4j/circuitbreaker/internal/CircuitBreakerAutoTransitionStateMachineTest.java
|
{
"start": 1284,
"end": 4881
}
|
class ____ {
private CircuitBreaker circuitBreaker;
private ScheduledExecutorService schedulerMock;
@Before
public void setUp() {
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(50)
.slidingWindow(
5,
5,
CircuitBreakerConfig.SlidingWindowType.COUNT_BASED,
CircuitBreakerConfig.SlidingWindowSynchronizationStrategy.SYNCHRONIZED
)
.permittedNumberOfCallsInHalfOpenState(3)
.automaticTransitionFromOpenToHalfOpenEnabled(true)
.waitDurationInOpenState(Duration.ofSeconds(2))
.recordException(error -> !(error instanceof NumberFormatException))
.build();
SchedulerFactory schedulerFactoryMock = mock(SchedulerFactory.class);
schedulerMock = mock(ScheduledExecutorService.class);
when(schedulerFactoryMock.getScheduler()).thenReturn(schedulerMock);
circuitBreaker = new CircuitBreakerStateMachine("testName", circuitBreakerConfig,
schedulerFactoryMock);
}
@Test
public void testAutoTransition() {
// Initially the CircuitBreaker is open
circuitBreaker.transitionToOpenState();
ArgumentCaptor<Runnable> runnableArgumentCaptor = ArgumentCaptor.forClass(Runnable.class);
ArgumentCaptor<Long> delayArgumentCaptor = ArgumentCaptor.forClass(Long.class);
ArgumentCaptor<TimeUnit> unitArgumentCaptor = ArgumentCaptor.forClass(TimeUnit.class);
// Check that schedule is invoked
verify(schedulerMock)
.schedule(runnableArgumentCaptor.capture(), delayArgumentCaptor.capture(),
unitArgumentCaptor.capture());
assertThat(delayArgumentCaptor.getValue()).isEqualTo(2000L);
assertThat(unitArgumentCaptor.getValue()).isEqualTo(TimeUnit.MILLISECONDS);
// Check that the runnable transitions to half_open
runnableArgumentCaptor.getValue().run();
assertThat(circuitBreaker.getState()).isEqualTo(HALF_OPEN);
}
@Test
public void shouldCancelAutoTransition() {
ScheduledFuture<?> mockFuture = mock(ScheduledFuture.class);
doReturn(mockFuture)
.when(schedulerMock).schedule(any(Runnable.class), any(Long.class), any(TimeUnit.class));
// Auto transition scheduled
circuitBreaker.transitionToOpenState();
then(schedulerMock).should(times(1)).schedule(any(Runnable.class), any(Long.class), any(TimeUnit.class));
// Auto transition should be canceled
circuitBreaker.transitionToForcedOpenState();
// Verify scheduled future is canceled
then(mockFuture).should(times(1)).cancel(false);
}
@Test
public void notCancelAutoTransitionFutureIfAlreadyDone() {
ScheduledFuture<?> mockFuture = mock(ScheduledFuture.class);
doReturn(mockFuture)
.when(schedulerMock).schedule(any(Runnable.class), any(Long.class), any(TimeUnit.class));
// Already done
when(mockFuture.isDone()).thenReturn(true);
// Auto transition scheduled
circuitBreaker.transitionToOpenState();
then(schedulerMock).should(times(1)).schedule(any(Runnable.class), any(Long.class), any(TimeUnit.class));
// Auto transition should be canceled
circuitBreaker.transitionToForcedOpenState();
// Not called again because future is already done.
then(mockFuture).should(times(0)).cancel(true);
}
}
|
CircuitBreakerAutoTransitionStateMachineTest
|
java
|
junit-team__junit5
|
junit-vintage-engine/src/testFixtures/java/org/junit/vintage/engine/samples/junit4/PlainJUnit4TestCaseWithFiveTestMethods.java
|
{
"start": 1048,
"end": 1642
}
|
class ____ {
@Test
public void abortedTest() {
assumeFalse("this test should be aborted", true);
}
@Test
@Category(Failing.class)
public void failingTest() {
fail("this test should fail");
}
@Test
@Ignore
@Category(Skipped.class)
public void ignoredTest1_withoutReason() {
fail("this should never be called");
}
@Test
@Ignore("a custom reason")
@Category(SkippedWithReason.class)
public void ignoredTest2_withReason() {
fail("this should never be called");
}
@Test
public void successfulTest() {
assertEquals(3, 1 + 2);
}
}
|
PlainJUnit4TestCaseWithFiveTestMethods
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/commons/util/ToStringBuilderTests.java
|
{
"start": 4407,
"end": 4760
}
|
class ____ {
String name;
int age;
RoleModel() {
}
RoleModel(String name, int age) {
this.name = name;
this.age = age;
}
@Override
public String toString() {
// @formatter:off
return new ToStringBuilder(this)
.append("name", this.name)
.append("age", this.age)
.toString();
// @formatter:on
}
}
}
|
RoleModel
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BooleanNode.java
|
{
"start": 616,
"end": 1166
}
|
class ____ extends BinaryNode {
/* ---- begin visitor ---- */
@Override
public <Scope> void visit(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
irTreeVisitor.visitBoolean(this, scope);
}
@Override
public <Scope> void visitChildren(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
getLeftNode().visit(irTreeVisitor, scope);
getRightNode().visit(irTreeVisitor, scope);
}
/* ---- end visitor ---- */
public BooleanNode(Location location) {
super(location);
}
}
|
BooleanNode
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-performance-tests/src/test/java/org/assertj/tests/core/perf/SoftAssertionsPerfTest.java
|
{
"start": 2308,
"end": 10790
}
|
class ____ {
private SoftAssertions softly;
private CartoonCharacter homer;
private CartoonCharacter fred;
private CartoonCharacter lisa;
private CartoonCharacter maggie;
private CartoonCharacter bart;
private Map<String, Object> iterableMap;
private static long start;
@BeforeAll
static void beforeAll() {
setRemoveAssertJRelatedElementsFromStackTrace(false);
start = System.currentTimeMillis();
}
@AfterAll
static void afterAll() {
long duration = System.currentTimeMillis() - start;
System.out.println("SoftAssertionsTest execution time (ms): " + duration);
}
@BeforeEach
void setup() {
softly = new SoftAssertions();
bart = new CartoonCharacter("Bart Simpson");
lisa = new CartoonCharacter("Lisa Simpson");
maggie = new CartoonCharacter("Maggie Simpson");
homer = new CartoonCharacter("Homer Simpson");
homer.getChildren().add(bart);
homer.getChildren().add(lisa);
homer.getChildren().add(maggie);
CartoonCharacter pebbles = new CartoonCharacter("Pebbles Flintstone");
fred = new CartoonCharacter("Fred Flintstone");
fred.getChildren().add(pebbles);
List<String> names = asList("Dave", "Jeff");
LinkedHashSet<String> jobs = newLinkedHashSet("Plumber", "Builder");
Iterable<String> cities = asList("Dover", "Boston", "Paris");
int[] ranks = { 1, 2, 3 };
iterableMap = new LinkedHashMap<>();
iterableMap.put("name", names);
iterableMap.put("job", jobs);
iterableMap.put("city", cities);
iterableMap.put("rank", ranks);
}
@Test
void all_assertions_should_pass() {
softly.assertThat(1).isEqualTo(1);
softly.assertThat(Lists.newArrayList(1, 2)).containsOnly(1, 2);
softly.assertAll();
}
@Test
void all_assertions_should_pass2() {
softly.assertThat(1).isEqualTo(1);
softly.assertThat(Lists.newArrayList(1, 2)).containsOnly(1, 2);
softly.assertAll();
}
@Test
void all_assertions_should_pass3() {
softly.assertThat(1).isEqualTo(1);
softly.assertThat(Lists.newArrayList(1, 2)).containsOnly(1, 2);
softly.assertAll();
}
@Test
void all_assertions_should_pass4() {
softly.assertThat(1).isEqualTo(1);
softly.assertThat(Lists.newArrayList(1, 2)).containsOnly(1, 2);
softly.assertAll();
}
@Test
void should_return_success_of_last_assertion() {
softly.assertThat(true).isFalse();
softly.assertThat(true).isEqualTo(true);
assertThat(softly.wasSuccess()).isTrue();
}
@Test
void should_return_success_of_last_assertion_with_nested_calls() {
softly.assertThat(true).isFalse();
softly.assertThat(true).isTrue(); // isTrue() calls isEqualTo(true)
assertThat(softly.wasSuccess()).isTrue();
}
@Test
void should_return_failure_of_last_assertion() {
softly.assertThat(true).isTrue();
softly.assertThat(true).isEqualTo(false);
assertThat(softly.wasSuccess()).isFalse();
}
@Test
void should_return_failure_of_last_assertion_with_nested_calls() {
softly.assertThat(true).isTrue();
softly.assertThat(true).isFalse(); // isFalse() calls isEqualTo(false)
assertThat(softly.wasSuccess()).isFalse();
}
@Test
void should_be_able_to_catch_exceptions_thrown_by_map_assertions() {
// GIVEN
Map<String, String> map = mapOf(entry("54", "55"));
// WHEN
softly.assertThat(map).contains(entry("1", "2")).isEmpty();
// THEN
List<Throwable> errors = softly.errorsCollected();
assertThat(errors).hasSize(2);
}
@Test
void should_be_able_to_catch_exceptions_thrown_by_all_proxied_methods() {
// perform a bunch of soft assertions
softly.assertThat(BigDecimal.ZERO).isEqualTo(BigDecimal.ONE);
softly.assertThat(Boolean.FALSE).isTrue();
softly.assertThat(false).isTrue();
softly.assertThat(new boolean[] { false }).isEqualTo(new boolean[] { true });
softly.assertThat(Byte.valueOf((byte) 0)).isEqualTo((byte) 1);
softly.assertThat((byte) 2).inHexadecimal().isEqualTo((byte) 3);
softly.assertThat(new byte[] { 4 }).isEqualTo(new byte[] { 5 });
softly.assertThat(Character.valueOf((char) 65)).isEqualTo(Character.valueOf((char) 66));
softly.assertThat((char) 67).isEqualTo((char) 68);
softly.assertThat(new char[] { 69 }).isEqualTo(new char[] { 70 });
softly.assertThat(new StringBuilder("a")).isEqualTo(new StringBuilder("b"));
softly.assertThat(Object.class).isEqualTo(String.class);
softly.assertThat(parseDatetime("1999-12-31T23:59:59")).isEqualTo(parseDatetime("2000-01-01T00:00:01"));
softly.assertThat(Double.valueOf(6.0)).isEqualTo(Double.valueOf(7.0));
softly.assertThat(8.0d).isEqualTo(9.0d);
softly.assertThat(new double[] { 10.0d }).isEqualTo(new double[] { 11.0d });
softly.assertThat(new File("a"))
.overridingErrorMessage("%nexpected: File(a)%n but was: File(b)".formatted())
.isEqualTo(new File("b"));
softly.assertThat(Float.valueOf(12)).isEqualTo(Float.valueOf(13));
softly.assertThat(14f).isEqualTo(15f);
softly.assertThat(new float[] { 16f }).isEqualTo(new float[] { 17f });
softly.assertThat(new ByteArrayInputStream(new byte[] { (byte) 65 }))
.hasSameContentAs(new ByteArrayInputStream(new byte[] { (byte) 66 }));
softly.assertThat(Integer.valueOf(20)).isEqualTo(Integer.valueOf(21));
softly.assertThat(22).isEqualTo(23);
softly.assertThat(new int[] { 24 }).isEqualTo(new int[] { 25 });
softly.assertThat((Iterable<String>) Lists.newArrayList("26")).isEqualTo(Lists.newArrayList("27"));
softly.assertThat(Lists.newArrayList("28").iterator()).hasNext();
softly.assertThat(Lists.newArrayList("30")).isEqualTo(Lists.newArrayList("31"));
softly.assertThat(Long.valueOf(32)).isEqualTo(Long.valueOf(33));
softly.assertThat(34L).isEqualTo(35L);
softly.assertThat(new long[] { 36L }).isEqualTo(new long[] { 37L });
softly.assertThat(mapOf(entry("38", "39"))).isEqualTo(mapOf(entry("40", "41")));
softly.assertThat(Short.valueOf((short) 42)).isEqualTo(Short.valueOf((short) 43));
softly.assertThat((short) 44).isEqualTo((short) 45);
softly.assertThat(new short[] { (short) 46 }).isEqualTo(new short[] { (short) 47 });
softly.assertThat("48").isEqualTo("49");
softly.assertThat(new Object() {
@Override
public String toString() {
return "50";
}
}).isEqualTo(new Object() {
@Override
public String toString() {
return "51";
}
});
softly.assertThat(new Object[] { new Object() {
@Override
public String toString() {
return "52";
}
} }).isEqualTo(new Object[] { new Object() {
@Override
public String toString() {
return "53";
}
} });
final IllegalArgumentException illegalArgumentException = new IllegalArgumentException("IllegalArgumentException message");
softly.assertThat(illegalArgumentException).hasMessage("NullPointerException message");
softly.assertThatThrownBy(() -> {
throw new Exception("something was wrong");
}).hasMessage("something was good");
softly.assertThat(mapOf(entry("54", "55"))).contains(entry("1", "2"));
softly.assertThat(LocalTime.of(12, 0)).isEqualTo(LocalTime.of(13, 0));
softly.assertThat(OffsetTime.of(12, 0, 0, 0, ZoneOffset.UTC))
.isEqualTo(OffsetTime.of(13, 0, 0, 0, ZoneOffset.UTC));
softly.assertThat(Optional.of("not empty")).isEqualTo("empty");
softly.assertThat(OptionalInt.of(0)).isEqualTo(1);
softly.assertThat(OptionalDouble.of(0.0)).isEqualTo(1.0);
softly.assertThat(OptionalLong.of(0L)).isEqualTo(1L);
softly.assertThat(URI.create("http://assertj.org")).hasPort(8888);
softly.assertThat(CompletableFuture.completedFuture("done")).isCompletedExceptionally();
softly.assertThat((Predicate<String>) s -> s.equals("something")).accepts("something else");
softly.assertThat((IntPredicate) s -> s == 1).accepts(2);
softly.assertThat((LongPredicate) s -> s == 1).accepts(2);
softly.assertThat((DoublePredicate) s -> s == 1).accepts(2);
// assert everything, but catch the error since it is a perf test
catchThrowable(() -> softly.assertAll());
}
@SafeVarargs
private static <K, V> LinkedHashMap<K, V> mapOf(MapEntry<K, V>... entries) {
LinkedHashMap<K, V> map = new LinkedHashMap<>();
for (Map.Entry<K, V> entry : entries) {
map.put(entry.getKey(), entry.getValue());
}
return map;
}
private static
|
SoftAssertionsPerfTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/conditional/qualifier/ConditionalMethodWithSourceToTargetMapper.java
|
{
"start": 2817,
"end": 3223
}
|
class ____ {
private String line1;
private String line2;
public String getLine1() {
return line1;
}
public void setLine1(String line1) {
this.line1 = line1;
}
public String getLine2() {
return line2;
}
public void setLine2(String line2) {
this.line2 = line2;
}
}
}
|
Address
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cache/polymorphism/PolymorphicCacheAndBatchingTest.java
|
{
"start": 1899,
"end": 2337
}
|
class ____ id with no cache-hit
cache.evictEntityData();
scope.inTransaction( (session) -> {
final List<CachedItem2> resultList = session.byMultipleIds( CachedItem2.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.multiLoad( 1, 2 );
assertThat( resultList ).hasSize( 2 );
assertThat( resultList.get( 0 ) ).isNull();
assertThat( resultList.get( 1 ).getName() ).isEqualTo( "name 2" );
} );
}
}
|
by
|
java
|
elastic__elasticsearch
|
x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/patterntext/PatternTextDocValuesTests.java
|
{
"start": 10292,
"end": 11264
}
|
class ____ implements LeafStoredFieldLoader {
private final List<BytesRef> values;
private final String fieldName;
private int doc = -1;
SimpleStoredFieldLoader(List<BytesRef> values, String fieldName) {
this.values = values;
this.fieldName = fieldName;
}
@Override
public void advanceTo(int doc) throws IOException {
this.doc = doc;
}
@Override
public BytesReference source() {
throw new UnsupportedOperationException();
}
@Override
public String id() {
throw new UnsupportedOperationException();
}
@Override
public String routing() {
throw new UnsupportedOperationException();
}
@Override
public Map<String, List<Object>> storedFields() {
return Map.of(fieldName, List.of(values.get(doc)));
}
}
}
|
SimpleStoredFieldLoader
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/MySqlIgnoreIndexHint.java
|
{
"start": 742,
"end": 1146
}
|
class ____ extends MySqlIndexHintImpl {
@Override
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, getIndexList());
}
visitor.endVisit(this);
}
public MySqlIgnoreIndexHint clone() {
MySqlIgnoreIndexHint x = new MySqlIgnoreIndexHint();
cloneTo(x);
return x;
}
}
|
MySqlIgnoreIndexHint
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/web/servlet/ServletRegistrationBeanTests.java
|
{
"start": 1700,
"end": 8013
}
|
class ____ {
private final MockServlet servlet = new MockServlet();
@Mock
@SuppressWarnings("NullAway.Init")
private ServletContext servletContext;
@Mock
@SuppressWarnings("NullAway.Init")
private ServletRegistration.Dynamic registration;
@Test
void startupWithDefaults() throws Exception {
given(this.servletContext.addServlet(anyString(), any(Servlet.class))).willReturn(this.registration);
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>(this.servlet);
bean.onStartup(this.servletContext);
then(this.servletContext).should().addServlet("mockServlet", this.servlet);
then(this.registration).should().setAsyncSupported(true);
then(this.registration).should().addMapping("/*");
}
@Test
void failsWithDoubleRegistration() {
assertThatIllegalStateException().isThrownBy(this::doubleRegistration)
.withMessage("Failed to register 'servlet double-registration' on "
+ "the servlet context. Possibly already registered?");
}
private void doubleRegistration() throws ServletException {
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>(this.servlet);
bean.setName("double-registration");
given(this.servletContext.addServlet(anyString(), any(Servlet.class))).willReturn(null);
bean.onStartup(this.servletContext);
}
@Test
void startupWithSpecifiedValues() throws Exception {
given(this.servletContext.addServlet(anyString(), any(Servlet.class))).willReturn(this.registration);
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>();
bean.setName("test");
bean.setServlet(this.servlet);
bean.setAsyncSupported(false);
bean.setInitParameters(Collections.singletonMap("a", "b"));
bean.addInitParameter("c", "d");
bean.setUrlMappings(new LinkedHashSet<>(Arrays.asList("/a", "/b")));
bean.addUrlMappings("/c");
bean.setLoadOnStartup(10);
bean.onStartup(this.servletContext);
then(this.servletContext).should().addServlet("test", this.servlet);
then(this.registration).should().setAsyncSupported(false);
Map<String, String> expectedInitParameters = new HashMap<>();
expectedInitParameters.put("a", "b");
expectedInitParameters.put("c", "d");
then(this.registration).should().setInitParameters(expectedInitParameters);
then(this.registration).should().addMapping("/a", "/b", "/c");
then(this.registration).should().setLoadOnStartup(10);
}
@Test
void specificName() throws Exception {
given(this.servletContext.addServlet(anyString(), any(Servlet.class))).willReturn(this.registration);
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>();
bean.setName("specificName");
bean.setServlet(this.servlet);
bean.onStartup(this.servletContext);
then(this.servletContext).should().addServlet("specificName", this.servlet);
}
@Test
void deducedName() throws Exception {
given(this.servletContext.addServlet(anyString(), any(Servlet.class))).willReturn(this.registration);
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>();
bean.setServlet(this.servlet);
bean.onStartup(this.servletContext);
then(this.servletContext).should().addServlet("mockServlet", this.servlet);
}
@Test
void disable() throws Exception {
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>();
bean.setServlet(this.servlet);
bean.setEnabled(false);
bean.onStartup(this.servletContext);
then(this.servletContext).should(never()).addServlet("mockServlet", this.servlet);
}
@Test
void setServletMustNotBeNull() {
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>();
assertThatIllegalStateException().isThrownBy(() -> bean.onStartup(this.servletContext))
.withMessageContaining("Unable to return description for null servlet");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void createServletMustNotBeNull() {
assertThatIllegalArgumentException().isThrownBy(() -> new ServletRegistrationBean<MockServlet>(null))
.withMessageContaining("'servlet' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void setMappingMustNotBeNull() {
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>(this.servlet);
assertThatIllegalArgumentException().isThrownBy(() -> bean.setUrlMappings(null))
.withMessageContaining("'urlMappings' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void createMappingMustNotBeNull() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new ServletRegistrationBean<>(this.servlet, (String[]) null))
.withMessageContaining("'urlMappings' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void addMappingMustNotBeNull() {
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>(this.servlet);
assertThatIllegalArgumentException().isThrownBy(() -> bean.addUrlMappings((String[]) null))
.withMessageContaining("'urlMappings' must not be null");
}
@Test
void setMappingReplacesValue() throws Exception {
given(this.servletContext.addServlet(anyString(), any(Servlet.class))).willReturn(this.registration);
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>(this.servlet, "/a", "/b");
bean.setUrlMappings(new LinkedHashSet<>(Arrays.asList("/c", "/d")));
bean.onStartup(this.servletContext);
then(this.registration).should().addMapping("/c", "/d");
}
@Test
void modifyInitParameters() throws Exception {
given(this.servletContext.addServlet(anyString(), any(Servlet.class))).willReturn(this.registration);
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>(this.servlet, "/a", "/b");
bean.addInitParameter("a", "b");
bean.getInitParameters().put("a", "c");
bean.onStartup(this.servletContext);
then(this.registration).should().setInitParameters(Collections.singletonMap("a", "c"));
}
@Test
void withoutDefaultMappings() throws Exception {
given(this.servletContext.addServlet(anyString(), any(Servlet.class))).willReturn(this.registration);
ServletRegistrationBean<MockServlet> bean = new ServletRegistrationBean<>(this.servlet, false);
bean.onStartup(this.servletContext);
then(this.registration).should(never()).addMapping(any(String[].class));
}
}
|
ServletRegistrationBeanTests
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/ResolvableTypeTests.java
|
{
"start": 78007,
"end": 78100
}
|
class ____ extends MySuperclassType<Collection<String>> {
}
public
|
MyCollectionSuperclassType
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.