language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
spring-projects__spring-boot
|
module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/autoconfigure/RemoteDevToolsProperties.java
|
{
"start": 2418,
"end": 2952
}
|
class ____ {
/**
* The host of the proxy to use to connect to the remote application.
*/
private @Nullable String host;
/**
* The port of the proxy to use to connect to the remote application.
*/
private @Nullable Integer port;
public @Nullable String getHost() {
return this.host;
}
public void setHost(@Nullable String host) {
this.host = host;
}
public @Nullable Integer getPort() {
return this.port;
}
public void setPort(@Nullable Integer port) {
this.port = port;
}
}
}
|
Proxy
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/GetConsumeStatsInBrokerHeader.java
|
{
"start": 1390,
"end": 1738
}
|
class ____ implements CommandCustomHeader {
@CFNotNull
private boolean isOrder;
@Override
public void checkFields() throws RemotingCommandException {
}
public boolean isOrder() {
return isOrder;
}
public void setIsOrder(boolean isOrder) {
this.isOrder = isOrder;
}
}
|
GetConsumeStatsInBrokerHeader
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/script/SourceMapFieldScript.java
|
{
"start": 685,
"end": 1181
}
|
class ____ {
protected final Map<String, Object> ctxMap;
public SourceMapFieldScript(Map<String, Object> ctxMap) {
this.ctxMap = ctxMap;
}
/**
* Expose the {@link SourceMapField field} API
*
* @param path the path to the field in the source map
* @return a new {@link SourceMapField} instance for the specified path
*/
public SourceMapField field(String path) {
return new SourceMapField(path, () -> ctxMap);
}
}
|
SourceMapFieldScript
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/RuncContainerExecutorConfig.java
|
{
"start": 33676,
"end": 33863
}
|
class ____ a Java representation of the network section
* of the OCI Runtime Specification.
*/
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static
|
is
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeObserveOn.java
|
{
"start": 1017,
"end": 1441
}
|
class ____<T> extends AbstractMaybeWithUpstream<T, T> {
final Scheduler scheduler;
public MaybeObserveOn(MaybeSource<T> source, Scheduler scheduler) {
super(source);
this.scheduler = scheduler;
}
@Override
protected void subscribeActual(MaybeObserver<? super T> observer) {
source.subscribe(new ObserveOnMaybeObserver<>(observer, scheduler));
}
static final
|
MaybeObserveOn
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/applicationfieldaccess/PublicFieldWithProxyAndLazyLoadingAndInheritanceTest.java
|
{
"start": 2336,
"end": 2528
}
|
class ____ extends Contained {
ContainedExtended() {
}
ContainedExtended(String name) {
this.name = name;
}
}
/**
* A
|
ContainedExtended
|
java
|
apache__camel
|
components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/format/factories/DoublePatternFormatFactory.java
|
{
"start": 1119,
"end": 1741
}
|
class ____ extends AbstractFormatFactory {
{
supportedClasses.add(double.class);
supportedClasses.add(Double.class);
}
@Override
public boolean canBuild(FormattingOptions formattingOptions) {
return super.canBuild(formattingOptions)
&& ObjectHelper.isNotEmpty(formattingOptions.getPattern());
}
@Override
public Format<?> build(FormattingOptions formattingOptions) {
return new DoublePatternFormat(
formattingOptions.getPattern(),
formattingOptions.getLocale());
}
private static
|
DoublePatternFormatFactory
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/http/client/request/DefaultAsyncHttpClientRequest.java
|
{
"start": 1738,
"end": 3939
}
|
class ____ implements AsyncHttpClientRequest {
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultAsyncHttpClientRequest.class);
private final CloseableHttpAsyncClient asyncClient;
private final RequestConfig defaultConfig;
public DefaultAsyncHttpClientRequest(CloseableHttpAsyncClient asyncClient, DefaultConnectingIOReactor ioReactor, RequestConfig defaultConfig) {
this.asyncClient = asyncClient;
this.defaultConfig = defaultConfig;
if (this.asyncClient.getStatus() != IOReactorStatus.ACTIVE) {
this.asyncClient.start();
}
}
@Override
public <T> void execute(URI uri, String httpMethod, RequestHttpEntity requestHttpEntity,
final ResponseHandler<T> responseHandler, final Callback<T> callback) throws Exception {
HttpUriRequestBase httpRequestBase = DefaultHttpClientRequest.build(uri, httpMethod, requestHttpEntity, defaultConfig);
// IllegalStateException has been removed from ver.5.0, should catch it in DefaultConnectingIOReactor callback
FutureCallback<SimpleHttpResponse> futureCallback = new FutureCallback<SimpleHttpResponse>() {
@Override
public void completed(SimpleHttpResponse result) {
// SimpleHttpResponse doesn't need to close
DefaultClientHttpResponse response = new DefaultClientHttpResponse(result);
try {
HttpRestResult<T> httpRestResult = responseHandler.handle(response);
callback.onReceive(httpRestResult);
} catch (Exception e) {
callback.onError(e);
}
}
@Override
public void failed(Exception ex) {
callback.onError(ex);
}
@Override
public void cancelled() {
callback.onCancel();
}
};
asyncClient.execute(SimpleHttpRequest.copy(httpRequestBase), futureCallback);
}
@Override
public void close() throws IOException {
this.asyncClient.close();
}
}
|
DefaultAsyncHttpClientRequest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/root/ApplicationTest.java
|
{
"start": 8980,
"end": 9635
}
|
class ____ extends Application {
@Override
public Set<Class<?>> getClasses() {
return new HashSet<>(
Arrays.asList(
ResourceInheritedInterfaceTest.class, ResourceInheritedClassTest.class,
ResourceTest1.class, Feature1.class, ExceptionMapper1.class));
}
@Override
public Set<Object> getSingletons() {
return new HashSet<>(
Arrays.asList(
new ResponseFilter1(), new DynamicFeature1()));
}
}
@ApplicationPath("whatever")
public static abstract
|
AppTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java
|
{
"start": 1605,
"end": 9601
}
|
class ____ picked when Java 7 was still the min. supported
// version. The use of counter mode was chosen to simplify the need to deal with padding
// and for its speed. 128 bit key length is chosen due to the JCE policy that ships by
// default with the Oracle JDK.
// TODO: with better support in Java 8, we should consider moving to use AES GCM as it
// also provides authentication of the encrypted data, which is something that we are
// missing here.
private static final String DEFAULT_ENCRYPTION_ALGORITHM = "AES/CTR/NoPadding";
private static final String DEFAULT_KEY_ALGORITH = "AES";
private static final int DEFAULT_KEY_LENGTH = 128;
private static final Setting<String> ENCRYPTION_ALGO_SETTING = Setting.simpleString(
SecurityField.setting("encryption.algorithm"),
DEFAULT_ENCRYPTION_ALGORITHM,
Property.NodeScope
);
private static final Setting<Integer> ENCRYPTION_KEY_LENGTH_SETTING = Setting.intSetting(
SecurityField.setting("encryption_key.length"),
DEFAULT_KEY_LENGTH,
Property.NodeScope
);
private static final Setting<String> ENCRYPTION_KEY_ALGO_SETTING = Setting.simpleString(
SecurityField.setting("encryption_key.algorithm"),
DEFAULT_KEY_ALGORITH,
Property.NodeScope
);
private static final Logger logger = LogManager.getLogger(CryptoService.class);
private final SecureRandom secureRandom = new SecureRandom();
private final String encryptionAlgorithm;
private final int ivLength;
/*
* The encryption key is derived from the system key.
*/
private final SecretKey encryptionKey;
public CryptoService(Settings settings) throws IOException {
this.encryptionAlgorithm = ENCRYPTION_ALGO_SETTING.get(settings);
final int keyLength = ENCRYPTION_KEY_LENGTH_SETTING.get(settings);
this.ivLength = keyLength / 8;
String keyAlgorithm = ENCRYPTION_KEY_ALGO_SETTING.get(settings);
if (keyLength % 8 != 0) {
throw new IllegalArgumentException("invalid key length [" + keyLength + "]. value must be a multiple of 8");
}
try (InputStream in = WatcherField.ENCRYPTION_KEY_SETTING.get(settings)) {
if (in == null) {
throw new ElasticsearchException("setting [" + WatcherField.ENCRYPTION_KEY_SETTING.getKey() + "] must be set in keystore");
}
SecretKey systemKey = readSystemKey(in);
try {
encryptionKey = encryptionKey(systemKey, keyLength, keyAlgorithm);
} catch (NoSuchAlgorithmException nsae) {
throw new ElasticsearchException("failed to start crypto service. could not load encryption key", nsae);
}
}
assert encryptionKey != null : "the encryption key should never be null";
}
private static SecretKey readSystemKey(InputStream in) throws IOException {
final int keySizeBytes = KEY_SIZE / 8;
final byte[] keyBytes = new byte[keySizeBytes];
final int read = Streams.readFully(in, keyBytes);
if (read != keySizeBytes) {
throw new IllegalArgumentException(
"key size did not match expected value; was the key generated with elasticsearch-syskeygen?"
);
}
return new SecretKeySpec(keyBytes, KEY_ALGO);
}
/**
* Encrypts the provided char array and returns the encrypted values in a char array
* @param chars the characters to encrypt
* @return character array representing the encrypted data
*/
public char[] encrypt(char[] chars) {
byte[] charBytes = CharArrays.toUtf8Bytes(chars);
String base64 = Base64.getEncoder().encodeToString(encryptInternal(charBytes, encryptionKey));
return ENCRYPTED_TEXT_PREFIX.concat(base64).toCharArray();
}
/**
* Decrypts the provided char array and returns the plain-text chars
* @param chars the data to decrypt
* @return plaintext chars
*/
public char[] decrypt(char[] chars) {
if (isEncrypted(chars) == false) {
// Not encrypted
return chars;
}
String encrypted = new String(chars, ENCRYPTED_TEXT_PREFIX.length(), chars.length - ENCRYPTED_TEXT_PREFIX.length());
byte[] bytes;
try {
bytes = Base64.getDecoder().decode(encrypted);
} catch (IllegalArgumentException e) {
throw new ElasticsearchException("unable to decode encrypted data", e);
}
byte[] decrypted = decryptInternal(bytes, encryptionKey);
return CharArrays.utf8BytesToChars(decrypted);
}
/**
* Checks whether the given chars are encrypted
* @param chars the chars to check if they are encrypted
* @return true is data is encrypted
*/
protected static boolean isEncrypted(char[] chars) {
return CharArrays.charsBeginsWith(ENCRYPTED_TEXT_PREFIX, chars);
}
private byte[] encryptInternal(byte[] bytes, SecretKey key) {
byte[] iv = new byte[ivLength];
secureRandom.nextBytes(iv);
Cipher cipher = cipher(Cipher.ENCRYPT_MODE, encryptionAlgorithm, key, iv);
try {
byte[] encrypted = cipher.doFinal(bytes);
byte[] output = new byte[iv.length + encrypted.length];
System.arraycopy(iv, 0, output, 0, iv.length);
System.arraycopy(encrypted, 0, output, iv.length, encrypted.length);
return output;
} catch (BadPaddingException | IllegalBlockSizeException e) {
throw new ElasticsearchException("error encrypting data", e);
}
}
private byte[] decryptInternal(byte[] bytes, SecretKey key) {
if (bytes.length < ivLength) {
logger.error("received data for decryption with size [{}] that is less than IV length [{}]", bytes.length, ivLength);
throw new IllegalArgumentException("invalid data to decrypt");
}
byte[] iv = new byte[ivLength];
System.arraycopy(bytes, 0, iv, 0, ivLength);
byte[] data = new byte[bytes.length - ivLength];
System.arraycopy(bytes, ivLength, data, 0, bytes.length - ivLength);
Cipher cipher = cipher(Cipher.DECRYPT_MODE, encryptionAlgorithm, key, iv);
try {
return cipher.doFinal(data);
} catch (BadPaddingException | IllegalBlockSizeException e) {
throw new IllegalStateException("error decrypting data", e);
}
}
private static Cipher cipher(int mode, String encryptionAlgorithm, SecretKey key, byte[] initializationVector) {
try {
Cipher cipher = Cipher.getInstance(encryptionAlgorithm);
cipher.init(mode, key, new IvParameterSpec(initializationVector));
return cipher;
} catch (Exception e) {
throw new ElasticsearchException("error creating cipher", e);
}
}
private static SecretKey encryptionKey(SecretKey systemKey, int keyLength, String algorithm) throws NoSuchAlgorithmException {
byte[] bytes = systemKey.getEncoded();
if ((bytes.length * 8) < keyLength) {
throw new IllegalArgumentException("at least " + keyLength + " bits should be provided as key data");
}
MessageDigest messageDigest = MessageDigest.getInstance("SHA-256");
byte[] digest = messageDigest.digest(bytes);
assert digest.length == (256 / 8);
if ((digest.length * 8) < keyLength) {
throw new IllegalArgumentException("requested key length is too large");
}
byte[] truncatedDigest = Arrays.copyOfRange(digest, 0, (keyLength / 8));
return new SecretKeySpec(truncatedDigest, algorithm);
}
public static void addSettings(List<Setting<?>> settings) {
settings.add(ENCRYPTION_KEY_LENGTH_SETTING);
settings.add(ENCRYPTION_KEY_ALGO_SETTING);
settings.add(ENCRYPTION_ALGO_SETTING);
}
}
|
was
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsRequestTests.java
|
{
"start": 488,
"end": 1287
}
|
class ____ extends AbstractWireSerializingTestCase<FollowStatsAction.StatsRequest> {
@Override
protected Writeable.Reader<FollowStatsAction.StatsRequest> instanceReader() {
return FollowStatsAction.StatsRequest::new;
}
@Override
protected FollowStatsAction.StatsRequest createTestInstance() {
FollowStatsAction.StatsRequest statsRequest = new FollowStatsAction.StatsRequest();
if (randomBoolean()) {
statsRequest.setIndices(generateRandomStringArray(8, 4, false));
}
return statsRequest;
}
@Override
protected FollowStatsAction.StatsRequest mutateInstance(FollowStatsAction.StatsRequest instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
|
StatsRequestTests
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CredentialsInfo.java
|
{
"start": 1190,
"end": 1814
}
|
class ____ {
@XmlElementWrapper(name = "tokens")
HashMap<String, String> tokens;
@XmlElementWrapper(name = "secrets")
HashMap<String, String> secrets;
public CredentialsInfo() {
tokens = new HashMap<String, String>();
secrets = new HashMap<String, String>();
}
public HashMap<String, String> getTokens() {
return tokens;
}
public HashMap<String, String> getSecrets() {
return secrets;
}
public void setTokens(HashMap<String, String> tokens) {
this.tokens = tokens;
}
public void setSecrets(HashMap<String, String> secrets) {
this.secrets = secrets;
}
}
|
CredentialsInfo
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryTask.java
|
{
"start": 677,
"end": 1970
}
|
class ____ extends StoredAsyncTask<SqlQueryResponse> {
private final Mode mode;
private final SqlVersion sqlVersion;
private final boolean columnar;
public SqlQueryTask(
long id,
String type,
String action,
String description,
TaskId parentTaskId,
Map<String, String> headers,
Map<String, String> originHeaders,
AsyncExecutionId asyncExecutionId,
TimeValue keepAlive,
Mode mode,
SqlVersion sqlVersion,
boolean columnar
) {
super(id, type, action, description, parentTaskId, headers, originHeaders, asyncExecutionId, keepAlive);
this.mode = mode;
this.sqlVersion = sqlVersion;
this.columnar = columnar;
}
@Override
public SqlQueryResponse getCurrentResult() {
// for Ql searches we never store a search response in the task (neither partial, nor final)
// we kill the task on final response, so if the task is still present, it means the search is still running
// NB: the schema is only returned in the actual first (and currently last) response to the query
return new SqlQueryResponse("", mode, sqlVersion, columnar, null, emptyList(), getExecutionId().getEncoded(), true, true);
}
}
|
SqlQueryTask
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2901/Target.java
|
{
"start": 222,
"end": 423
}
|
class ____ {
private List<Integer> field;
public List<Integer> getField() {
return field;
}
public void setField(List<Integer> field) {
this.field = field;
}
}
|
Target
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/pubsub/StatefulRedisPubSubConnectionImpl.java
|
{
"start": 1941,
"end": 5529
}
|
class ____<K, V> extends StatefulRedisConnectionImpl<K, V>
implements StatefulRedisPubSubConnection<K, V> {
private final PubSubEndpoint<K, V> endpoint;
/**
* Initialize a new connection.
*
* @param endpoint the {@link PubSubEndpoint}
* @param writer the writer used to write commands
* @param codec Codec used to encode/decode keys and values.
* @param timeout Maximum time to wait for a response.
*/
public StatefulRedisPubSubConnectionImpl(PubSubEndpoint<K, V> endpoint, RedisChannelWriter writer, RedisCodec<K, V> codec,
Duration timeout) {
super(writer, endpoint, codec, timeout, DEFAULT_JSON_PARSER);
this.endpoint = endpoint;
endpoint.setConnectionState(getConnectionState());
}
/**
* Add a new listener.
*
* @param listener Listener.
*/
@Override
public void addListener(RedisPubSubListener<K, V> listener) {
endpoint.addListener(listener);
}
/**
* Remove an existing listener.
*
* @param listener Listener.
*/
@Override
public void removeListener(RedisPubSubListener<K, V> listener) {
endpoint.removeListener(listener);
}
@Override
public RedisPubSubAsyncCommands<K, V> async() {
return (RedisPubSubAsyncCommands<K, V>) async;
}
@Override
protected RedisPubSubAsyncCommandsImpl<K, V> newRedisAsyncCommandsImpl() {
return new RedisPubSubAsyncCommandsImpl<>(this, codec);
}
@Override
public RedisPubSubCommands<K, V> sync() {
return (RedisPubSubCommands<K, V>) sync;
}
@Override
protected RedisPubSubCommands<K, V> newRedisSyncCommandsImpl() {
return syncHandler(async(), RedisPubSubCommands.class);
}
@Override
public RedisPubSubReactiveCommands<K, V> reactive() {
return (RedisPubSubReactiveCommands<K, V>) reactive;
}
@Override
protected RedisPubSubReactiveCommandsImpl<K, V> newRedisReactiveCommandsImpl() {
return new RedisPubSubReactiveCommandsImpl<>(this, codec);
}
/**
* Re-subscribe to all previously subscribed channels and patterns.
*
* @return list of the futures of the {@literal subscribe} and {@literal psubscribe} commands.
*/
protected List<RedisFuture<Void>> resubscribe() {
List<RedisFuture<Void>> result = new ArrayList<>();
if (endpoint.hasChannelSubscriptions()) {
result.add(async().subscribe(toArray(endpoint.getChannels())));
}
if (endpoint.hasShardChannelSubscriptions()) {
result.add(async().ssubscribe(toArray(endpoint.getShardChannels())));
}
if (endpoint.hasPatternSubscriptions()) {
result.add(async().psubscribe(toArray(endpoint.getPatterns())));
}
return result;
}
@SuppressWarnings("unchecked")
private <T> T[] toArray(Collection<T> c) {
Class<T> cls = (Class<T>) c.iterator().next().getClass();
T[] array = (T[]) Array.newInstance(cls, c.size());
return c.toArray(array);
}
@Override
public void activated() {
super.activated();
for (RedisFuture<Void> command : resubscribe()) {
command.exceptionally(throwable -> {
if (throwable instanceof RedisCommandExecutionException) {
InternalLoggerFactory.getInstance(getClass()).warn("Re-subscribe failed: " + command.getError());
}
return null;
});
}
}
}
|
StatefulRedisPubSubConnectionImpl
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/SubResourcesAsBeansTest.java
|
{
"start": 1747,
"end": 2122
}
|
class ____ {
private final MiddleRestResource restSubResource;
public RestResource(MiddleRestResource restSubResource) {
this.restSubResource = restSubResource;
}
@Path("sub-resource/{first}")
public MiddleRestResource hello(String first) {
return restSubResource;
}
}
public static
|
RestResource
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/builder/EqualsBuilderTest.java
|
{
"start": 8679,
"end": 8987
}
|
class ____ extends TestTTSubObject {
@SuppressWarnings("unused")
private final int leafValue;
TestTTLeafObject(final int a, final int t, final int tt, final int leafValue) {
super(a, t, tt);
this.leafValue = leafValue;
}
}
static
|
TestTTLeafObject
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java
|
{
"start": 1238,
"end": 4008
}
|
class ____ extends DocumentParserContext {
private final LuceneDocument document = new LuceneDocument();
private final ContentPath contentPath = new ContentPath();
private final XContentParser parser;
/**
* The shortest and easiest way to create a context, to be used when none of the constructor arguments are needed.
* Use with caution as it can cause {@link NullPointerException}s down the line.
*/
public TestDocumentParserContext() {
this(MappingLookup.EMPTY, null);
}
public TestDocumentParserContext(Settings settings) {
this(MappingLookup.EMPTY, null, null, settings);
}
public TestDocumentParserContext(XContentParser parser) {
this(MappingLookup.EMPTY, null, parser, Settings.EMPTY);
}
/**
* More verbose way to create a context, to be used when one or more constructor arguments are needed as final methods
* that depend on them are called while executing tests.
*/
public TestDocumentParserContext(MappingLookup mappingLookup, SourceToParse source) {
this(mappingLookup, source, null, Settings.EMPTY);
}
private TestDocumentParserContext(MappingLookup mappingLookup, SourceToParse source, XContentParser parser, Settings settings) {
super(
mappingLookup,
new MappingParserContext(
s -> null,
s -> null,
s -> null,
IndexVersion.current(),
() -> TransportVersion.current(),
() -> null,
null,
(type, name) -> Lucene.STANDARD_ANALYZER,
MapperTestCase.createIndexSettings(IndexVersion.current(), settings),
null,
query -> {
throw new UnsupportedOperationException();
},
null
),
source,
mappingLookup.getMapping().getRoot(),
ObjectMapper.Dynamic.getRootDynamic(mappingLookup)
);
this.parser = parser;
}
@Override
public final LuceneDocument doc() {
return document;
}
@Override
public final ContentPath path() {
return contentPath;
}
@Override
public Iterable<LuceneDocument> nonRootDocuments() {
throw new UnsupportedOperationException();
}
@Override
public XContentParser parser() {
return parser;
}
@Override
public LuceneDocument rootDoc() {
throw new UnsupportedOperationException();
}
@Override
protected void addDoc(LuceneDocument doc) {
throw new UnsupportedOperationException();
}
@Override
public BytesRef getTsid() {
return null;
}
}
|
TestDocumentParserContext
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/models/role/RoleParser.java
|
{
"start": 486,
"end": 7124
}
|
class ____ {
protected static final Map<String, RedisInstance.Role> ROLE_MAPPING;
protected static final Map<String, RedisReplicaInstance.State> REPLICA_STATE_MAPPING;
static {
Map<String, RedisInstance.Role> roleMap = new HashMap<>();
roleMap.put("master", RedisInstance.Role.UPSTREAM);
roleMap.put("slave", RedisInstance.Role.REPLICA);
roleMap.put("sentinel", RedisInstance.Role.SENTINEL);
ROLE_MAPPING = Collections.unmodifiableMap(roleMap);
Map<String, RedisReplicaInstance.State> replicas = new HashMap<>();
replicas.put("connect", RedisReplicaInstance.State.CONNECT);
replicas.put("connected", RedisReplicaInstance.State.CONNECTED);
replicas.put("connecting", RedisReplicaInstance.State.CONNECTING);
replicas.put("sync", RedisReplicaInstance.State.SYNC);
replicas.put("handshake", RedisReplicaInstance.State.HANDSHAKE);
replicas.put("none", RedisReplicaInstance.State.NONE);
replicas.put("unknown", RedisReplicaInstance.State.NONE);
REPLICA_STATE_MAPPING = Collections.unmodifiableMap(replicas);
}
/**
* Utility constructor.
*/
private RoleParser() {
}
/**
* Parse the output of the Redis ROLE command and convert to a RedisInstance.
*
* @param roleOutput output of the Redis ROLE command.
* @return RedisInstance
*/
public static RedisInstance parse(List<?> roleOutput) {
LettuceAssert.isTrue(roleOutput != null && !roleOutput.isEmpty(), "Empty role output");
LettuceAssert.isTrue(roleOutput.get(0) instanceof String && ROLE_MAPPING.containsKey(roleOutput.get(0)),
() -> "First role element must be a string (any of " + ROLE_MAPPING.keySet() + ")");
RedisInstance.Role role = ROLE_MAPPING.get(roleOutput.get(0));
switch (role) {
case MASTER:
case UPSTREAM:
return parseUpstream(roleOutput);
case SLAVE:
case REPLICA:
return parseReplica(roleOutput);
case SENTINEL:
return parseSentinel(roleOutput);
}
return null;
}
private static RedisInstance parseUpstream(List<?> roleOutput) {
long replicationOffset = getUpstreamReplicationOffset(roleOutput);
List<ReplicationPartner> replicas = getUpstreamReplicaReplicationPartners(roleOutput);
RedisMasterInstance redisUpstreamInstanceRole = new RedisMasterInstance(replicationOffset,
Collections.unmodifiableList(replicas));
return redisUpstreamInstanceRole;
}
private static RedisInstance parseReplica(List<?> roleOutput) {
Iterator<?> iterator = roleOutput.iterator();
iterator.next(); // skip first element
String ip = getStringFromIterator(iterator, "");
long port = getLongFromIterator(iterator, 0);
String stateString = getStringFromIterator(iterator, null);
long replicationOffset = getLongFromIterator(iterator, 0);
ReplicationPartner master = new ReplicationPartner(HostAndPort.of(ip, Math.toIntExact(port)), replicationOffset);
RedisReplicaInstance.State state = REPLICA_STATE_MAPPING.get(stateString);
if (state == null) {
throw new IllegalStateException("Cannot resolve Replica State for \"" + stateString + "\"");
}
return new RedisReplicaInstance(master, state);
}
private static RedisInstance parseSentinel(List<?> roleOutput) {
Iterator<?> iterator = roleOutput.iterator();
iterator.next(); // skip first element
List<String> monitoredMasters = getMonitoredUpstreams(iterator);
return new RedisSentinelInstance(Collections.unmodifiableList(monitoredMasters));
}
private static List<String> getMonitoredUpstreams(Iterator<?> iterator) {
List<String> monitoredUpstreams = new ArrayList<>();
if (!iterator.hasNext()) {
return monitoredUpstreams;
}
Object upstreams = iterator.next();
if (!(upstreams instanceof Collection)) {
return monitoredUpstreams;
}
for (Object upstream : (Collection) upstreams) {
if (upstream instanceof String) {
monitoredUpstreams.add((String) upstream);
}
}
return monitoredUpstreams;
}
private static List<ReplicationPartner> getUpstreamReplicaReplicationPartners(List<?> roleOutput) {
List<ReplicationPartner> replicas = new ArrayList<>();
if (roleOutput.size() > 2 && roleOutput.get(2) instanceof Collection) {
Collection<?> segments = (Collection<?>) roleOutput.get(2);
for (Object output : segments) {
if (!(output instanceof Collection<?>)) {
continue;
}
ReplicationPartner replicationPartner = getReplicationPartner((Collection<?>) output);
replicas.add(replicationPartner);
}
}
return replicas;
}
private static ReplicationPartner getReplicationPartner(Collection<?> segments) {
Iterator<?> iterator = segments.iterator();
String ip = getStringFromIterator(iterator, "");
long port = getLongFromIterator(iterator, 0);
long replicationOffset = getLongFromIterator(iterator, 0);
return new ReplicationPartner(HostAndPort.of(ip, Math.toIntExact(port)), replicationOffset);
}
private static long getLongFromIterator(Iterator<?> iterator, long defaultValue) {
if (iterator.hasNext()) {
Object object = iterator.next();
if (object instanceof String) {
return Long.parseLong((String) object);
}
if (object instanceof Number) {
return ((Number) object).longValue();
}
}
return defaultValue;
}
private static String getStringFromIterator(Iterator<?> iterator, String defaultValue) {
if (iterator.hasNext()) {
Object object = iterator.next();
if (object instanceof String) {
return (String) object;
}
}
return defaultValue;
}
private static long getUpstreamReplicationOffset(List<?> roleOutput) {
long replicationOffset = 0;
if (roleOutput.size() > 1 && roleOutput.get(1) instanceof Number) {
Number number = (Number) roleOutput.get(1);
replicationOffset = number.longValue();
}
return replicationOffset;
}
}
|
RoleParser
|
java
|
quarkusio__quarkus
|
extensions/vertx/deployment/src/test/java/io/quarkus/vertx/CodecRegistrationTest.java
|
{
"start": 9455,
"end": 9674
}
|
class ____ {
private final String name;
CustomType4(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
static
|
CustomType4
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/DistributedSchedulingAllocateRequest.java
|
{
"start": 1498,
"end": 2567
}
|
class ____ {
/**
* Get the underlying <code>AllocateRequest</code> object.
* @return Allocate request
*/
@Public
@Evolving
public abstract AllocateRequest getAllocateRequest();
/**
* Set the underlying <code>AllocateRequest</code> object.
* @param allocateRequest Allocate request
*/
@Public
@Evolving
public abstract void setAllocateRequest(AllocateRequest allocateRequest);
/**
* Get the list of <em>newly allocated</em> <code>Container</code> by the
* Distributed Scheduling component on the NodeManager.
* @return list of <em>newly allocated</em> <code>Container</code>
*/
@Public
@Evolving
public abstract List<Container> getAllocatedContainers();
/**
* Set the list of <em>newly allocated</em> <code>Container</code> by the
* Distributed Scheduling component on the NodeManager.
* @param containers list of <em>newly allocated</em> <code>Container</code>
*/
@Public
@Evolving
public abstract void setAllocatedContainers(List<Container> containers);
}
|
DistributedSchedulingAllocateRequest
|
java
|
apache__camel
|
components/camel-telemetry-dev/src/main/java/org/apache/camel/telemetrydev/DevTrace.java
|
{
"start": 2091,
"end": 3843
}
|
class ____ implements Iterator<DevSpanAdapter> {
Stack<DevSpanAdapter> actual = new Stack<>();
private HashMap<String, Boolean> scanned;
DevSpanAdapterIterator() {
this.scanned = new HashMap<>();
this.actual = new Stack<>();
}
@Override
public boolean hasNext() {
return scanned.size() < spans.size();
}
@Override
public DevSpanAdapter next() {
DevSpanAdapter next;
if (actual.empty()) {
next = getWithParent(null);
} else {
next = getWithParent(actual.peek().getSpanId());
}
while (next == null && !actual.empty()) {
// it's a leaf, let's find out the upper branch
DevSpanAdapter upperLevel = actual.pop();
next = getWithParent(upperLevel.getParentSpanId());
}
if (next != null) {
actual.push(next);
scanned.put(next.getSpanId(), true);
}
return next;
}
private DevSpanAdapter getWithParent(String parentSpanId) {
for (DevSpanAdapter span : spans) {
if (parentSpanId == null &&
span.getParentSpanId() == null &&
!scanned.containsKey(span.getSpanId())) {
return span;
}
if (span.getParentSpanId() != null &&
span.getParentSpanId().equals(parentSpanId) &&
!scanned.containsKey(span.getSpanId())) {
return span;
}
}
return null;
}
}
}
|
DevSpanAdapterIterator
|
java
|
apache__camel
|
components/camel-cassandraql/src/test/java/org/apache/camel/component/cassandra/integration/CassandraComponentBeanRefIT.java
|
{
"start": 1168,
"end": 1997
}
|
class ____ extends BaseCassandra {
    // Parameterized insert; used both as the endpoint's cql option and in the assertion below.
    public static final String CQL = "insert into camel_user(login, first_name, last_name) values (?, ?, ?)";
    // "bean:" scheme makes the endpoint resolve the session from the registry entry "cassandraSession".
    public static final String SESSION_URI = "cql:bean:cassandraSession?cql=" + CQL;
    // Session published to the Camel registry so the cql endpoint can look it up by name.
    @BindToRegistry("cassandraSession")
    private CqlSession session = getSession();
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                from("direct:inputSession").to(SESSION_URI);
            }
        };
    }
    // Verifies that an endpoint built from a bean-referenced session exposes the
    // expected keyspace and the cql option parsed from the URI.
    @Test
    public void testSession() {
        CassandraEndpoint endpoint = getMandatoryEndpoint(SESSION_URI, CassandraEndpoint.class);
        assertEquals(KEYSPACE_NAME, endpoint.getKeyspace());
        assertEquals(CQL, endpoint.getCql());
    }
}
|
CassandraComponentBeanRefIT
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_hasSecond_Test.java
|
{
"start": 1062,
"end": 1974
}
|
class ____ {

    @Test
    void should_pass_if_actual_is_in_given_second() {
        LocalDateTime dateTime = LocalDateTime.of(2021, 12, 31, 23, 59, 55);
        // second-of-minute matches -> assertion succeeds
        then(dateTime).hasSecond(55);
    }

    @Test
    void should_fail_if_actual_is_not_in_given_second() {
        LocalDateTime dateTime = LocalDateTime.of(2021, 12, 31, 23, 59, 55);
        int expectedSecond = 58;
        AssertionError assertionError = expectAssertionError(() -> assertThat(dateTime).hasSecond(expectedSecond));
        // the failure message must point at the mismatching "second" field
        then(assertionError).hasMessage(shouldHaveDateField(dateTime, "second", expectedSecond).create());
    }

    @Test
    void should_fail_if_actual_is_null() {
        LocalDateTime dateTime = null;
        AssertionError assertionError = expectAssertionError(() -> assertThat(dateTime).hasSecond(1));
        // null actual produces the standard "expecting actual not to be null" message
        then(assertionError).hasMessage(actualIsNull());
    }
}
|
LocalDateTimeAssert_hasSecond_Test
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
|
{
"start": 1942,
"end": 6235
}
|
class ____ implements Configurable, Closeable {
public static Logger LOG = LoggerFactory.getLogger(CryptoCodec.class);
/**
* Get crypto codec for specified algorithm/mode/padding.
*
* @param conf
* the configuration
* @param cipherSuite
* algorithm/mode/padding
* @return CryptoCodec the codec object. Null value will be returned if no
* crypto codec classes with cipher suite configured.
*/
public static CryptoCodec getInstance(Configuration conf,
CipherSuite cipherSuite) {
List<Class<? extends CryptoCodec>> klasses = getCodecClasses(
conf, cipherSuite);
if (klasses == null) {
return null;
}
CryptoCodec codec = null;
for (Class<? extends CryptoCodec> klass : klasses) {
try {
CryptoCodec c = ReflectionUtils.newInstance(klass, conf);
if (c.getCipherSuite().getName().equals(cipherSuite.getName())) {
if (codec == null) {
PerformanceAdvisory.LOG.debug("Using crypto codec {}.",
klass.getName());
codec = c;
}
} else {
PerformanceAdvisory.LOG.debug(
"Crypto codec {} doesn't meet the cipher suite {}.",
klass.getName(), cipherSuite.getName());
}
} catch (Exception e) {
PerformanceAdvisory.LOG.debug("Crypto codec {} is not available.",
klass.getName());
}
}
return codec;
}
/**
* Get crypto codec for algorithm/mode/padding in config value
* hadoop.security.crypto.cipher.suite
*
* @param conf
* the configuration
* @return CryptoCodec the codec object Null value will be returned if no
* crypto codec classes with cipher suite configured.
*/
public static CryptoCodec getInstance(Configuration conf) {
String name = conf.get(HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY,
HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_DEFAULT);
return getInstance(conf, CipherSuite.convert(name));
}
private static List<Class<? extends CryptoCodec>> getCodecClasses(
Configuration conf, CipherSuite cipherSuite) {
List<Class<? extends CryptoCodec>> result = Lists.newArrayList();
String configName = HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX +
cipherSuite.getConfigSuffix();
String codecString;
if (configName.equals(CommonConfigurationKeysPublic
.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY)) {
codecString = conf.get(configName, CommonConfigurationKeysPublic
.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_DEFAULT);
} else if (configName.equals(CommonConfigurationKeysPublic
.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_SM4_CTR_NOPADDING_KEY)){
codecString = conf.get(configName, CommonConfigurationKeysPublic
.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_SM4_CTR_NOPADDING_DEFAULT);
} else {
codecString = conf.get(configName);
}
if (codecString == null) {
PerformanceAdvisory.LOG.debug(
"No crypto codec classes with cipher suite configured.");
return null;
}
for (String c : Splitter.on(',').trimResults().omitEmptyStrings().
split(codecString)) {
try {
Class<?> cls = conf.getClassByName(c);
result.add(cls.asSubclass(CryptoCodec.class));
} catch (ClassCastException e) {
PerformanceAdvisory.LOG.debug("Class {} is not a CryptoCodec.", c);
} catch (ClassNotFoundException e) {
PerformanceAdvisory.LOG.debug("Crypto codec {} not found.", c);
}
}
return result;
}
/**
* @return the CipherSuite for this codec.
*/
public abstract CipherSuite getCipherSuite();
/**
* Create a {@link org.apache.hadoop.crypto.Encryptor}.
*
* @return Encryptor the encryptor.
* @throws GeneralSecurityException thrown if create encryptor error.
*/
public abstract Encryptor createEncryptor() throws GeneralSecurityException;
/**
* Create a {@link org.apache.hadoop.crypto.Decryptor}.
*
* @return Decryptor the decryptor
* @throws GeneralSecurityException thrown if create decryptor error.
*/
public abstract Decryptor createDecryptor() throws GeneralSecurityException;
/**
* This
|
CryptoCodec
|
java
|
bumptech__glide
|
library/test/src/test/java/com/bumptech/glide/util/ContentLengthInputStreamTest.java
|
{
"start": 741,
"end": 5538
}
|
class ____ {
  // Mocked inner stream so each test can script read()/available() behavior.
  @Mock private InputStream wrapped;
  @Before
  public void setUp() {
    MockitoAnnotations.initMocks(this);
  }
  // --- available(): derived from the Content-Length header when it parses ---
  @Test
  public void testAvailable_withZeroReadsAndValidContentLength_returnsContentLength()
      throws IOException {
    int value = 123356;
    InputStream is = ContentLengthInputStream.obtain(wrapped, String.valueOf(value));
    assertThat(is.available()).isEqualTo(value);
  }
  @Test
  public void testAvailable_withNullContentLength_returnsWrappedAvailable() throws IOException {
    InputStream is = ContentLengthInputStream.obtain(wrapped, null /*contentLengthHeader*/);
    int expected = 1234;
    when(wrapped.available()).thenReturn(expected);
    assertThat(is.available()).isEqualTo(expected);
  }
  @Test
  public void testAvailable_withInvalidContentLength_returnsWrappedAvailable() throws IOException {
    // Unparseable header falls back to delegating to the wrapped stream.
    InputStream is = ContentLengthInputStream.obtain(wrapped, "invalid_length");
    int expected = 567;
    when(wrapped.available()).thenReturn(expected);
    assertThat(is.available()).isEqualTo(expected);
  }
  // --- available() decreases as bytes are consumed ---
  @Test
  public void testAvailable_withRead_returnsContentLengthOffsetByRead() throws IOException {
    int contentLength = 999;
    InputStream is = ContentLengthInputStream.obtain(wrapped, String.valueOf(contentLength));
    when(wrapped.read()).thenReturn(1);
    assertThat(is.read()).isEqualTo(1);
    assertThat(is.available()).isEqualTo(contentLength - 1);
  }
  @Test
  public void testAvailable_handlesReadValueOfZero() throws IOException {
    // A byte value of 0 is valid data, not end-of-stream; it still counts as one byte read.
    int contentLength = 999;
    InputStream is = ContentLengthInputStream.obtain(wrapped, String.valueOf(contentLength));
    when(wrapped.read()).thenReturn(0);
    assertThat(is.read()).isEqualTo(0);
    assertThat(is.available()).isEqualTo(contentLength - 1);
  }
  @Test
  public void testAvailable_withReadBytes_returnsContentLengthOffsetByNumberOfBytes()
      throws IOException {
    int contentLength = 678;
    InputStream is = ContentLengthInputStream.obtain(wrapped, String.valueOf(contentLength));
    int read = 100;
    when(wrapped.read(any(byte[].class), anyInt(), anyInt())).thenReturn(read);
    assertThat(is.read(new byte[500], 0, 0)).isEqualTo(read);
    assertThat(is.available()).isEqualTo(contentLength - read);
  }
  // --- premature EOF (fewer bytes than Content-Length) must surface as IOException ---
  @Test
  public void testRead_whenReturnsLessThanZeroWithoutReadingAllContent_throwsIOException()
      throws IOException {
    int contentLength = 1;
    InputStream is = ContentLengthInputStream.obtain(wrapped, String.valueOf(contentLength));
    when(wrapped.read()).thenReturn(-1);
    try {
      //noinspection ResultOfMethodCallIgnored
      is.read();
      fail("Failed to throw expected exception");
    } catch (IOException e) {
      // Expected.
    }
  }
  @Test
  public void testReadBytes_whenReturnsLessThanZeroWithoutReadingAllContent_throwsIOException()
      throws IOException {
    int contentLength = 2;
    InputStream is = ContentLengthInputStream.obtain(wrapped, String.valueOf(contentLength));
    when(wrapped.read(any(byte[].class), anyInt(), anyInt())).thenReturn(-1);
    try {
      //noinspection ResultOfMethodCallIgnored
      is.read(new byte[10], 0, 0);
      fail("Failed to throw expected exception");
    } catch (IOException e) {
      // Expected.
    }
  }
  // --- with an unknown length, early EOF cannot be detected and must not throw ---
  @Test
  public void testRead_whenReturnsLessThanZeroWithInvalidLength_doesNotThrow() throws IOException {
    InputStream is = ContentLengthInputStream.obtain(wrapped, "invalid_length");
    when(wrapped.read()).thenReturn(-1);
    //noinspection ResultOfMethodCallIgnored
    is.read();
  }
  @Test
  public void testReadBytes_whenReturnsLessThanZeroWithInvalidLength_doesNotThrow()
      throws IOException {
    InputStream is = ContentLengthInputStream.obtain(wrapped, "invalid_length");
    when(wrapped.read(any(byte[].class), anyInt(), anyInt())).thenReturn(-1);
    //noinspection ResultOfMethodCallIgnored
    is.read(new byte[10], 0, 0);
  }
  // --- real-stream sanity checks: zero bytes and bytes with the high bit set ---
  @Test
  public void testRead_readWithZeroes_doesNotThrow() throws IOException {
    ByteArrayInputStream inner = new ByteArrayInputStream(new byte[] {0, 0, 0});
    InputStream is = ContentLengthInputStream.obtain(inner, 3);
    assertThat(is.read()).isEqualTo(0);
    assertThat(is.read()).isEqualTo(0);
    assertThat(is.read()).isEqualTo(0);
    assertThat(is.read()).isEqualTo(-1);
  }
  @Test
  public void testRead_readWithHighValues_doesNotThrow() throws IOException {
    // read() must return the unsigned value (0-255), never a sign-extended negative.
    ByteArrayInputStream inner =
        new ByteArrayInputStream(new byte[] {(byte) 0xF0, (byte) 0xA0, (byte) 0xFF});
    InputStream is = ContentLengthInputStream.obtain(inner, 3);
    assertThat(is.read()).isEqualTo(0xF0);
    assertThat(is.read()).isEqualTo(0xA0);
    assertThat(is.read()).isEqualTo(0xFF);
    assertThat(is.read()).isEqualTo(-1);
  }
}
|
ContentLengthInputStreamTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java
|
{
"start": 902,
"end": 6322
}
|
class ____ implements AggregatorFunction {
  // NOTE(review): lives under src/main/generated - presumably machine-generated;
  // edits should normally go to the generator/template instead.
  // Intermediate state: the running minimum plus a "seen any value" flag.
  private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
      new IntermediateStateDesc("min", ElementType.BOOLEAN),
      new IntermediateStateDesc("seen", ElementType.BOOLEAN) );
  private final DriverContext driverContext;
  private final BooleanState state;
  // Input channel positions; channels.get(0) is the value column.
  private final List<Integer> channels;
  public MinBooleanAggregatorFunction(DriverContext driverContext, List<Integer> channels,
      BooleanState state) {
    this.driverContext = driverContext;
    this.channels = channels;
    this.state = state;
  }
  public static MinBooleanAggregatorFunction create(DriverContext driverContext,
      List<Integer> channels) {
    return new MinBooleanAggregatorFunction(driverContext, channels, new BooleanState(MinBooleanAggregator.init()));
  }
  public static List<IntermediateStateDesc> intermediateStateDesc() {
    return INTERMEDIATE_STATE_DESC;
  }
  @Override
  public int intermediateBlockCount() {
    return INTERMEDIATE_STATE_DESC.size();
  }
  // Dispatch on the mask: skip fully-masked pages, take the fast path when
  // nothing is masked, otherwise filter row by row.
  @Override
  public void addRawInput(Page page, BooleanVector mask) {
    if (mask.allFalse()) {
      // Entire page masked away
    } else if (mask.allTrue()) {
      addRawInputNotMasked(page);
    } else {
      addRawInputMasked(page, mask);
    }
  }
  private void addRawInputMasked(Page page, BooleanVector mask) {
    BooleanBlock vBlock = page.getBlock(channels.get(0));
    BooleanVector vVector = vBlock.asVector();
    if (vVector == null) {
      // No dense vector form: fall back to the (possibly multi-valued) block path.
      addRawBlock(vBlock, mask);
      return;
    }
    addRawVector(vVector, mask);
  }
  private void addRawInputNotMasked(Page page) {
    BooleanBlock vBlock = page.getBlock(channels.get(0));
    BooleanVector vVector = vBlock.asVector();
    if (vVector == null) {
      addRawBlock(vBlock);
      return;
    }
    addRawVector(vVector);
  }
  // Dense vector: every position has exactly one value.
  private void addRawVector(BooleanVector vVector) {
    state.seen(true);
    for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
      boolean vValue = vVector.getBoolean(valuesPosition);
      state.booleanValue(MinBooleanAggregator.combine(state.booleanValue(), vValue));
    }
  }
  private void addRawVector(BooleanVector vVector, BooleanVector mask) {
    state.seen(true);
    for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
      if (mask.getBoolean(valuesPosition) == false) {
        continue;
      }
      boolean vValue = vVector.getBoolean(valuesPosition);
      state.booleanValue(MinBooleanAggregator.combine(state.booleanValue(), vValue));
    }
  }
  // Block path: positions may hold zero or many values; nulls leave state untouched.
  private void addRawBlock(BooleanBlock vBlock) {
    for (int p = 0; p < vBlock.getPositionCount(); p++) {
      int vValueCount = vBlock.getValueCount(p);
      if (vValueCount == 0) {
        continue;
      }
      state.seen(true);
      int vStart = vBlock.getFirstValueIndex(p);
      int vEnd = vStart + vValueCount;
      for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
        boolean vValue = vBlock.getBoolean(vOffset);
        state.booleanValue(MinBooleanAggregator.combine(state.booleanValue(), vValue));
      }
    }
  }
  private void addRawBlock(BooleanBlock vBlock, BooleanVector mask) {
    for (int p = 0; p < vBlock.getPositionCount(); p++) {
      if (mask.getBoolean(p) == false) {
        continue;
      }
      int vValueCount = vBlock.getValueCount(p);
      if (vValueCount == 0) {
        continue;
      }
      state.seen(true);
      int vStart = vBlock.getFirstValueIndex(p);
      int vEnd = vStart + vValueCount;
      for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
        boolean vValue = vBlock.getBoolean(vOffset);
        state.booleanValue(MinBooleanAggregator.combine(state.booleanValue(), vValue));
      }
    }
  }
  // Merge a partial (min, seen) pair produced by another node/driver.
  @Override
  public void addIntermediateInput(Page page) {
    assert channels.size() == intermediateBlockCount();
    assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
    Block minUncast = page.getBlock(channels.get(0));
    if (minUncast.areAllValuesNull()) {
      return;
    }
    BooleanVector min = ((BooleanBlock) minUncast).asVector();
    assert min.getPositionCount() == 1;
    Block seenUncast = page.getBlock(channels.get(1));
    if (seenUncast.areAllValuesNull()) {
      return;
    }
    BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
    assert seen.getPositionCount() == 1;
    if (seen.getBoolean(0)) {
      state.booleanValue(MinBooleanAggregator.combine(state.booleanValue(), min.getBoolean(0)));
      state.seen(true);
    }
  }
  @Override
  public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
    state.toIntermediate(blocks, offset, driverContext);
  }
  // Final result: null when no value was ever observed, else the running minimum.
  @Override
  public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
    if (state.seen() == false) {
      blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1);
      return;
    }
    blocks[offset] = driverContext.blockFactory().newConstantBooleanBlockWith(state.booleanValue(), 1);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName()).append("[");
    sb.append("channels=").append(channels);
    sb.append("]");
    return sb.toString();
  }
  @Override
  public void close() {
    state.close();
  }
}
|
MinBooleanAggregatorFunction
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/handler/annotation/support/DestinationVariableMethodArgumentResolverTests.java
|
{
"start": 1566,
"end": 3314
}
|
class ____ {
    // Resolver under test, with the default conversion service for String conversion.
    private final DestinationVariableMethodArgumentResolver resolver =
            new DestinationVariableMethodArgumentResolver(new DefaultConversionService());
    // Wraps the handleMessage(..) method below so individual parameters can be selected.
    private final ResolvableMethod resolvable =
            ResolvableMethod.on(getClass()).named("handleMessage").build();
    @Test
    void supportsParameter() {
        // Only parameters annotated with @DestinationVariable are supported.
        assertThat(resolver.supportsParameter(this.resolvable.annot(destinationVar().noValue()).arg())).isTrue();
        assertThat(resolver.supportsParameter(this.resolvable.annotNotPresent(DestinationVariable.class).arg())).isFalse();
    }
    @Test
    void resolveArgument() throws Exception {
        Map<String, Object> vars = new HashMap<>();
        vars.put("foo", "bar");
        vars.put("name", "value");
        Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeader(
                DestinationVariableMethodArgumentResolver.DESTINATION_TEMPLATE_VARIABLES_HEADER, vars).build();
        // Without an explicit value, the variable name defaults to the parameter name ("foo").
        MethodParameter param = this.resolvable.annot(destinationVar().noValue()).arg();
        Object result = this.resolver.resolveArgument(param, message);
        assertThat(result).isEqualTo("bar");
        // An explicit annotation value ("name") overrides the parameter name.
        param = this.resolvable.annot(destinationVar("name")).arg();
        result = this.resolver.resolveArgument(param, message);
        assertThat(result).isEqualTo("value");
    }
    @Test
    void resolveArgumentNotFound() {
        // Missing template-variables header -> resolution fails with MessageHandlingException.
        Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).build();
        assertThatExceptionOfType(MessageHandlingException.class).isThrownBy(() ->
                this.resolver.resolveArgument(this.resolvable.annot(destinationVar().noValue()).arg(), message));
    }
    // Fixture method; never invoked directly - only reflected over by ResolvableMethod.
    @SuppressWarnings("unused")
    private void handleMessage(
            @DestinationVariable String foo,
            @DestinationVariable(value = "name") String param1,
            String param3) {
    }
}
|
DestinationVariableMethodArgumentResolverTests
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/stream/StreamWriterTest_writeValueString.java
|
{
"start": 203,
"end": 673
}
|
class ____ extends TestCase {

    public void test_0() throws Exception {
        StringWriter buffer = new StringWriter();
        // Deliberately tiny capacity so the 13-char payload overflows it.
        SerializeWriter serializeWriter = new SerializeWriter(buffer, 10);
        Assert.assertEquals(10, serializeWriter.getBufferLength());
        // Writing more characters than the buffer holds must still produce
        // the full, quoted JSON string in the underlying writer.
        serializeWriter.writeString("abcde12345678");
        serializeWriter.close();
        Assert.assertEquals("\"abcde12345678\"", buffer.toString());
    }
}
|
StreamWriterTest_writeValueString
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/invoker/transformer/ReturnValueTransformerTest.java
|
{
"start": 1447,
"end": 2376
}
|
class ____ {
    // Transformer for String-returning invocations: upper-case the result.
    static String change(String result) {
        String upperCased = result.toUpperCase(Locale.ROOT);
        return upperCased;
    }

    // Transformer for Set-returning invocations: upper-case every element.
    static Set<String> change(Set<String> result) {
        Set<String> upperCased = new java.util.HashSet<>();
        for (String element : result) {
            upperCased.add(element.toUpperCase(Locale.ROOT));
        }
        return upperCased;
    }
}
    /**
     * Looks up the invokers for "hello" and "doSomething" and checks that the
     * return values went through the change(..) transformers (i.e. came back
     * upper-cased).
     */
    @Test
    public void test() throws Exception {
        InvokerHelper helper = Arc.container().instance(InvokerHelper.class).get();
        InstanceHandle<MyService> service = Arc.container().instance(MyService.class);
        Invoker<MyService, String> hello = helper.getInvoker("hello");
        assertEquals("FOOBAR0[]", hello.invoke(service.get(), new Object[] { 0, List.of() }));
        Invoker<MyService, Set<String>> doSomething = helper.getInvoker("doSomething");
        // "_" has no upper-case form, so only "quux" changes.
        assertEquals(Set.of("_", "QUUX"), doSomething.invoke(service.get(), new Object[] { "_" }));
    }
@Singleton
static
|
ReturnValueTransformer
|
java
|
spring-projects__spring-boot
|
module/spring-boot-security/src/main/java/org/springframework/boot/security/autoconfigure/SecurityProperties.java
|
{
"start": 1212,
"end": 2152
}
|
class ____ {

    /**
     * Default user name.
     */
    private String name = "user";

    /**
     * Password for the default user name. Randomly generated unless one is
     * configured explicitly.
     */
    private String password = UUID.randomUUID().toString();

    /**
     * Granted roles for the default user name.
     */
    private List<String> roles = new ArrayList<>();

    // True while the password is still the generated one above.
    private boolean passwordGenerated = true;

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getPassword() {
        return this.password;
    }

    public void setPassword(String password) {
        // Empty/null values are ignored so the generated password stays in place.
        if (StringUtils.hasLength(password)) {
            this.password = password;
            this.passwordGenerated = false;
        }
    }

    public List<String> getRoles() {
        return this.roles;
    }

    public void setRoles(List<String> roles) {
        // Defensive copy: keep an independent, mutable list.
        this.roles = new ArrayList<>(roles);
    }

    public boolean isPasswordGenerated() {
        return this.passwordGenerated;
    }
}
}
|
User
|
java
|
quarkusio__quarkus
|
integration-tests/opentelemetry-spi/src/test/java/io/quarkus/it/opentelemetry/spi/propagation/OpenTelemetryPropagatorsTest.java
|
{
"start": 232,
"end": 845
}
|
class ____ {
    /**
     * Verifies that both W3C trace-context headers and the AWS X-Ray header are
     * injected on the outgoing request observed by the /export/propagation
     * endpoint. The default baggage header is expected to be absent.
     */
    // Fix: dropped the declared NoSuchFieldException/IllegalAccessException -
    // nothing in this REST-assured call chain can throw them (leftover from a
    // reflection-based version of the test).
    @Test
    void test() {
        given()
                .contentType("application/json")
                .when().get("/export/propagation")
                .then()
                .statusCode(200)
                .body("", Matchers.containsInAnyOrder(
                        //default baggage will be missing
                        //W3C headers:
                        "traceparent",
                        "tracestate",
                        //XRAY headers:
                        "X-Amzn-Trace-Id"));
    }
}
|
OpenTelemetryPropagatorsTest
|
java
|
hibernate__hibernate-orm
|
tooling/hibernate-ant/src/main/java/org/hibernate/tool/hbm2ddl/SuppliedConnectionHelper.java
|
{
"start": 532,
"end": 1486
}
|
class ____ implements ConnectionHelper {
	// The externally supplied connection; nulled out on release.
	private Connection connection;
	// Whether release() must switch auto-commit back off again.
	private boolean autoCommitToggled;
	private final SqlExceptionHelper sqlExceptionHelper;

	public SuppliedConnectionHelper(Connection connection, SqlExceptionHelper sqlExceptionHelper) {
		this.connection = connection;
		this.sqlExceptionHelper = sqlExceptionHelper;
	}

	public void prepare(boolean needsAutoCommit) throws SQLException {
		autoCommitToggled = needsAutoCommit && !connection.getAutoCommit();
		if ( !autoCommitToggled ) {
			return;
		}
		// Flush pending work before flipping auto-commit on.
		try {
			connection.commit();
		}
		catch( Throwable ignore ) {
			// might happen with a managed connection
		}
		connection.setAutoCommit( true );
	}

	public Connection getConnection() {
		return connection;
	}

	public void release() throws SQLException {
		sqlExceptionHelper.logAndClearWarnings( connection );
		if ( autoCommitToggled ) {
			// Restore the auto-commit mode the caller handed us.
			connection.setAutoCommit( false );
		}
		connection = null;
	}
}
|
SuppliedConnectionHelper
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cascade/CodedPairHolder.java
|
{
"start": 398,
"end": 1560
}
|
class ____ implements Serializable {

    @Id
    @GeneratedValue
    @Column(name = "ID")
    private Long id;

    // Natural key: unique, immutable business code; also drives equals/hashCode.
    @Column(name = "CODE", nullable = false, unique = true, updatable = false, length = 256)
    private String code;

    private PersonPair pair;

    CodedPairHolder() {
        super();
    }

    CodedPairHolder(final String pCode, PersonPair pair) {
        super();
        this.code = pCode;
        this.pair = pair;
    }

    Long getId() {
        return this.id;
    }

    String getCode() {
        return this.code;
    }

    PersonPair getPair() {
        return this.pair;
    }

    @Override
    public int hashCode() {
        // Same value as the previous `prime * 1 + codeHash` formula.
        final int prime = 101;
        final String c = getCode();
        return prime + ((c == null) ? 0 : c.hashCode());
    }

    @Override
    public boolean equals(final Object pObject) {
        if (this == pObject) {
            return true;
        }
        // instanceof also rejects null, matching the old explicit null check.
        if (!(pObject instanceof CodedPairHolder)) {
            return false;
        }
        final CodedPairHolder other = (CodedPairHolder) pObject;
        // Equal when business codes match; two null codes count as equal.
        return java.util.Objects.equals(getCode(), other.getCode());
    }
}
|
CodedPairHolder
|
java
|
apache__flink
|
flink-filesystems/flink-gs-fs-hadoop/src/test/java/org/apache/flink/fs/gs/writer/GSRecoverableFsDataOutputStreamTest.java
|
{
"start": 1948,
"end": 9361
}
|
class ____ {
    // true -> stream is built fresh (no resume recoverable); the remaining
    // parameters describe the recoverable the stream resumes from.
    @Parameter private boolean empty;
    @Parameter(value = 1)
    @Nullable
    private String temporaryBucketName;
    @Parameter(value = 2)
    private int componentObjectCount;
    @Parameter(value = 3)
    private long position;
    @Parameter(value = 4)
    private boolean closed;
    @Parameters(
            name =
                    "empty={0}, temporaryBucketName={1}, componentObjectCount={2}, position={3}, closed={4}")
    private static Collection<Object[]> data() {
        return Arrays.asList(
                new Object[][] {
                    // not empty, no explicit temporary bucket, 0 components, position=0, not closed
                    {false, null, 0, 0, false},
                    // not empty, no explicit temporary bucket, 4 components, position=32, not
                    // closed
                    {false, null, 4, 32, true},
                    // not empty, explicit temporary bucket, 4 components, position=32, not closed
                    {false, "temporary-bucket", 4, 32, false},
                    // not empty, explicit temporary bucket, 4 components, position=64, closed
                    {false, "temporary-bucket", 4, 64, true},
                    // empty, no explicit temporary bucket, 0 components, position=0, not closed
                    {true, null, 0, 0, false},
                    // empty, explicit temporary bucket, 0 components, position=0, not closed
                    {true, "temporary-bucket", 0, 0, false},
                });
    }
    // Seeded random for reproducible payloads.
    private Random random;
    private GSFileSystemOptions options;
    private MockBlobStorage blobStorage;
    private ArrayList<UUID> componentObjectIds;
    private GSRecoverableFsDataOutputStream fsDataOutputStream;
    private GSBlobIdentifier blobIdentifier;
    private byte byteValue;
    // Builds the stream either fresh ("empty") or resumed from a synthetic
    // GSResumeRecoverable matching the current parameter set.
    @BeforeEach
    void before() {
        random = new Random(TestUtils.RANDOM_SEED);
        blobIdentifier = new GSBlobIdentifier("foo", "bar");
        byteValue = (byte) 167;
        Configuration flinkConfig = new Configuration();
        if (temporaryBucketName != null) {
            flinkConfig.set(GSFileSystemOptions.WRITER_TEMPORARY_BUCKET_NAME, temporaryBucketName);
        }
        componentObjectIds = new ArrayList<>();
        for (int i = 0; i < componentObjectCount; i++) {
            componentObjectIds.add(UUID.randomUUID());
        }
        options = new GSFileSystemOptions(flinkConfig);
        blobStorage = new MockBlobStorage();
        if (empty) {
            fsDataOutputStream =
                    new GSRecoverableFsDataOutputStream(blobStorage, options, blobIdentifier);
        } else {
            GSResumeRecoverable resumeRecoverable =
                    new GSResumeRecoverable(blobIdentifier, componentObjectIds, position, closed);
            fsDataOutputStream =
                    new GSRecoverableFsDataOutputStream(blobStorage, options, resumeRecoverable);
        }
    }
    @TestTemplate
    void emptyStreamShouldHaveProperPositionAndComponentObjectCount() {
        if (empty) {
            assertThat(position).isZero();
            assertThat(componentObjectCount).isZero();
        }
    }
    @TestTemplate
    void shouldConstructStream() throws IOException {
        if (empty) {
            assertThat(fsDataOutputStream.getPos()).isEqualTo(0);
        } else {
            assertThat(fsDataOutputStream.getPos()).isEqualTo(position);
        }
    }
    @TestTemplate
    void shouldReturnPosition() throws IOException {
        assertThat(fsDataOutputStream.getPos()).isEqualTo(position);
    }
    // Shared body for the write tests: run the write, check the position
    // advanced by the payload size, then commit and verify exactly one blob
    // exists containing expectedContent.
    private void writeContent(ThrowingRunnable<IOException> write, byte[] expectedContent)
            throws IOException {
        // write the byte, confirm position change and existence of write channel
        assertThat(fsDataOutputStream.getPos()).isEqualTo(position);
        write.run();
        assertThat(fsDataOutputStream.getPos()).isEqualTo(position + expectedContent.length);
        // close and persist. there should be exactly zero blobs before and one after, with this
        // byte value in it
        assertThat(blobStorage.blobs).isEmpty();
        fsDataOutputStream.closeForCommit();
        assertThat(blobStorage.blobs).hasSize(1);
        GSBlobIdentifier blobIdentifier =
                blobStorage.blobs.keySet().toArray(new GSBlobIdentifier[0])[0];
        MockBlobStorage.BlobValue blobValue = blobStorage.blobs.get(blobIdentifier);
        assertThat(blobValue).isNotNull();
        assertThat(blobValue.content).isEqualTo(expectedContent);
    }
    private void writeByte() throws IOException {
        writeContent(() -> fsDataOutputStream.write(byteValue), new byte[] {byteValue});
    }
    // Writes to an already-closed stream must fail with IOException.
    @TestTemplate
    void shouldWriteByte() throws IOException {
        if (closed) {
            assertThatThrownBy(this::writeByte).isInstanceOf(IOException.class);
        } else {
            writeByte();
        }
    }
    private void writeArray() throws IOException {
        byte[] bytes = new byte[64];
        random.nextBytes(bytes);
        writeContent(() -> fsDataOutputStream.write(bytes), bytes);
    }
    @TestTemplate
    void shouldWriteArray() throws IOException {
        if (closed) {
            assertThatThrownBy(this::writeArray).isInstanceOf(IOException.class);
        } else {
            writeArray();
        }
    }
    // Only the [start, start+length) slice must end up in the blob.
    private void writeArraySlice() throws IOException {
        final int start = 4;
        final int length = 10;
        byte[] bytes = new byte[64];
        random.nextBytes(bytes);
        writeContent(
                () -> fsDataOutputStream.write(bytes, start, length),
                Arrays.copyOfRange(bytes, start, start + length));
    }
    @TestTemplate
    void shouldWriteArraySlice() throws IOException {
        if (closed) {
            assertThatThrownBy(this::writeArraySlice).isInstanceOf(IOException.class);
        } else {
            writeArraySlice();
        }
    }
    @TestTemplate
    void shouldFlush() throws IOException {
        if (!closed) {
            fsDataOutputStream.write(byteValue);
            fsDataOutputStream.flush();
        }
    }
    @TestTemplate
    void shouldSync() throws IOException {
        if (!closed) {
            fsDataOutputStream.write(byteValue);
            fsDataOutputStream.sync();
        }
    }
    // persist() must round-trip the stream state into a GSResumeRecoverable.
    @TestTemplate
    void shouldPersist() throws IOException {
        if (!closed) {
            GSResumeRecoverable recoverable = (GSResumeRecoverable) fsDataOutputStream.persist();
            assertThat(recoverable.finalBlobIdentifier).isEqualTo(blobIdentifier);
            if (empty) {
                assertThat(recoverable.componentObjectIds).isEmpty();
            } else {
                assertThat(recoverable.componentObjectIds.toArray())
                        .isEqualTo(componentObjectIds.toArray());
            }
            assertThat(recoverable.position).isEqualTo(position);
            assertThat(recoverable.closed).isFalse();
        }
    }
    // Storage accepting fewer bytes than requested must surface as IOException.
    @TestTemplate
    void shouldFailOnPartialWrite() {
        if (!closed) {
            blobStorage.maxWriteCount = 1;
            byte[] bytes = new byte[2];
            random.nextBytes(bytes);
            assertThatThrownBy(() -> fsDataOutputStream.write(bytes))
                    .isInstanceOf(IOException.class);
        }
    }
}
|
GSRecoverableFsDataOutputStreamTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceDiagnosticsAction.java
|
{
"start": 1289,
"end": 1935
}
|
class ____ extends ActionType<GetInferenceDiagnosticsAction.Response> {
public static final GetInferenceDiagnosticsAction INSTANCE = new GetInferenceDiagnosticsAction();
public static final String NAME = "cluster:monitor/xpack/inference/diagnostics/get";
private static final TransportVersion ML_INFERENCE_ENDPOINT_CACHE = TransportVersion.fromName("ml_inference_endpoint_cache");
private static final TransportVersion INFERENCE_API_EIS_DIAGNOSTICS = TransportVersion.fromName("inference_api_eis_diagnostics");
public GetInferenceDiagnosticsAction() {
super(NAME);
}
public static
|
GetInferenceDiagnosticsAction
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/lucene/spatial/CentroidCalculator.java
|
{
"start": 1139,
"end": 1293
}
|
class ____ a running Kahan-sum of coordinates
* that are to be averaged in {@code TriangleTreeWriter} for use
* as the centroid of a shape.
*/
public
|
keeps
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/DeepInheritanceWithNonEntitiesProxyTest.java
|
{
"start": 63003,
"end": 63359
}
|
class ____ extends NonEntityAEntitySuperclass {
    // Nullable flag declared at this level of the hierarchy; exercised by the
    // deep-inheritance lazy-proxy tests.
    private Boolean fieldInAEntity;
    public AEntity(String id) {
        super(id);
    }
    // Protected no-arg constructor (typical persistence-provider requirement).
    protected AEntity() {
    }
    public Boolean getFieldInAEntity() {
        return fieldInAEntity;
    }
    public void setFieldInAEntity(Boolean fieldInAEntity) {
        this.fieldInAEntity = fieldInAEntity;
    }
}
public static
|
AEntity
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LeafExec.java
|
{
"start": 634,
"end": 1196
}
|
/**
 * Base class for physical plan nodes without children (plan tree leaves).
 */
class ____ extends PhysicalPlan {
    protected LeafExec(Source source) {
        // Leaf nodes never have children.
        super(source, Collections.emptyList());
    }
    @Override
    public final LeafExec replaceChildren(List<PhysicalPlan> newChildren) {
        throw new UnsupportedOperationException("this type of node doesn't have any children to replace");
    }
    @Override
    public void execute(Session session, ActionListener<Page> listener) {
        // Narrow the generic Session to the SQL-specific variant for subclasses.
        execute((SqlSession) session, listener);
    }
    abstract void execute(SqlSession session, ActionListener<Page> listener);
}
|
LeafExec
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlCreateServerStatement.java
|
{
"start": 899,
"end": 3600
}
|
/**
 * AST node for MySQL's CREATE SERVER statement. Each setter wires the child
 * node's parent pointer back to this statement before storing it.
 */
class ____ extends MySqlStatementImpl implements SQLCreateStatement {
    private SQLName name;
    private SQLName foreignDataWrapper;
    private SQLExpr host;
    private SQLExpr database;
    private SQLExpr user;
    private SQLExpr password;
    private SQLExpr socket;
    private SQLExpr owner;
    private SQLExpr port;
    public SQLName getName() {
        return name;
    }
    // All setters follow the same pattern: re-parent the child, then store it.
    public void setName(SQLName x) {
        if (x != null) {
            x.setParent(this);
        }
        this.name = x;
    }
    public SQLName getForeignDataWrapper() {
        return foreignDataWrapper;
    }
    public void setForeignDataWrapper(SQLName x) {
        if (x != null) {
            x.setParent(this);
        }
        this.foreignDataWrapper = x;
    }
    public SQLExpr getHost() {
        return host;
    }
    public void setHost(SQLExpr x) {
        if (x != null) {
            x.setParent(this);
        }
        this.host = x;
    }
    public SQLExpr getDatabase() {
        return database;
    }
    public void setDatabase(SQLExpr x) {
        if (x != null) {
            x.setParent(this);
        }
        this.database = x;
    }
    public SQLExpr getUser() {
        return user;
    }
    public void setUser(SQLExpr x) {
        if (x != null) {
            x.setParent(this);
        }
        this.user = x;
    }
    public SQLExpr getPassword() {
        return password;
    }
    public void setPassword(SQLExpr x) {
        if (x != null) {
            x.setParent(this);
        }
        this.password = x;
    }
    public SQLExpr getSocket() {
        return socket;
    }
    public void setSocket(SQLExpr x) {
        if (x != null) {
            x.setParent(this);
        }
        this.socket = x;
    }
    public SQLExpr getOwner() {
        return owner;
    }
    public void setOwner(SQLExpr x) {
        if (x != null) {
            x.setParent(this);
        }
        this.owner = x;
    }
    public SQLExpr getPort() {
        return port;
    }
    public void setPort(SQLExpr x) {
        if (x != null) {
            x.setParent(this);
        }
        this.port = x;
    }
    // Visitor entry point: descend into children only if visit(this) allows it.
    public void accept0(MySqlASTVisitor visitor) {
        if (visitor.visit(this)) {
            acceptChild(visitor, name);
            acceptChild(visitor, foreignDataWrapper);
            acceptChild(visitor, host);
            acceptChild(visitor, database);
            acceptChild(visitor, user);
            acceptChild(visitor, password);
            acceptChild(visitor, socket);
            acceptChild(visitor, owner);
            acceptChild(visitor, port);
        }
        visitor.endVisit(this);
    }
}
|
MySqlCreateServerStatement
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/MultipleInheritanceTest.java
|
{
"start": 3072,
"end": 3190
}
|
class ____ {
@EmbeddedId
private CarOptionalPK id;
private String name;
@Embeddable
public static
|
CarOptional
|
java
|
apache__camel
|
components/camel-undertow/src/test/java/org/apache/camel/component/undertow/rest/RestUndertowProducerVerbUpperCaseTest.java
|
{
"start": 1100,
"end": 2510
}
|
class ____ extends BaseUndertowTest {
@Test
public void testVerbUpperCase() {
String out = fluentTemplate.withHeader("id", "123").to("direct:start").request(String.class);
assertEquals("123;Donald Duck", out);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// configure to use undertow on localhost with the given port
restConfiguration().component("undertow").host("localhost").port(getPort());
from("direct:start")
.to("rest:get:users/{id}/basic");
// use the rest DSL to define the rest services
rest("/users/")
.get("{id}/basic")
.to("direct:basic");
from("direct:basic")
.to("mock:input")
.process(exchange -> {
String method = exchange.getIn().getHeader(Exchange.HTTP_METHOD, String.class);
assertEquals("GET", method);
String id = exchange.getIn().getHeader("id", String.class);
exchange.getMessage().setBody(id + ";Donald Duck");
});
}
};
}
}
|
RestUndertowProducerVerbUpperCaseTest
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/checkpointing/RescalingITCase.java
|
{
"start": 30050,
"end": 31894
}
|
class ____ extends RichParallelSourceFunction<Integer> {
private static final long serialVersionUID = -400066323594122516L;
private final int numberKeys;
private final int numberElements;
private final boolean terminateAfterEmission;
protected int counter = 0;
private boolean running = true;
SubtaskIndexSource(int numberKeys, int numberElements, boolean terminateAfterEmission) {
this.numberKeys = numberKeys;
this.numberElements = numberElements;
this.terminateAfterEmission = terminateAfterEmission;
}
@Override
public void run(SourceContext<Integer> ctx) throws Exception {
final Object lock = ctx.getCheckpointLock();
final int subtaskIndex = getRuntimeContext().getTaskInfo().getIndexOfThisSubtask();
while (running) {
if (counter < numberElements) {
synchronized (lock) {
for (int value = subtaskIndex;
value < numberKeys;
value +=
getRuntimeContext()
.getTaskInfo()
.getNumberOfParallelSubtasks()) {
ctx.collect(value);
}
counter++;
}
} else {
if (terminateAfterEmission) {
running = false;
} else {
Thread.sleep(100);
}
}
}
}
@Override
public void cancel() {
running = false;
}
}
private static
|
SubtaskIndexSource
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestLogAggregationMetaCollector.java
|
{
"start": 3076,
"end": 14781
}
|
class ____
extends FakeLogAggregationFileController {
private Map<ImmutablePair<String, String>,
Map<String, List<ContainerLogFileInfo>>> logFiles;
private List<FileStatus> appDirs;
private List<FileStatus> nodeFiles;
FakeNodeFileController(
Map<ImmutablePair<String, String>, Map<String,
List<ContainerLogFileInfo>>> logFiles, List<FileStatus> appDirs,
List<FileStatus> nodeFiles) {
this.logFiles = logFiles;
this.appDirs = appDirs;
this.nodeFiles = nodeFiles;
}
@Override
public RemoteIterator<FileStatus> getApplicationDirectoriesOfUser(
String user) throws IOException {
return new RemoteIterator<FileStatus>() {
private Iterator<FileStatus> iter = appDirs.iterator();
@Override
public boolean hasNext() throws IOException {
return iter.hasNext();
}
@Override
public FileStatus next() throws IOException {
return iter.next();
}
};
}
@Override
public RemoteIterator<FileStatus> getNodeFilesOfApplicationDirectory(
FileStatus appDir) throws IOException {
return new RemoteIterator<FileStatus>() {
private Iterator<FileStatus> iter = nodeFiles.iterator();
@Override
public boolean hasNext() throws IOException {
return iter.hasNext();
}
@Override
public FileStatus next() throws IOException {
return iter.next();
}
};
}
@Override
public Map<String, List<ContainerLogFileInfo>> getLogMetaFilesOfNode(
ExtendedLogMetaRequest logRequest, FileStatus currentNodeFile,
ApplicationId appId) throws IOException {
return logFiles.get(new ImmutablePair<>(appId.toString(),
currentNodeFile.getPath().getName()));
}
}
@BeforeEach
public void setUp() throws Exception {
fileController = createFileController();
}
@AfterEach
public void tearDown() throws Exception {
}
@Test
void testAllNull() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
request.setAppId(null);
request.setContainerId(null);
request.setFileName(null);
request.setFileSize(null);
request.setModificationTime(null);
request.setNodeId(null);
request.setUser(null);
LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
request.build(), new YarnConfiguration());
List<ContainerLogMeta> res = collector.collect(fileController);
List<ContainerLogFileInfo> allFile = res.stream()
.flatMap(m -> m.getContainerLogMeta().stream())
.collect(Collectors.toList());
assertEquals(8, allFile.size());
}
@Test
void testAllSet() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
Set<String> fileSizeExpressions = new HashSet<>();
fileSizeExpressions.add("<51");
Set<String> modificationTimeExpressions = new HashSet<>();
modificationTimeExpressions.add("<1000");
request.setAppId(app.toString());
request.setContainerId(attemptContainer.toString());
request.setFileName(String.format("%s.*", SMALL_FILE_NAME));
request.setFileSize(fileSizeExpressions);
request.setModificationTime(modificationTimeExpressions);
request.setNodeId(TEST_NODE);
request.setUser("TEST");
LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
request.build(), new YarnConfiguration());
List<ContainerLogMeta> res = collector.collect(fileController);
List<ContainerLogFileInfo> allFile = res.stream()
.flatMap(m -> m.getContainerLogMeta().stream())
.collect(Collectors.toList());
assertEquals(1, allFile.size());
}
@Test
void testSingleNodeRequest() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
request.setAppId(null);
request.setContainerId(null);
request.setFileName(null);
request.setFileSize(null);
request.setModificationTime(null);
request.setNodeId(TEST_NODE);
request.setUser(null);
LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
request.build(), new YarnConfiguration());
List<ContainerLogMeta> res = collector.collect(fileController);
List<ContainerLogFileInfo> allFile = res.stream()
.flatMap(m -> m.getContainerLogMeta().stream())
.collect(Collectors.toList());
assertEquals(4, allFile.stream().
filter(f -> f.getFileName().contains(TEST_NODE)).count());
}
@Test
void testMultipleNodeRegexRequest() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
request.setAppId(null);
request.setContainerId(null);
request.setFileName(null);
request.setFileSize(null);
request.setModificationTime(null);
request.setNodeId("TEST_NODE_.*");
request.setUser(null);
LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
request.build(), new YarnConfiguration());
List<ContainerLogMeta> res = collector.collect(fileController);
List<ContainerLogFileInfo> allFile = res.stream()
.flatMap(m -> m.getContainerLogMeta().stream())
.collect(Collectors.toList());
assertEquals(8, allFile.size());
}
@Test
void testMultipleFileRegex() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
request.setAppId(null);
request.setContainerId(null);
request.setFileName(String.format("%s.*", BIG_FILE_NAME));
request.setFileSize(null);
request.setModificationTime(null);
request.setNodeId(null);
request.setUser(null);
LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
request.build(), new YarnConfiguration());
List<ContainerLogMeta> res = collector.collect(fileController);
List<ContainerLogFileInfo> allFile = res.stream()
.flatMap(m -> m.getContainerLogMeta().stream())
.collect(Collectors.toList());
assertEquals(4, allFile.size());
assertTrue(allFile.stream().allMatch(
f -> f.getFileName().contains(BIG_FILE_NAME)));
}
@Test
void testContainerIdExactMatch() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
request.setAppId(null);
request.setContainerId(attemptContainer.toString());
request.setFileName(null);
request.setFileSize(null);
request.setModificationTime(null);
request.setNodeId(null);
request.setUser(null);
LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
request.build(), new YarnConfiguration());
List<ContainerLogMeta> res = collector.collect(fileController);
List<ContainerLogFileInfo> allFile = res.stream()
.flatMap(m -> m.getContainerLogMeta().stream())
.collect(Collectors.toList());
assertEquals(2, allFile.size());
assertTrue(allFile.stream().allMatch(
f -> f.getFileName().contains(attemptContainer.toString())));
}
@Test
void testMultipleFileBetweenSize() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
Set<String> fileSizeExpressions = new HashSet<>();
fileSizeExpressions.add(">50");
fileSizeExpressions.add("<101");
request.setAppId(null);
request.setContainerId(null);
request.setFileName(null);
request.setFileSize(fileSizeExpressions);
request.setModificationTime(null);
request.setNodeId(null);
request.setUser(null);
LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
request.build(), new YarnConfiguration());
List<ContainerLogMeta> res = collector.collect(fileController);
List<ContainerLogFileInfo> allFile = res.stream()
.flatMap(m -> m.getContainerLogMeta().stream())
.collect(Collectors.toList());
assertEquals(4, allFile.size());
assertTrue(allFile.stream().allMatch(
f -> f.getFileSize().equals("100")));
}
@Test
void testInvalidQueryStrings() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
Set<String> fileSizeExpressions = new HashSet<>();
fileSizeExpressions.add("50");
fileSizeExpressions.add("101");
try {
request.setFileName("*");
fail("An error should be thrown due to an invalid regex");
} catch (IllegalArgumentException ignored) {
}
try {
request.setFileSize(fileSizeExpressions);
fail("An error should be thrown due to multiple exact match expression");
} catch (IllegalArgumentException ignored) {
}
}
private FakeNodeFileController createFileController() {
FileStatus appDir = new FileStatus();
appDir.setPath(new Path(String.format("test/%s", app.toString())));
FileStatus appDir2 = new FileStatus();
appDir2.setPath(new Path(String.format("test/%s", app2.toString())));
List<FileStatus> appDirs = new ArrayList<>();
appDirs.add(appDir);
appDirs.add(appDir2);
FileStatus nodeFile = new FileStatus();
nodeFile.setPath(new Path(String.format("test/%s", TEST_NODE)));
FileStatus nodeFile2 = new FileStatus();
nodeFile2.setPath(new Path(String.format("test/%s", TEST_NODE_2)));
List<FileStatus> nodeFiles = new ArrayList<>();
nodeFiles.add(nodeFile);
nodeFiles.add(nodeFile2);
Map<ImmutablePair<String, String>, Map<String,
List<ContainerLogFileInfo>>> internal = new HashMap<>();
internal.put(new ImmutablePair<>(app.toString(), TEST_NODE),
createLogFiles(TEST_NODE, attemptContainer));
internal.put(new ImmutablePair<>(app.toString(), TEST_NODE_2),
createLogFiles(TEST_NODE_2, attemptContainer2));
internal.put(new ImmutablePair<>(app2.toString(), TEST_NODE),
createLogFiles(TEST_NODE, attempt2Container));
internal.put(new ImmutablePair<>(app2.toString(), TEST_NODE_2),
createLogFiles(TEST_NODE_2, attempt2Container2));
return new FakeNodeFileController(internal, appDirs, nodeFiles);
}
private Map<String, List<ContainerLogFileInfo>> createLogFiles(
String nodeId, ContainerId... containerId) {
Map<String, List<ContainerLogFileInfo>> logFiles = new HashMap<>();
for (ContainerId c : containerId) {
List<ContainerLogFileInfo> files = new ArrayList<>();
ContainerLogFileInfo bigFile = new ContainerLogFileInfo();
bigFile.setFileName(generateFileName(
BIG_FILE_NAME, nodeId, c.toString()));
bigFile.setFileSize("100");
bigFile.setLastModifiedTime("1000");
ContainerLogFileInfo smallFile = new ContainerLogFileInfo();
smallFile.setFileName(generateFileName(
SMALL_FILE_NAME, nodeId, c.toString()));
smallFile.setFileSize("50");
smallFile.setLastModifiedTime("100");
files.add(bigFile);
files.add(smallFile);
logFiles.put(c.toString(), files);
}
return logFiles;
}
private String generateFileName(
String name, String nodeId, String containerId) {
return String.format("%s_%s_%s", name, nodeId, containerId);
}
}
|
FakeNodeFileController
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-api/src/test/java/org/apache/dubbo/rpc/support/TrieTreeTest.java
|
{
"start": 1094,
"end": 2017
}
|
class ____ {
private TrieTree trie;
@BeforeEach
void setUp() {
// Initialize the set of words before each test
Set<String> words = new HashSet<>();
words.add("apple");
words.add("App-le");
words.add("apply");
words.add("app_le.juice");
words.add("app-LE_juice");
// Initialize TrieTree
trie = new TrieTree(words);
}
@Test
void testSearchValidWords() {
// Test valid words
assertTrue(trie.search("apple"));
assertTrue(trie.search("App-LE"));
assertTrue(trie.search("apply"));
assertTrue(trie.search("app_le.juice"));
assertTrue(trie.search("app-LE_juice"));
}
@Test
void testSearchInvalidWords() {
// Test invalid words
assertFalse(trie.search("app"));
// Invalid character test
assertFalse(trie.search("app%le"));
}
}
|
TrieTreeTest
|
java
|
junit-team__junit5
|
junit-platform-engine/src/main/java/org/junit/platform/engine/support/discovery/EngineDiscoveryRequestResolver.java
|
{
"start": 8238,
"end": 10058
}
|
class ____ to this builder.
*
* @param classFilter predicate the resolved classes must satisfy; never
* {@code null}
* @return this builder for method chaining
*/
public Builder<T> addClassContainerSelectorResolver(Predicate<Class<?>> classFilter) {
Preconditions.notNull(classFilter, "classFilter must not be null");
return addClassContainerSelectorResolverWithContext(__ -> classFilter);
}
/**
* Add a predefined resolver that resolves {@link ClasspathRootSelector
* ClasspathRootSelectors}, {@link ModuleSelector ModuleSelectors}, and
* {@link PackageSelector PackageSelectors} into {@link ClassSelector
* ClassSelectors} by scanning for classes that satisfy the predicate
* created by the supplied {@code Function} in the respective class
* containers to this builder.
*
* @param classFilterCreator the function that will be called to create
* the predicate the resolved classes must satisfy; never
* {@code null}
* @return this builder for method chaining
*/
@API(status = EXPERIMENTAL, since = "6.0")
public Builder<T> addClassContainerSelectorResolverWithContext(
Function<InitializationContext<T>, Predicate<Class<?>>> classFilterCreator) {
Preconditions.notNull(classFilterCreator, "classFilterCreator must not be null");
return addSelectorResolver(context -> new ClassContainerSelectorResolver(classFilterCreator.apply(context),
context.getClassNameFilter()));
}
/**
* Add a predefined resolver that resolves {@link ClasspathRootSelector
* ClasspathRootSelectors}, {@link ModuleSelector ModuleSelectors}, and
* {@link PackageSelector PackageSelectors} into {@link ClasspathResourceSelector
* ClasspathResourceSelectors} by scanning for resources that satisfy the supplied
* predicate in the respective
|
containers
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
|
{
"start": 75054,
"end": 75201
}
|
class ____ a given sub class
// because the allow list may have gaps between classes
// example:
//
|
for
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/client/ClientConnectionEventsTest.java
|
{
"start": 3429,
"end": 4154
}
|
class ____ {
static final CountDownLatch OPEN_LATCH = new CountDownLatch(1);
static final CountDownLatch CLOSED_LATCH = new CountDownLatch(1);
static final AtomicReference<WebSocketClientConnection> OPEN_CONN = new AtomicReference<>();
static final AtomicReference<WebSocketClientConnection> CLOSED_CONN = new AtomicReference<>();
void onOpen(@ObservesAsync @Open WebSocketClientConnection connection) {
OPEN_CONN.set(connection);
OPEN_LATCH.countDown();
}
void onClose(@ObservesAsync @Closed WebSocketClientConnection connection) {
CLOSED_CONN.set(connection);
CLOSED_LATCH.countDown();
}
}
}
|
ObservingBean
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-sts/src/main/java/org/apache/camel/component/aws2/sts/STS2Operations.java
|
{
"start": 856,
"end": 941
}
|
enum ____ {
assumeRole,
getSessionToken,
getFederationToken
}
|
STS2Operations
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/builder/abstractGenericTarget/ImmutableParent.java
|
{
"start": 217,
"end": 906
}
|
class ____ implements Parent<ImmutableChild> {
private final int count;
private final ImmutableChild child;
private final Child nonGenericChild;
public ImmutableParent(Builder builder) {
this.count = builder.count;
this.child = builder.child;
this.nonGenericChild = builder.nonGenericChild;
}
public static Builder builder() {
return new Builder();
}
@Override
public Child getNonGenericChild() {
return nonGenericChild;
}
@Override
public int getCount() {
return count;
}
@Override
public ImmutableChild getChild() {
return child;
}
public static
|
ImmutableParent
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/DumpRoutesStrategy.java
|
{
"start": 992,
"end": 2683
}
|
interface ____ extends StaticService {
/**
* Service factory key.
*/
String FACTORY = "dump-routes";
/**
* Dump routes
*
* @param format xml or yaml
*/
void dumpRoutes(String format);
String getInclude();
/**
* Controls what to include in output.
*
* Possible values: all, routes, rests, routeConfigurations, routeTemplates, beans, dataFormats. Multiple values can
* be separated by comma. Default is routes.
*/
void setInclude(String include);
boolean isResolvePlaceholders();
/**
* Whether to resolve property placeholders in the dumped output. Default is true.
*/
void setResolvePlaceholders(boolean resolvePlaceholders);
boolean isUriAsParameters();
/**
* When dumping to YAML format, then this option controls whether endpoint URIs should be expanded into a key/value
* parameters.
*/
void setUriAsParameters(boolean uriAsParameters);
boolean isGeneratedIds();
/**
* Whether to include auto generated IDs in the dumped output. Default is false.
*/
void setGeneratedIds(boolean generatedIds);
boolean isLog();
/**
* Whether to log route dumps to Logger
*/
void setLog(boolean log);
String getOutput();
/**
* Whether to save route dumps to an output file.
*
* If the output is a filename, then all content is saved to this file. If the output is a directory name, then one
* or more files are saved to the directory, where the names are based on the original source file names, or auto
* generated names.
*/
void setOutput(String output);
}
|
DumpRoutesStrategy
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SpringRabbitMQEndpointBuilderFactory.java
|
{
"start": 69350,
"end": 78127
}
|
interface ____
extends
SpringRabbitMQEndpointConsumerBuilder,
SpringRabbitMQEndpointProducerBuilder {
default AdvancedSpringRabbitMQEndpointBuilder advanced() {
return (AdvancedSpringRabbitMQEndpointBuilder) this;
}
/**
* The connection factory to be use. A connection factory must be
* configured either on the component or endpoint.
*
* The option is a:
* <code>org.springframework.amqp.rabbit.connection.ConnectionFactory</code> type.
*
* Group: common
*
* @param connectionFactory the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder connectionFactory(org.springframework.amqp.rabbit.connection.ConnectionFactory connectionFactory) {
doSetProperty("connectionFactory", connectionFactory);
return this;
}
/**
* The connection factory to be use. A connection factory must be
* configured either on the component or endpoint.
*
* The option will be converted to a
* <code>org.springframework.amqp.rabbit.connection.ConnectionFactory</code> type.
*
* Group: common
*
* @param connectionFactory the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder connectionFactory(String connectionFactory) {
doSetProperty("connectionFactory", connectionFactory);
return this;
}
/**
* The name of the dead letter exchange.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param deadLetterExchange the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder deadLetterExchange(String deadLetterExchange) {
doSetProperty("deadLetterExchange", deadLetterExchange);
return this;
}
/**
* The type of the dead letter exchange.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: direct
* Group: common
*
* @param deadLetterExchangeType the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder deadLetterExchangeType(String deadLetterExchangeType) {
doSetProperty("deadLetterExchangeType", deadLetterExchangeType);
return this;
}
/**
* The name of the dead letter queue.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param deadLetterQueue the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder deadLetterQueue(String deadLetterQueue) {
doSetProperty("deadLetterQueue", deadLetterQueue);
return this;
}
/**
* The routing key for the dead letter exchange.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param deadLetterRoutingKey the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder deadLetterRoutingKey(String deadLetterRoutingKey) {
doSetProperty("deadLetterRoutingKey", deadLetterRoutingKey);
return this;
}
/**
* Specifies whether Camel ignores the ReplyTo header in messages. If
* true, Camel does not send a reply back to the destination specified
* in the ReplyTo header. You can use this option if you want Camel to
* consume from a route and you do not want Camel to automatically send
* back a reply message because another component in your code handles
* the reply message. You can also use this option if you want to use
* Camel as a proxy between different message brokers and you want to
* route message from one system to another.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disableReplyTo the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder disableReplyTo(boolean disableReplyTo) {
doSetProperty("disableReplyTo", disableReplyTo);
return this;
}
/**
* Specifies whether Camel ignores the ReplyTo header in messages. If
* true, Camel does not send a reply back to the destination specified
* in the ReplyTo header. You can use this option if you want Camel to
* consume from a route and you do not want Camel to automatically send
* back a reply message because another component in your code handles
* the reply message. You can also use this option if you want to use
* Camel as a proxy between different message brokers and you want to
* route message from one system to another.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disableReplyTo the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder disableReplyTo(String disableReplyTo) {
doSetProperty("disableReplyTo", disableReplyTo);
return this;
}
/**
* The queue(s) to use for consuming or producing messages. Multiple
* queue names can be separated by comma. If none has been configured
* then Camel will generate an unique id as the queue name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param queues the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder queues(String queues) {
doSetProperty("queues", queues);
return this;
}
/**
* The value of a routing key to use. Default is empty which is not
* helpful when using the default (or any direct) exchange, but fine if
* the exchange is a headers exchange for instance.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param routingKey the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder routingKey(String routingKey) {
doSetProperty("routingKey", routingKey);
return this;
}
/**
* Specifies whether to test the connection on startup. This ensures
* that when Camel starts that all the JMS consumers have a valid
* connection to the JMS broker. If a connection cannot be granted then
* Camel throws an exception on startup. This ensures that Camel is not
* started with failed connections. The JMS producers is tested as well.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param testConnectionOnStartup the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder testConnectionOnStartup(boolean testConnectionOnStartup) {
doSetProperty("testConnectionOnStartup", testConnectionOnStartup);
return this;
}
/**
* Specifies whether to test the connection on startup. This ensures
* that when Camel starts that all the JMS consumers have a valid
* connection to the JMS broker. If a connection cannot be granted then
* Camel throws an exception on startup. This ensures that Camel is not
* started with failed connections. The JMS producers is tested as well.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param testConnectionOnStartup the value to set
* @return the dsl builder
*/
default SpringRabbitMQEndpointBuilder testConnectionOnStartup(String testConnectionOnStartup) {
doSetProperty("testConnectionOnStartup", testConnectionOnStartup);
return this;
}
}
/**
* Advanced builder for endpoint for the Spring RabbitMQ component.
*/
public
|
SpringRabbitMQEndpointBuilder
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/annotation/AnnotationAwareOrderComparator.java
|
{
"start": 1142,
"end": 1734
}
|
interface ____ well as the
* {@link Order @Order} and {@link jakarta.annotation.Priority @Priority}
* annotations, with an order value provided by an {@code Ordered}
* instance overriding a statically defined annotation value (if any).
*
* <p>Consult the Javadoc for {@link OrderComparator} for details on the
* sort semantics for non-ordered objects.
*
* @author Juergen Hoeller
* @author Oliver Gierke
* @author Stephane Nicoll
* @since 2.0.1
* @see org.springframework.core.Ordered
* @see org.springframework.core.annotation.Order
* @see jakarta.annotation.Priority
*/
public
|
as
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilder.java
|
{
"start": 91818,
"end": 92389
}
|
class ____ implements Comparator<TopologyDescription.Subtopology>, Serializable {
@Override
public int compare(final TopologyDescription.Subtopology subtopology1,
final TopologyDescription.Subtopology subtopology2) {
if (subtopology1.equals(subtopology2)) {
return 0;
}
return subtopology1.id() - subtopology2.id();
}
}
private static final SubtopologyComparator SUBTOPOLOGY_COMPARATOR = new SubtopologyComparator();
public static final
|
SubtopologyComparator
|
java
|
grpc__grpc-java
|
netty/src/main/java/io/grpc/netty/NettyServerTransport.java
|
{
"start": 5953,
"end": 10151
}
|
class ____ implements ChannelFutureListener {
boolean done;
@Override
public void operationComplete(ChannelFuture future) throws Exception {
if (!done) {
done = true;
notifyTerminated(grpcHandler.connectionError());
}
}
}
ChannelHandler negotiationHandler = protocolNegotiator.newHandler(grpcHandler);
ChannelHandler bufferingHandler = new WriteBufferingAndExceptionHandler(negotiationHandler);
ChannelFutureListener terminationNotifier = new TerminationNotifier();
channelUnused.addListener(terminationNotifier);
channel.closeFuture().addListener(terminationNotifier);
channel.pipeline().addLast(bufferingHandler);
}
@Override
public ScheduledExecutorService getScheduledExecutorService() {
return channel.eventLoop();
}
@Override
public void shutdown() {
if (channel.isOpen()) {
channel.close();
}
}
@Override
public void shutdownNow(Status reason) {
if (channel.isOpen()) {
channel.writeAndFlush(new ForcefulCloseCommand(reason));
}
}
@Override
public InternalLogId getLogId() {
return logId;
}
/**
* For testing purposes only.
*/
Channel channel() {
return channel;
}
/**
* Accepts a throwable and returns the appropriate logging level. Uninteresting exceptions
* should not clutter the log.
*/
@VisibleForTesting
static Level getLogLevel(Throwable t) {
if (t.getClass().equals(IOException.class)
|| t.getClass().equals(SocketException.class)
|| QUIET_EXCEPTIONS.contains(t.getClass().getSimpleName())) {
return Level.FINE;
}
return Level.INFO;
}
private void notifyTerminated(Throwable t) {
if (t != null) {
connectionLog.log(getLogLevel(t), "Transport failed", t);
}
if (!terminated) {
terminated = true;
listener.transportTerminated();
}
}
@Override
public ListenableFuture<SocketStats> getStats() {
final SettableFuture<SocketStats> result = SettableFuture.create();
if (channel.eventLoop().inEventLoop()) {
// This is necessary, otherwise we will block forever if we get the future from inside
// the event loop.
result.set(getStatsHelper(channel));
return result;
}
channel.eventLoop().submit(
new Runnable() {
@Override
public void run() {
result.set(getStatsHelper(channel));
}
})
.addListener(
new GenericFutureListener<Future<Object>>() {
@Override
public void operationComplete(Future<Object> future) throws Exception {
if (!future.isSuccess()) {
result.setException(future.cause());
}
}
});
return result;
}
private SocketStats getStatsHelper(Channel ch) {
Preconditions.checkState(ch.eventLoop().inEventLoop());
return new SocketStats(
transportTracer.getStats(),
channel.localAddress(),
channel.remoteAddress(),
Utils.getSocketOptions(ch),
grpcHandler == null ? null : grpcHandler.getSecurityInfo());
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("logId", logId.getId())
.add("channel", channel)
.toString();
}
/**
* Creates the Netty handler to be used in the channel pipeline.
*/
private NettyServerHandler createHandler(
ServerTransportListener transportListener, ChannelPromise channelUnused) {
return NettyServerHandler.newHandler(
transportListener,
channelUnused,
streamTracerFactories,
transportTracer,
maxStreams,
autoFlowControl,
flowControlWindow,
maxHeaderListSize,
softLimitHeaderListSize,
maxMessageSize,
keepAliveTimeInNanos,
keepAliveTimeoutInNanos,
maxConnectionIdleInNanos,
maxConnectionAgeInNanos,
maxConnectionAgeGraceInNanos,
permitKeepAliveWithoutCalls,
permitKeepAliveTimeInNanos,
maxRstCount,
maxRstPeriodNanos,
eagAttributes);
}
}
|
TerminationNotifier
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/wiring/BeanWiringInfoTests.java
|
{
"start": 947,
"end": 2425
}
|
class ____ {
@Test
void ctorWithNullBeanName() {
assertThatIllegalArgumentException().isThrownBy(() ->
new BeanWiringInfo(null));
}
@Test
void ctorWithWhitespacedBeanName() {
assertThatIllegalArgumentException().isThrownBy(() ->
new BeanWiringInfo(" \t"));
}
@Test
void ctorWithEmptyBeanName() {
assertThatIllegalArgumentException().isThrownBy(() ->
new BeanWiringInfo(""));
}
@Test
void ctorWithNegativeIllegalAutowiringValue() {
assertThatIllegalArgumentException().isThrownBy(() ->
new BeanWiringInfo(-1, true));
}
@Test
void ctorWithPositiveOutOfRangeAutowiringValue() {
assertThatIllegalArgumentException().isThrownBy(() ->
new BeanWiringInfo(123871, true));
}
@Test
void usingAutowireCtorIndicatesAutowiring() {
BeanWiringInfo info = new BeanWiringInfo(BeanWiringInfo.AUTOWIRE_BY_NAME, true);
assertThat(info.indicatesAutowiring()).isTrue();
}
@Test
void usingBeanNameCtorDoesNotIndicateAutowiring() {
BeanWiringInfo info = new BeanWiringInfo("fooService");
assertThat(info.indicatesAutowiring()).isFalse();
}
@Test
void noDependencyCheckValueIsPreserved() {
BeanWiringInfo info = new BeanWiringInfo(BeanWiringInfo.AUTOWIRE_BY_NAME, true);
assertThat(info.getDependencyCheck()).isTrue();
}
@Test
void dependencyCheckValueIsPreserved() {
BeanWiringInfo info = new BeanWiringInfo(BeanWiringInfo.AUTOWIRE_BY_TYPE, false);
assertThat(info.getDependencyCheck()).isFalse();
}
}
|
BeanWiringInfoTests
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/JsonValueSerializationTest.java
|
{
"start": 1692,
"end": 2059
}
|
class ____
extends ValueClass<String>
{
public ToStringValueClass2(String value) { super(value); }
// Simple as well, but let's ensure that other getters won't matter...
@JsonProperty int getFoobar() { return 4; }
public String[] getSomethingElse() { return new String[] { "1", "a" }; }
}
static
|
ToStringValueClass2
|
java
|
apache__camel
|
components/camel-mina/src/test/java/org/apache/camel/component/mina/MinaVMTextlineProtocolTest.java
|
{
"start": 987,
"end": 1874
}
|
class ____ extends BaseMinaTest {
@Test
public void testMinaRoute() throws Exception {
MockEndpoint endpoint = getMockEndpoint("mock:result");
Object body = "Hello there!";
endpoint.expectedBodiesReceived(body);
template.sendBodyAndHeader(String.format("mina:vm://localhost:%1$s?textline=true&sync=false", getPort()), body,
"cheese", 123);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
fromF("mina:vm://localhost:%1$s?textline=true&sync=false", getPort())
.to("log:before?showAll=true")
.to("mock:result")
.to("log:after?showAll=true");
}
};
}
}
|
MinaVMTextlineProtocolTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/SlotOwner.java
|
{
"start": 951,
"end": 1136
}
|
interface ____ {
/**
* Return the given slot to the slot owner.
*
* @param logicalSlot to return
*/
void returnLogicalSlot(LogicalSlot logicalSlot);
}
|
SlotOwner
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/TestCheckpointStorageWorkerView.java
|
{
"start": 1070,
"end": 2398
}
|
class ____ implements CheckpointStorageWorkerView {
private final int maxStateSize;
private final MemCheckpointStreamFactory taskOwnedCheckpointStreamFactory;
private final CheckpointedStateScope taskOwnedStateScope;
public TestCheckpointStorageWorkerView(int maxStateSize) {
this(maxStateSize, CheckpointedStateScope.EXCLUSIVE);
}
private TestCheckpointStorageWorkerView(
int maxStateSize, CheckpointedStateScope taskOwnedStateScope) {
this.maxStateSize = maxStateSize;
this.taskOwnedCheckpointStreamFactory = new MemCheckpointStreamFactory(maxStateSize);
this.taskOwnedStateScope = taskOwnedStateScope;
}
@Override
public CheckpointStreamFactory resolveCheckpointStorageLocation(
long checkpointId, CheckpointStorageLocationReference reference) {
return new MemCheckpointStreamFactory(maxStateSize);
}
@Override
public CheckpointStateOutputStream createTaskOwnedStateStream() throws IOException {
return taskOwnedCheckpointStreamFactory.createCheckpointStateOutputStream(
taskOwnedStateScope);
}
@Override
public CheckpointStateToolset createTaskOwnedCheckpointStateToolset() {
return new NotDuplicatingCheckpointStateToolset();
}
}
|
TestCheckpointStorageWorkerView
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/customexceptions/UnwrappedExceptionTest.java
|
{
"start": 11272,
"end": 11499
}
|
class ____ extends IntermediateMappedException {
public UnwrapIfNoExactMatchDirectlyMappedException(Throwable cause) {
super(cause);
}
}
public static
|
UnwrapIfNoExactMatchDirectlyMappedException
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/cache/impl/PerpetualCache.java
|
{
"start": 864,
"end": 2024
}
|
class ____ implements Cache {
private final String id;
private final Map<Object, Object> cache = new HashMap<>();
public PerpetualCache(String id) {
this.id = id;
}
@Override
public String getId() {
return id;
}
@Override
public int getSize() {
return cache.size();
}
@Override
public void putObject(Object key, Object value) {
cache.put(key, value);
}
@Override
public Object getObject(Object key) {
return cache.get(key);
}
@Override
public Object removeObject(Object key) {
return cache.remove(key);
}
@Override
public void clear() {
cache.clear();
}
@Override
public boolean equals(Object o) {
if (getId() == null) {
throw new CacheException("Cache instances require an ID.");
}
if (this == o) {
return true;
}
if (!(o instanceof Cache)) {
return false;
}
Cache otherCache = (Cache) o;
return getId().equals(otherCache.getId());
}
@Override
public int hashCode() {
if (getId() == null) {
throw new CacheException("Cache instances require an ID.");
}
return getId().hashCode();
}
}
|
PerpetualCache
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/FindIdentifiersTest.java
|
{
"start": 20696,
"end": 21477
}
|
class ____ {
void doIt() {
for (int i = 0; i < 10; i++) {
// BUG: Diagnostic contains: [i]
String.format(Integer.toString(i));
}
for (int j : new int[] {0, 1, 2}) {
// BUG: Diagnostic contains: [j]
String.format(Integer.toString(j));
}
}
}
""")
.doTest();
}
@Test
public void findAllIdentsTryWithResources() {
CompilationTestHelper.newInstance(PrintIdents.class, getClass())
.addSourceLines(
"Test.java",
"""
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.*;
|
Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java
|
{
"start": 2337,
"end": 10597
}
|
class ____ extends PhysicalOptimizerRules.ParameterizedOptimizerRule<FilterExec, LocalPhysicalOptimizerContext> {
@Override
protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext ctx) {
PhysicalPlan plan = filterExec;
if (filterExec.child() instanceof EsQueryExec queryExec) {
plan = planFilterExec(filterExec, queryExec, ctx);
} else if (filterExec.child() instanceof EvalExec evalExec && evalExec.child() instanceof EsQueryExec queryExec) {
plan = planFilterExec(filterExec, evalExec, queryExec, ctx);
}
return plan;
}
private static PhysicalPlan planFilterExec(FilterExec filterExec, EsQueryExec queryExec, LocalPhysicalOptimizerContext ctx) {
LucenePushdownPredicates pushdownPredicates = LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags());
List<Expression> pushable = new ArrayList<>();
List<Expression> nonPushable = new ArrayList<>();
for (Expression exp : splitAnd(filterExec.condition())) {
switch (translatable(exp, pushdownPredicates).finish()) {
case NO -> nonPushable.add(exp);
case YES -> pushable.add(exp);
case RECHECK -> {
pushable.add(exp);
nonPushable.add(exp);
}
}
}
return rewrite(pushdownPredicates, filterExec, queryExec, pushable, nonPushable, List.of());
}
private static PhysicalPlan planFilterExec(
FilterExec filterExec,
EvalExec evalExec,
EsQueryExec queryExec,
LocalPhysicalOptimizerContext ctx
) {
LucenePushdownPredicates pushdownPredicates = LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags());
AttributeMap<Attribute> aliasReplacedBy = getAliasReplacedBy(evalExec);
List<Expression> pushable = new ArrayList<>();
List<Expression> nonPushable = new ArrayList<>();
for (Expression exp : splitAnd(filterExec.condition())) {
Expression resExp = exp.transformUp(ReferenceAttribute.class, r -> aliasReplacedBy.resolve(r, r));
switch (translatable(resExp, pushdownPredicates).finish()) {
case NO -> nonPushable.add(exp);
case YES -> pushable.add(exp);
case RECHECK -> {
nonPushable.add(exp);
nonPushable.add(exp);
}
}
}
// Replace field references with their actual field attributes
pushable.replaceAll(e -> e.transformDown(ReferenceAttribute.class, r -> aliasReplacedBy.resolve(r, r)));
return rewrite(pushdownPredicates, filterExec, queryExec, pushable, nonPushable, evalExec.fields());
}
static AttributeMap<Attribute> getAliasReplacedBy(EvalExec evalExec) {
AttributeMap.Builder<Attribute> aliasReplacedByBuilder = AttributeMap.builder();
evalExec.fields().forEach(alias -> {
if (alias.child() instanceof Attribute attr) {
aliasReplacedByBuilder.put(alias.toAttribute(), attr);
}
});
return aliasReplacedByBuilder.build();
}
private static PhysicalPlan rewrite(
LucenePushdownPredicates pushdownPredicates,
FilterExec filterExec,
EsQueryExec queryExec,
List<Expression> pushable,
List<Expression> nonPushable,
List<Alias> evalFields
) {
// Combine GT, GTE, LT and LTE in pushable to Range if possible
List<Expression> newPushable = combineEligiblePushableToRange(pushable);
if (newPushable.size() > 0) { // update the executable with pushable conditions
Query queryDSL = TRANSLATOR_HANDLER.asQuery(pushdownPredicates, Predicates.combineAnd(newPushable));
QueryBuilder planQuery = queryDSL.toQueryBuilder();
Queries.Clause combiningQueryClauseType = queryExec.hasScoring() ? Queries.Clause.MUST : Queries.Clause.FILTER;
var query = Queries.combine(combiningQueryClauseType, asList(queryExec.query(), planQuery));
queryExec = new EsQueryExec(
queryExec.source(),
queryExec.indexPattern(),
queryExec.indexMode(),
queryExec.output(),
queryExec.limit(),
queryExec.sorts(),
queryExec.estimatedRowSize(),
List.of(new EsQueryExec.QueryBuilderAndTags(query, List.of()))
);
// If the eval contains other aliases, not just field attributes, we need to keep them in the plan
PhysicalPlan plan = evalFields.isEmpty() ? queryExec : new EvalExec(filterExec.source(), queryExec, evalFields);
if (nonPushable.size() > 0) {
// update filter with remaining non-pushable conditions
return new FilterExec(filterExec.source(), plan, Predicates.combineAnd(nonPushable));
} else {
// prune Filter entirely
return plan;
}
} // else: nothing changes
return filterExec;
}
private static List<Expression> combineEligiblePushableToRange(List<Expression> pushable) {
List<EsqlBinaryComparison> bcs = new ArrayList<>();
List<Range> ranges = new ArrayList<>();
List<Expression> others = new ArrayList<>();
boolean changed = false;
pushable.forEach(e -> {
if (e instanceof GreaterThan || e instanceof GreaterThanOrEqual || e instanceof LessThan || e instanceof LessThanOrEqual) {
if (((EsqlBinaryComparison) e).right().foldable()) {
bcs.add((EsqlBinaryComparison) e);
} else {
others.add(e);
}
} else {
others.add(e);
}
});
for (int i = 0, step = 1; i < bcs.size() - 1; i += step, step = 1) {
BinaryComparison main = bcs.get(i);
for (int j = i + 1; j < bcs.size(); j++) {
BinaryComparison other = bcs.get(j);
if (main.left().semanticEquals(other.left())) {
// >/>= AND </<=
if ((main instanceof GreaterThan || main instanceof GreaterThanOrEqual)
&& (other instanceof LessThan || other instanceof LessThanOrEqual)) {
bcs.remove(j);
bcs.remove(i);
ranges.add(
new Range(
main.source(),
main.left(),
main.right(),
main instanceof GreaterThanOrEqual,
other.right(),
other instanceof LessThanOrEqual,
main.zoneId()
)
);
changed = true;
step = 0;
break;
}
// </<= AND >/>=
else if ((other instanceof GreaterThan || other instanceof GreaterThanOrEqual)
&& (main instanceof LessThan || main instanceof LessThanOrEqual)) {
bcs.remove(j);
bcs.remove(i);
ranges.add(
new Range(
main.source(),
main.left(),
other.right(),
other instanceof GreaterThanOrEqual,
main.right(),
main instanceof LessThanOrEqual,
main.zoneId()
)
);
changed = true;
step = 0;
break;
}
}
}
}
return changed ? CollectionUtils.combine(others, bcs, ranges) : pushable;
}
}
|
PushFiltersToSource
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedAllocatedPersistentTask.java
|
{
"start": 619,
"end": 2868
}
|
class ____ extends AllocatedPersistentTask {
private final LicensedFeature.Persistent licensedFeature;
private final String featureContext;
private final XPackLicenseState licenseState;
public LicensedAllocatedPersistentTask(
long id,
String type,
String action,
String description,
TaskId parentTask,
Map<String, String> headers,
LicensedFeature.Persistent feature,
String featureContext,
XPackLicenseState licenseState
) {
super(id, type, action, description, parentTask, headers);
this.licensedFeature = feature;
this.featureContext = featureContext;
this.licenseState = licenseState;
licensedFeature.startTracking(licenseState, featureContext);
}
private void stopTracking() {
licensedFeature.stopTracking(licenseState, featureContext);
}
@Override
protected final boolean markAsCancelled() {
stopTracking();
return doMarkAsCancelled();
}
protected boolean doMarkAsCancelled() {
return super.markAsCancelled();
}
@Override
public final void markAsCompleted() {
stopTracking();
doMarkAsCompleted();
}
protected void doMarkAsCompleted() {
super.markAsCompleted();
}
@Override
public final void markAsFailed(Exception e) {
stopTracking();
doMarkAsFailed(e);
}
protected void doMarkAsFailed(Exception e) {
super.markAsFailed(e);
}
@Override
public final void markAsLocallyAborted(String localAbortReason) {
stopTracking();
doMarkAsLocallyAborted(localAbortReason);
}
protected void doMarkAsLocallyAborted(String localAbortReason) {
super.markAsLocallyAborted(localAbortReason);
}
// this is made public for tests, and final to ensure it is not overridden with something that may throw
@Override
public final void init(
PersistentTasksService persistentTasksService,
TaskManager taskManager,
String persistentTaskId,
long allocationId
) {
super.init(persistentTasksService, taskManager, persistentTaskId, allocationId);
}
}
|
LicensedAllocatedPersistentTask
|
java
|
apache__hadoop
|
hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSDataBlocks.java
|
{
"start": 1799,
"end": 3524
}
|
class ____ {
/**
* Class logger.
*/
private static final Logger LOG = LoggerFactory.getLogger(
OBSDataBlocks.class);
private OBSDataBlocks() {
}
/**
* Validate args to a write command. These are the same validation checks
* expected for any implementation of {@code OutputStream.write()}.
*
* @param b byte array containing data
* @param off offset in array where to start
* @param len number of bytes to be written
* @throws NullPointerException for a null buffer
* @throws IndexOutOfBoundsException if indices are out of range
*/
static void validateWriteArgs(final byte[] b, final int off,
final int len) {
Preconditions.checkNotNull(b);
if (off < 0 || off > b.length || len < 0 || off + len > b.length
|| off + len < 0) {
throw new IndexOutOfBoundsException(
"write (b[" + b.length + "], " + off + ", " + len + ')');
}
}
/**
* Create a factory.
*
* @param owner factory owner
* @param name factory name -the option from {@link OBSConstants}.
* @return the factory, ready to be initialized.
* @throws IllegalArgumentException if the name is unknown.
*/
static BlockFactory createFactory(final OBSFileSystem owner,
final String name) {
switch (name) {
case OBSConstants.FAST_UPLOAD_BUFFER_ARRAY:
return new ByteArrayBlockFactory(owner);
case OBSConstants.FAST_UPLOAD_BUFFER_DISK:
return new DiskBlockFactory(owner);
case OBSConstants.FAST_UPLOAD_BYTEBUFFER:
return new ByteBufferBlockFactory(owner);
default:
throw new IllegalArgumentException(
"Unsupported block buffer" + " \"" + name + '"');
}
}
/**
* Base
|
OBSDataBlocks
|
java
|
quarkusio__quarkus
|
extensions/redis-cache/deployment/src/test/java/io/quarkus/cache/redis/deployment/PojoAndMultipleKeysCacheTest.java
|
{
"start": 2592,
"end": 2932
}
|
class ____ {
@CacheResult(cacheName = "message")
public Uni<Message> getMessage(@CacheKey String key, String notPartOfTheKey, @CacheKey String otherKey) {
return Uni.createFrom().item(new Message(UUID.randomUUID().toString(), ThreadLocalRandom.current().nextInt()));
}
}
public static
|
CachedService
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java
|
{
"start": 19490,
"end": 19922
}
|
class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
if (dialect instanceof DerbyDialect
|| dialect instanceof FirebirdDialect
|| dialect instanceof InformixDialect) {
return false;
}
try {
dialect.appendDatetimeFormat( new StringBuilderSqlAppender(), "" );
return true;
}
catch (Exception ex) {
return false;
}
}
}
public static
|
SupportsDateTimeTruncation
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/querydsl/query/MatchAll.java
|
{
"start": 485,
"end": 758
}
|
class ____ extends LeafQuery {
public MatchAll(Source source) {
super(source);
}
@Override
public QueryBuilder asBuilder() {
return matchAllQuery();
}
@Override
protected String innerToString() {
return "";
}
}
|
MatchAll
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/data/OnRepositoryTypeCondition.java
|
{
"start": 1356,
"end": 2811
}
|
class ____ extends SpringBootCondition {
@Override
public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) {
Map<String, @Nullable Object> attributes = metadata
.getAnnotationAttributes(ConditionalOnRepositoryType.class.getName(), true);
Assert.state(attributes != null, "'attributes' must not be null");
String store = (String) attributes.get("store");
Assert.state(store != null, "'store' must not be null");
RepositoryType configuredType = getTypeProperty(context.getEnvironment(), store);
RepositoryType requiredType = (RepositoryType) attributes.get("type");
Assert.state(requiredType != null, "'requiredType' must not be null");
ConditionMessage.Builder message = ConditionMessage.forCondition(ConditionalOnRepositoryType.class);
if (configuredType == requiredType || configuredType == RepositoryType.AUTO) {
return ConditionOutcome
.match(message.because("configured type of '" + configuredType.name() + "' matched required type"));
}
return ConditionOutcome.noMatch(message.because("configured type (" + configuredType.name()
+ ") did not match required type (" + requiredType.name() + ")"));
}
private RepositoryType getTypeProperty(Environment environment, String store) {
return RepositoryType
.valueOf(environment.getProperty(String.format("spring.data.%s.repositories.type", store), "auto")
.toUpperCase(Locale.ENGLISH));
}
}
|
OnRepositoryTypeCondition
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationRowKey.java
|
{
"start": 4312,
"end": 10529
}
|
class ____
implements KeyConverter<SubApplicationRowKey>,
KeyConverterToString<SubApplicationRowKey> {
private SubApplicationRowKeyConverter() {
}
/**
* sub app row key is of the form
* subAppUserId!clusterId!entityType!entityPrefix!entityId!userId
* w. each segment separated by !.
*
* subAppUserId is usually the doAsUser.
* userId is the yarn user that the AM runs as.
*
* The sizes below indicate sizes of each one of these
* segments in sequence. clusterId, subAppUserId, entityType,
* entityId and userId are strings.
* entity prefix is a long hence 8 bytes in size. Strings are
* variable in size (i.e. end whenever separator is encountered).
* This is used while decoding and helps in determining where to split.
*/
private static final int[] SEGMENT_SIZES = {Separator.VARIABLE_SIZE,
Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE};
/*
* (non-Javadoc)
*
* Encodes SubApplicationRowKey object into a byte array with each
* component/field in SubApplicationRowKey separated by
* Separator#QUALIFIERS.
* This leads to an sub app table row key of the form
* subAppUserId!clusterId!entityType!entityPrefix!entityId!userId
*
* subAppUserId is usually the doAsUser.
* userId is the yarn user that the AM runs as.
*
* If entityType in passed SubApplicationRowKey object is null (and the
* fields preceding it are not null i.e. clusterId, subAppUserId), this
* returns a row key prefix of the form subAppUserId!clusterId!
* If entityId in SubApplicationRowKey is null
* (other components are not null), this returns a row key prefix
* of the form subAppUserId!clusterId!entityType!
*
* @see org.apache.hadoop.yarn.server.timelineservice.storage.common
* .KeyConverter#encode(java.lang.Object)
*/
@Override
public byte[] encode(SubApplicationRowKey rowKey) {
byte[] subAppUser = Separator.encode(rowKey.getSubAppUserId(),
Separator.SPACE, Separator.TAB, Separator.QUALIFIERS);
byte[] cluster = Separator.encode(rowKey.getClusterId(), Separator.SPACE,
Separator.TAB, Separator.QUALIFIERS);
byte[] first = Separator.QUALIFIERS.join(subAppUser, cluster);
if (rowKey.getEntityType() == null) {
return first;
}
byte[] entityType = Separator.encode(rowKey.getEntityType(),
Separator.SPACE, Separator.TAB, Separator.QUALIFIERS);
if (rowKey.getEntityIdPrefix() == null) {
return Separator.QUALIFIERS.join(first, entityType,
Separator.EMPTY_BYTES);
}
byte[] entityIdPrefix = Bytes.toBytes(rowKey.getEntityIdPrefix());
if (rowKey.getEntityId() == null) {
return Separator.QUALIFIERS.join(first, entityType, entityIdPrefix,
Separator.EMPTY_BYTES);
}
byte[] entityId = Separator.encode(rowKey.getEntityId(), Separator.SPACE,
Separator.TAB, Separator.QUALIFIERS);
byte[] userId = Separator.encode(rowKey.getUserId(),
Separator.SPACE, Separator.TAB, Separator.QUALIFIERS);
byte[] second = Separator.QUALIFIERS.join(entityType, entityIdPrefix,
entityId, userId);
return Separator.QUALIFIERS.join(first, second);
}
/*
* (non-Javadoc)
*
* Decodes a sub application row key of the form
* subAppUserId!clusterId!entityType!entityPrefix!entityId!userId
*
* subAppUserId is usually the doAsUser.
* userId is the yarn user that the AM runs as.
*
* represented in byte format
* and converts it into an SubApplicationRowKey object.
*
* @see org.apache.hadoop.yarn.server.timelineservice.storage.common
* .KeyConverter#decode(byte[])
*/
@Override
public SubApplicationRowKey decode(byte[] rowKey) {
byte[][] rowKeyComponents =
Separator.QUALIFIERS.split(rowKey, SEGMENT_SIZES);
if (rowKeyComponents.length != 6) {
throw new IllegalArgumentException(
"the row key is not valid for " + "a sub app");
}
String subAppUserId =
Separator.decode(Bytes.toString(rowKeyComponents[0]),
Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
String clusterId = Separator.decode(Bytes.toString(rowKeyComponents[1]),
Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
String entityType = Separator.decode(Bytes.toString(rowKeyComponents[2]),
Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
Long entityPrefixId = Bytes.toLong(rowKeyComponents[3]);
String entityId = Separator.decode(Bytes.toString(rowKeyComponents[4]),
Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
String userId =
Separator.decode(Bytes.toString(rowKeyComponents[5]),
Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
return new SubApplicationRowKey(subAppUserId, clusterId, entityType,
entityPrefixId, entityId, userId);
}
@Override
public String encodeAsString(SubApplicationRowKey key) {
if (key.subAppUserId == null || key.clusterId == null
|| key.entityType == null || key.entityIdPrefix == null
|| key.entityId == null || key.userId == null) {
throw new IllegalArgumentException();
}
return TimelineReaderUtils.joinAndEscapeStrings(
new String[] {key.subAppUserId, key.clusterId, key.entityType,
key.entityIdPrefix.toString(), key.entityId, key.userId});
}
@Override
public SubApplicationRowKey decodeFromString(String encodedRowKey) {
List<String> split = TimelineReaderUtils.split(encodedRowKey);
if (split == null || split.size() != 6) {
throw new IllegalArgumentException(
"Invalid row key for sub app table.");
}
Long entityIdPrefix = Long.valueOf(split.get(3));
return new SubApplicationRowKey(split.get(0), split.get(1),
split.get(2), entityIdPrefix, split.get(4), split.get(5));
}
}
}
|
SubApplicationRowKeyConverter
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
|
{
"start": 60594,
"end": 61725
}
|
class ____ implements
FileSystemAccess.FileSystemExecutor<String> {
/**
* Creates a getSnapshottableDirListing executor.
*/
public FSGetSnapshottableDirListing() {
}
/**
* Executes the filesystem operation.
* @param fs filesystem instance to use.
* @return A JSON string of all snapshottable directories.
* @throws IOException thrown if an IO error occurred.
*/
@Override
public String execute(FileSystem fs) throws IOException {
SnapshottableDirectoryStatus[] sds = null;
if (fs instanceof DistributedFileSystem) {
DistributedFileSystem dfs = (DistributedFileSystem) fs;
sds = dfs.getSnapshottableDirListing();
} else {
throw new UnsupportedOperationException("getSnapshottableDirListing is "
+ "not supported for HttpFs on " + fs.getClass()
+ ". Please check your fs.defaultFS configuration");
}
return JsonUtil.toJsonString(sds);
}
}
/**
* Executor that performs a getSnapshotListing operation.
*/
@InterfaceAudience.Private
public static
|
FSGetSnapshottableDirListing
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/IgniteQueueComponentBuilderFactory.java
|
{
"start": 1382,
"end": 1883
}
|
interface ____ {
/**
* Ignite Queues (camel-ignite)
* Interact with Ignite Queue data structures.
*
* Category: cache,clustering,messaging
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder
*/
static IgniteQueueComponentBuilder igniteQueue() {
return new IgniteQueueComponentBuilderImpl();
}
/**
* Builder for the Ignite Queues component.
*/
|
IgniteQueueComponentBuilderFactory
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvtVO/一个中文名字的包/User.java
|
{
"start": 49,
"end": 344
}
|
class ____ {
Integer id ;
String name;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
User
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/mixins/MixinsWithBundlesTest.java
|
{
"start": 323,
"end": 577
}
|
class ____ extends DatabindTestUtil
{
@Target(value={ ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD })
@Retention(value=RetentionPolicy.RUNTIME)
@JacksonAnnotationsInside
@JsonProperty("bar")
public @
|
MixinsWithBundlesTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java
|
{
"start": 2787,
"end": 43170
}
|
class ____ extends MlNativeDataFrameAnalyticsIntegTestCase {
private static final String NUMERICAL_FEATURE_FIELD = "feature";
private static final String DISCRETE_NUMERICAL_FEATURE_FIELD = "discrete-feature";
static final String DEPENDENT_VARIABLE_FIELD = "variable";
// It's important that the values here do not work in a way where
// one of the feature is the average of the other features as it may
// result in empty feature importance and we want to assert it gets
// written out correctly.
private static final List<Double> NUMERICAL_FEATURE_VALUES = List.of(5.0, 2.0, 3.0);
private static final List<Long> DISCRETE_NUMERICAL_FEATURE_VALUES = List.of(50L, 20L, 30L);
private static final List<Double> DEPENDENT_VARIABLE_VALUES = List.of(500.0, 200.0, 300.0);
private String jobId;
private String sourceIndex;
private String destIndex;
@After
public void cleanup() {
cleanUp();
}
public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws Exception {
initialize("regression_single_numeric_feature_and_mixed_data_set");
String predictedClassField = DEPENDENT_VARIABLE_FIELD + "_prediction";
indexData(sourceIndex, 300, 50);
DataFrameAnalyticsConfig config = buildAnalytics(
jobId,
sourceIndex,
destIndex,
null,
new Regression(
DEPENDENT_VARIABLE_FIELD,
BoostedTreeParams.builder().setNumTopFeatureImportanceValues(1).build(),
null,
null,
null,
null,
null,
null,
null
)
);
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
// for debugging
List<Map<String, Object>> badDocuments = new ArrayList<>();
assertResponse(prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000), sourceData -> {
int trainingDocsWithEmptyFeatureImportance = 0;
int testDocsWithEmptyFeatureImportance = 0;
for (SearchHit hit : sourceData.getHits()) {
Map<String, Object> destDoc = getDestDoc(config, hit);
Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(destDoc);
// TODO reenable this assertion when the backend is stable
// it seems for this case values can be as far off as 2.0
// double featureValue = (double) destDoc.get(NUMERICAL_FEATURE_FIELD);
// double predictionValue = (double) resultsObject.get(predictedClassField);
// assertThat(predictionValue, closeTo(10 * featureValue, 2.0));
assertThat(resultsObject, hasKey(predictedClassField));
assertThat(resultsObject, hasEntry("is_training", destDoc.containsKey(DEPENDENT_VARIABLE_FIELD)));
@SuppressWarnings("unchecked")
List<Map<String, Object>> importanceArray = (List<Map<String, Object>>) resultsObject.get("feature_importance");
if (importanceArray.isEmpty()) {
badDocuments.add(destDoc);
if (Boolean.TRUE.equals(resultsObject.get("is_training"))) {
trainingDocsWithEmptyFeatureImportance++;
} else {
testDocsWithEmptyFeatureImportance++;
}
}
assertThat(
importanceArray,
hasItem(
either(Matchers.<String, Object>hasEntry("feature_name", NUMERICAL_FEATURE_FIELD)).or(
hasEntry("feature_name", DISCRETE_NUMERICAL_FEATURE_FIELD)
)
)
);
}
// If feature importance was empty for some of the docs this assertion helps us
// understand whether the offending docs were training or test docs.
assertThat(
"There were ["
+ trainingDocsWithEmptyFeatureImportance
+ "] training docs and ["
+ testDocsWithEmptyFeatureImportance
+ "] test docs with empty feature importance"
+ " from "
+ sourceData.getHits().getTotalHits().value()
+ " hits.\n"
+ badDocuments,
trainingDocsWithEmptyFeatureImportance + testDocsWithEmptyFeatureImportance,
equalTo(0)
);
});
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
assertExactlyOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
assertThatAuditMessagesMatch(
jobId,
"Created analytics with type [regression]",
"Estimated memory usage [",
"Starting analytics on node",
"Started analytics",
"Creating destination index [" + destIndex + "]",
"Started reindexing to destination index [" + destIndex + "]",
"Finished reindexing to destination index [" + destIndex + "]",
"Started loading data",
"Started analyzing",
"Started writing results",
"Finished analysis"
);
}
public void testWithOnlyTrainingRowsAndTrainingPercentIsHundred() throws Exception {
initialize("regression_only_training_data_and_training_percent_is_100");
String predictedClassField = DEPENDENT_VARIABLE_FIELD + "_prediction";
indexData(sourceIndex, 350, 0);
DataFrameAnalyticsConfig config = buildAnalytics(jobId, sourceIndex, destIndex, null, new Regression(DEPENDENT_VARIABLE_FIELD));
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
assertResponse(prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000), sourceData -> {
for (SearchHit hit : sourceData.getHits()) {
Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(getDestDoc(config, hit));
assertThat(resultsObject.containsKey(predictedClassField), is(true));
assertThat(resultsObject.containsKey("is_training"), is(true));
assertThat(resultsObject.get("is_training"), is(true));
}
});
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
GetDataFrameAnalyticsStatsAction.Response.Stats stats = getAnalyticsStats(jobId);
assertThat(stats.getDataCounts().getJobId(), equalTo(jobId));
assertThat(stats.getDataCounts().getTrainingDocsCount(), equalTo(350L));
assertThat(stats.getDataCounts().getTestDocsCount(), equalTo(0L));
assertThat(stats.getDataCounts().getSkippedDocsCount(), equalTo(0L));
assertModelStatePersisted(stateDocId());
assertExactlyOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
assertThatAuditMessagesMatch(
jobId,
"Created analytics with type [regression]",
"Estimated memory usage [",
"Starting analytics on node",
"Started analytics",
"Creating destination index [" + destIndex + "]",
"Started reindexing to destination index [" + destIndex + "]",
"Finished reindexing to destination index [" + destIndex + "]",
"Started loading data",
"Started analyzing",
"Started writing results",
"Finished analysis"
);
}
public void testWithOnlyTrainingRowsAndTrainingPercentIsFifty() throws Exception {
initialize("regression_only_training_data_and_training_percent_is_50");
String predictedClassField = DEPENDENT_VARIABLE_FIELD + "_prediction";
indexData(sourceIndex, 350, 0);
DataFrameAnalyticsConfig config = buildAnalytics(
jobId,
sourceIndex,
destIndex,
null,
new Regression(DEPENDENT_VARIABLE_FIELD, BoostedTreeParams.builder().build(), null, 50.0, null, null, null, null, null)
);
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
assertResponse(prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000), sourceData -> {
int trainingRowsCount = 0;
int nonTrainingRowsCount = 0;
for (SearchHit hit : sourceData.getHits()) {
Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(getDestDoc(config, hit));
assertThat(resultsObject.containsKey(predictedClassField), is(true));
assertThat(resultsObject.containsKey("is_training"), is(true));
// Let's just assert there's both training and non-training results
if ((boolean) resultsObject.get("is_training")) {
trainingRowsCount++;
} else {
nonTrainingRowsCount++;
}
}
assertThat(trainingRowsCount, greaterThan(0));
assertThat(nonTrainingRowsCount, greaterThan(0));
});
GetDataFrameAnalyticsStatsAction.Response.Stats stats = getAnalyticsStats(jobId);
assertThat(stats.getDataCounts().getJobId(), equalTo(jobId));
assertThat(stats.getDataCounts().getTrainingDocsCount(), greaterThan(0L));
assertThat(stats.getDataCounts().getTrainingDocsCount(), lessThan(350L));
assertThat(stats.getDataCounts().getTestDocsCount(), greaterThan(0L));
assertThat(stats.getDataCounts().getTestDocsCount(), lessThan(350L));
assertThat(stats.getDataCounts().getSkippedDocsCount(), equalTo(0L));
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
assertExactlyOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
assertThatAuditMessagesMatch(
jobId,
"Created analytics with type [regression]",
"Estimated memory usage [",
"Starting analytics on node",
"Started analytics",
"Creating destination index [" + destIndex + "]",
"Started reindexing to destination index [" + destIndex + "]",
"Finished reindexing to destination index [" + destIndex + "]",
"Started loading data",
"Started analyzing",
"Started writing results",
"Finished analysis"
);
}
public void testStopAndRestart() throws Exception {
initialize("regression_stop_and_restart");
String predictedClassField = DEPENDENT_VARIABLE_FIELD + "_prediction";
indexData(sourceIndex, 350, 0);
DataFrameAnalyticsConfig config = buildAnalytics(jobId, sourceIndex, destIndex, null, new Regression(DEPENDENT_VARIABLE_FIELD));
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
NodeAcknowledgedResponse response = startAnalytics(jobId);
assertThat(response.getNode(), not(emptyString()));
String phaseToWait = randomFrom("reindexing", "loading_data", "feature_selection", "fine_tuning_parameters");
waitUntilSomeProgressHasBeenMadeForPhase(jobId, phaseToWait);
stopAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
// Now let's start it again
try {
response = startAnalytics(jobId);
assertThat(response.getNode(), not(emptyString()));
} catch (Exception e) {
if (e.getMessage().equals("Cannot start because the job has already finished")) {
// That means the job had managed to complete
} else {
throw e;
}
}
waitUntilAnalyticsIsStopped(jobId);
assertResponse(prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000), sourceData -> {
for (SearchHit hit : sourceData.getHits()) {
Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(getDestDoc(config, hit));
assertThat(resultsObject.containsKey(predictedClassField), is(true));
assertThat(resultsObject.containsKey("is_training"), is(true));
assertThat(resultsObject.get("is_training"), is(true));
}
});
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
assertAtLeastOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
}
public void testTwoJobsWithSameRandomizeSeedUseSameTrainingSet() throws Exception {
String sourceIndex = "regression_two_jobs_with_same_randomize_seed_source";
indexData(sourceIndex, 100, 0);
// Force merge to single segment to ensure deterministic _doc sort order during reindexing
// Without this, multiple segments or segment merges can cause non-deterministic document processing order
client().admin().indices().prepareForceMerge(sourceIndex).setMaxNumSegments(1).setFlush(true).get();
String firstJobId = "regression_two_jobs_with_same_randomize_seed_1";
String firstJobDestIndex = firstJobId + "_dest";
BoostedTreeParams boostedTreeParams = BoostedTreeParams.builder()
.setLambda(1.0)
.setGamma(1.0)
.setEta(1.0)
.setFeatureBagFraction(1.0)
.setMaxTrees(1)
.build();
DataFrameAnalyticsConfig firstJob = buildAnalytics(
firstJobId,
sourceIndex,
firstJobDestIndex,
null,
new Regression(DEPENDENT_VARIABLE_FIELD, boostedTreeParams, null, 50.0, null, null, null, null, null)
);
putAnalytics(firstJob);
startAnalytics(firstJobId);
waitUntilAnalyticsIsStopped(firstJobId);
String secondJobId = "regression_two_jobs_with_same_randomize_seed_2";
String secondJobDestIndex = secondJobId + "_dest";
long randomizeSeed = ((Regression) firstJob.getAnalysis()).getRandomizeSeed();
DataFrameAnalyticsConfig secondJob = buildAnalytics(
secondJobId,
sourceIndex,
secondJobDestIndex,
null,
new Regression(DEPENDENT_VARIABLE_FIELD, boostedTreeParams, null, 50.0, randomizeSeed, null, null, null, null)
);
putAnalytics(secondJob);
startAnalytics(secondJobId);
waitUntilAnalyticsIsStopped(secondJobId);
// Now we compare they both used the same training rows
Set<String> firstRunTrainingRowsIds = getTrainingRowsIds(firstJobDestIndex);
Set<String> secondRunTrainingRowsIds = getTrainingRowsIds(secondJobDestIndex);
assertThat(secondRunTrainingRowsIds, equalTo(firstRunTrainingRowsIds));
}
public void testDeleteExpiredData_RemovesUnusedState() throws Exception {
initialize("regression_delete_expired_data");
String predictedClassField = DEPENDENT_VARIABLE_FIELD + "_prediction";
indexData(sourceIndex, 100, 0);
DataFrameAnalyticsConfig config = buildAnalytics(jobId, sourceIndex, destIndex, null, new Regression(DEPENDENT_VARIABLE_FIELD));
putAnalytics(config);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
assertExactlyOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
// Call _delete_expired_data API and check nothing was deleted
assertThat(deleteExpiredData().isDeleted(), is(true));
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
// Delete the config straight from the config index
DeleteResponse deleteResponse = client().prepareDelete(".ml-config", DataFrameAnalyticsConfig.documentId(jobId))
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
assertThat(deleteResponse.status(), equalTo(RestStatus.OK));
// Now calling the _delete_expired_data API should remove unused state
assertThat(deleteExpiredData().isDeleted(), is(true));
assertHitCount(prepareSearch(".ml-state*"), 0L);
}
public void testDependentVariableIsLong() throws Exception {
initialize("regression_dependent_variable_is_long");
String predictedClassField = DISCRETE_NUMERICAL_FEATURE_FIELD + "_prediction";
indexData(sourceIndex, 100, 0);
DataFrameAnalyticsConfig config = buildAnalytics(
jobId,
sourceIndex,
destIndex,
null,
new Regression(DISCRETE_NUMERICAL_FEATURE_FIELD, BoostedTreeParams.builder().build(), null, null, null, null, null, null, null)
);
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
assertProgressComplete(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
}
public void testWithDatastream() throws Exception {
initialize("regression_with_datastream");
String predictedClassField = DEPENDENT_VARIABLE_FIELD + "_prediction";
indexData(sourceIndex, 300, 50, true);
DataFrameAnalyticsConfig config = buildAnalytics(
jobId,
sourceIndex,
destIndex,
null,
new Regression(
DEPENDENT_VARIABLE_FIELD,
BoostedTreeParams.builder().setNumTopFeatureImportanceValues(1).build(),
null,
null,
null,
null,
null,
null,
null
)
);
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
assertResponse(prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000), sourceData -> {
for (SearchHit hit : sourceData.getHits()) {
Map<String, Object> destDoc = getDestDoc(config, hit);
Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(destDoc);
assertThat(resultsObject, hasKey(predictedClassField));
assertThat(resultsObject, hasEntry("is_training", destDoc.containsKey(DEPENDENT_VARIABLE_FIELD)));
@SuppressWarnings("unchecked")
List<Map<String, Object>> importanceArray = (List<Map<String, Object>>) resultsObject.get("feature_importance");
assertThat(
importanceArray,
hasItem(
either(Matchers.<String, Object>hasEntry("feature_name", NUMERICAL_FEATURE_FIELD)).or(
hasEntry("feature_name", DISCRETE_NUMERICAL_FEATURE_FIELD)
)
)
);
}
});
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
assertExactlyOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
assertThatAuditMessagesMatch(
jobId,
"Created analytics with type [regression]",
"Estimated memory usage [",
"Starting analytics on node",
"Started analytics",
"Creating destination index [" + destIndex + "]",
"Started reindexing to destination index [" + destIndex + "]",
"Finished reindexing to destination index [" + destIndex + "]",
"Started loading data",
"Started analyzing",
"Started writing results",
"Finished analysis"
);
}
public void testAliasFields() throws Exception {
// The goal of this test is to assert alias fields are included in the analytics job.
// We have a simple dataset with two integer fields: field_1 and field_2.
// field_2 is double the value of field_1.
// We also add an alias to field_1 and we exclude field_1 from the analysis forcing
// field_1_alias to be the feature and field_2 to be the dependent variable.
// Then we proceed to check the predictions are roughly double the feature value.
// If alias fields are not being extracted properly the predictions will be wrong.
initialize("regression_alias_fields");
String predictionField = "field_2_prediction";
String mapping = """
{
"properties": {
"field_1": {
"type": "integer"
},
"field_2": {
"type": "integer"
},
"field_1_alias": {
"type": "alias",
"path": "field_1"
}
}
}""";
client().admin().indices().prepareCreate(sourceIndex).setMapping(mapping).get();
int totalDocCount = 300;
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 0; i < totalDocCount; i++) {
List<Object> source = List.of("field_1", i, "field_2", 2 * i);
IndexRequest indexRequest = new IndexRequest(sourceIndex).source(source.toArray()).opType(DocWriteRequest.OpType.CREATE);
bulkRequestBuilder.add(indexRequest);
}
BulkResponse bulkResponse = bulkRequestBuilder.get();
if (bulkResponse.hasFailures()) {
fail("Failed to index data: " + bulkResponse.buildFailureMessage());
}
// Very infrequently this test may fail as the algorithm underestimates the
// required number of trees for this simple problem. This failure is irrelevant
// for non-trivial real-world problem and improving estimation of the number of trees
// would introduce unnecessary overhead. Hence, to reduce the noise from this test we fix the seed
// and use the hyperparameters that are known to work.
long seed = 1000L; // fix seed
Regression regression = new Regression(
"field_2",
BoostedTreeParams.builder()
.setDownsampleFactor(0.7520841625652861)
.setAlpha(547.9095715556235)
.setLambda(3.3008189603590044)
.setGamma(1.6082763366825203)
.setSoftTreeDepthLimit(4.733224114945455)
.setSoftTreeDepthTolerance(0.15)
.setEta(0.12371209659057758)
.setEtaGrowthRatePerTree(1.0618560482952888)
.setMaxTrees(30)
.setFeatureBagFraction(0.8)
.build(),
null,
90.0,
seed,
null,
null,
null,
null
);
DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder().setId(jobId)
.setSource(new DataFrameAnalyticsSource(new String[] { sourceIndex }, null, null, Collections.emptyMap()))
.setDest(new DataFrameAnalyticsDest(destIndex, null))
.setAnalysis(regression)
.setAnalyzedFields(FetchSourceContext.of(true, null, new String[] { "field_1" }))
.build();
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
assertResponse(prepareSearch(sourceIndex).setSize(totalDocCount), sourceData -> {
double predictionErrorSum = 0.0;
for (SearchHit hit : sourceData.getHits()) {
Map<String, Object> destDoc = getDestDoc(config, hit);
Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(destDoc);
assertThat(resultsObject.containsKey(predictionField), is(true));
assertThat(resultsObject.containsKey("is_training"), is(true));
int featureValue = ((Number) destDoc.get("field_1")).intValue();
double predictionValue = ((Number) resultsObject.get(predictionField)).doubleValue();
predictionErrorSum += Math.abs(predictionValue - 2 * featureValue);
}
// We assert on the mean prediction error in order to reduce the probability
// the test fails compared to asserting on the prediction of each individual doc.
double meanPredictionError = predictionErrorSum / sourceData.getHits().getHits().length;
String str = "Failure: failed for seed %d inferenceEntityId %s numberTrees %d\n";
assertThat(meanPredictionError, lessThanOrEqualTo(3.0));
});
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
assertExactlyOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictionField, "double");
assertThatAuditMessagesMatch(
jobId,
"Created analytics with type [regression]",
"Estimated memory usage [",
"Starting analytics on node",
"Started analytics",
"Creating destination index [" + destIndex + "]",
"Started reindexing to destination index [" + destIndex + "]",
"Finished reindexing to destination index [" + destIndex + "]",
"Started loading data",
"Started analyzing",
"Started writing results",
"Finished analysis"
);
}
public void testWithCustomFeatureProcessors() throws Exception {
initialize("regression_with_custom_feature_processors");
String predictedClassField = DEPENDENT_VARIABLE_FIELD + "_prediction";
indexData(sourceIndex, 300, 50);
DataFrameAnalyticsConfig config = buildAnalytics(
jobId,
sourceIndex,
destIndex,
null,
new Regression(
DEPENDENT_VARIABLE_FIELD,
BoostedTreeParams.builder().setNumTopFeatureImportanceValues(1).build(),
null,
null,
null,
null,
null,
Arrays.asList(
new OneHotEncoding(
DISCRETE_NUMERICAL_FEATURE_FIELD,
Collections.singletonMap(DISCRETE_NUMERICAL_FEATURE_VALUES.get(0).toString(), "tenner"),
true
)
),
null
)
);
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
// for debugging
assertResponse(prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000), sourceData -> {
for (SearchHit hit : sourceData.getHits()) {
Map<String, Object> destDoc = getDestDoc(config, hit);
Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(destDoc);
assertThat(resultsObject.containsKey(predictedClassField), is(true));
assertThat(resultsObject.containsKey("is_training"), is(true));
assertThat(resultsObject.get("is_training"), is(destDoc.containsKey(DEPENDENT_VARIABLE_FIELD)));
}
});
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
assertExactlyOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
assertThatAuditMessagesMatch(
jobId,
"Created analytics with type [regression]",
"Estimated memory usage [",
"Starting analytics on node",
"Started analytics",
"Creating destination index [" + destIndex + "]",
"Started reindexing to destination index [" + destIndex + "]",
"Finished reindexing to destination index [" + destIndex + "]",
"Started loading data",
"Started analyzing",
"Started writing results",
"Finished analysis"
);
GetTrainedModelsAction.Response response = client().execute(
GetTrainedModelsAction.INSTANCE,
new GetTrainedModelsAction.Request(jobId + "*", Collections.emptyList(), Collections.singleton("definition"))
).actionGet();
assertThat(response.getResources().results().size(), equalTo(1));
TrainedModelConfig modelConfig = response.getResources().results().get(0);
modelConfig.ensureParsedDefinition(xContentRegistry());
assertThat(modelConfig.getModelDefinition().getPreProcessors().size(), greaterThan(0));
for (int i = 0; i < modelConfig.getModelDefinition().getPreProcessors().size(); i++) {
PreProcessor preProcessor = modelConfig.getModelDefinition().getPreProcessors().get(i);
assertThat(preProcessor.isCustom(), equalTo(i == 0));
}
}
public void testWithSearchRuntimeMappings() throws Exception {
initialize("regression_with_search_runtime_mappings");
indexData(sourceIndex, 300, 50);
String numericRuntimeField = NUMERICAL_FEATURE_FIELD + "_runtime";
String dependentVariableRuntimeField = DEPENDENT_VARIABLE_FIELD + "_runtime";
String predictedClassField = dependentVariableRuntimeField + "_prediction";
Map<String, Object> numericRuntimeFieldMapping = new HashMap<>();
numericRuntimeFieldMapping.put("type", "double");
numericRuntimeFieldMapping.put("script", "emit(doc['" + NUMERICAL_FEATURE_FIELD + "'].value)");
Map<String, Object> dependentVariableRuntimeFieldMapping = new HashMap<>();
dependentVariableRuntimeFieldMapping.put("type", "double");
dependentVariableRuntimeFieldMapping.put(
"script",
"if (doc['" + DEPENDENT_VARIABLE_FIELD + "'].size() > 0) { emit(doc['" + DEPENDENT_VARIABLE_FIELD + "'].value); }"
);
Map<String, Object> runtimeFields = new HashMap<>();
runtimeFields.put(numericRuntimeField, numericRuntimeFieldMapping);
runtimeFields.put(dependentVariableRuntimeField, dependentVariableRuntimeFieldMapping);
DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder().setId(jobId)
.setSource(new DataFrameAnalyticsSource(new String[] { sourceIndex }, null, null, runtimeFields))
.setDest(new DataFrameAnalyticsDest(destIndex, null))
.setAnalyzedFields(FetchSourceContext.of(true, new String[] { numericRuntimeField, dependentVariableRuntimeField }, null))
.setAnalysis(
new Regression(
dependentVariableRuntimeField,
BoostedTreeParams.builder().setNumTopFeatureImportanceValues(1).build(),
null,
null,
null,
null,
null,
null,
null
)
)
.build();
putAnalytics(config);
assertIsStopped(jobId);
assertProgressIsZero(jobId);
startAnalytics(jobId);
waitUntilAnalyticsIsStopped(jobId);
assertResponse(prepareSearch(destIndex).setTrackTotalHits(true).setSize(1000), destData -> {
for (SearchHit hit : destData.getHits()) {
Map<String, Object> destDoc = hit.getSourceAsMap();
Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(destDoc);
assertThat(resultsObject.containsKey(predictedClassField), is(true));
assertThat(resultsObject.containsKey("is_training"), is(true));
assertThat(resultsObject.get("is_training"), is(destDoc.containsKey(DEPENDENT_VARIABLE_FIELD)));
@SuppressWarnings("unchecked")
List<Map<String, Object>> importanceArray = (List<Map<String, Object>>) resultsObject.get("feature_importance");
assertThat(importanceArray, hasSize(1));
assertThat(importanceArray.get(0), hasEntry("feature_name", numericRuntimeField));
}
});
assertProgressComplete(jobId);
assertStoredProgressHits(jobId, 1);
assertModelStatePersisted(stateDocId());
assertExactlyOneInferenceModelPersisted(jobId);
assertMlResultsFieldMappings(destIndex, predictedClassField, "double");
assertThatAuditMessagesMatch(
jobId,
"Created analytics with type [regression]",
"Estimated memory usage [",
"Starting analytics on node",
"Started analytics",
"Creating destination index [" + destIndex + "]",
"Started reindexing to destination index [" + destIndex + "]",
"Finished reindexing to destination index [" + destIndex + "]",
"Started loading data",
"Started analyzing",
"Started writing results",
"Finished analysis"
);
}
public void testPreview() throws Exception {
initialize("preview_analytics");
indexData(sourceIndex, 300, 50);
DataFrameAnalyticsConfig config = buildAnalytics(jobId, sourceIndex, destIndex, null, new Regression(DEPENDENT_VARIABLE_FIELD));
putAnalytics(config);
List<Map<String, Object>> preview = previewDataFrame(jobId).getFeatureValues();
for (Map<String, Object> feature : preview) {
assertThat(feature.keySet(), hasItems(NUMERICAL_FEATURE_FIELD, DISCRETE_NUMERICAL_FEATURE_FIELD, DEPENDENT_VARIABLE_FIELD));
}
}
public void testPreviewWithProcessors() throws Exception {
initialize("processed_preview_analytics");
indexData(sourceIndex, 300, 50);
DataFrameAnalyticsConfig config = buildAnalytics(
jobId,
sourceIndex,
destIndex,
null,
new Regression(
DEPENDENT_VARIABLE_FIELD,
BoostedTreeParams.builder().setNumTopFeatureImportanceValues(1).build(),
null,
null,
null,
null,
null,
Arrays.asList(
new OneHotEncoding(
DISCRETE_NUMERICAL_FEATURE_FIELD,
Collections.singletonMap(DISCRETE_NUMERICAL_FEATURE_VALUES.get(0).toString(), "tenner"),
true
)
),
null
)
);
putAnalytics(config);
List<Map<String, Object>> preview = previewDataFrame(jobId).getFeatureValues();
for (Map<String, Object> feature : preview) {
assertThat(feature.keySet(), hasItems(NUMERICAL_FEATURE_FIELD, "tenner", DEPENDENT_VARIABLE_FIELD));
assertThat(feature, not(hasKey(DISCRETE_NUMERICAL_FEATURE_VALUES)));
}
}
private void initialize(String jobId) {
this.jobId = jobId;
this.sourceIndex = jobId + "_source_index";
this.destIndex = sourceIndex + "_results";
}
static void indexData(String sourceIndex, int numTrainingRows, int numNonTrainingRows) {
indexData(sourceIndex, numTrainingRows, numNonTrainingRows, false);
}
static void indexData(String sourceIndex, int numTrainingRows, int numNonTrainingRows, boolean dataStream) {
String mapping = Strings.format("""
{
"properties": {
"@timestamp": {
"type": "date"
},
"%s": {
"type": "double"
},
"%s": {
"type": "unsigned_long"
},
"%s": {
"type": "double"
}
}
}""", NUMERICAL_FEATURE_FIELD, DISCRETE_NUMERICAL_FEATURE_FIELD, DEPENDENT_VARIABLE_FIELD);
if (dataStream) {
try {
createDataStreamAndTemplate(sourceIndex, mapping);
} catch (IOException ex) {
throw new ElasticsearchException(ex);
}
} else {
client().admin().indices().prepareCreate(sourceIndex).setMapping(mapping).get();
}
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 0; i < numTrainingRows; i++) {
List<Object> source = List.of(
NUMERICAL_FEATURE_FIELD,
NUMERICAL_FEATURE_VALUES.get(i % NUMERICAL_FEATURE_VALUES.size()),
DISCRETE_NUMERICAL_FEATURE_FIELD,
DISCRETE_NUMERICAL_FEATURE_VALUES.get(i % DISCRETE_NUMERICAL_FEATURE_VALUES.size()),
DEPENDENT_VARIABLE_FIELD,
DEPENDENT_VARIABLE_VALUES.get(i % DEPENDENT_VARIABLE_VALUES.size()),
"@timestamp",
Instant.now().toEpochMilli()
);
IndexRequest indexRequest = new IndexRequest(sourceIndex).source(source.toArray()).opType(DocWriteRequest.OpType.CREATE);
bulkRequestBuilder.add(indexRequest);
}
for (int i = numTrainingRows; i < numTrainingRows + numNonTrainingRows; i++) {
List<Object> source = List.of(
NUMERICAL_FEATURE_FIELD,
NUMERICAL_FEATURE_VALUES.get(i % NUMERICAL_FEATURE_VALUES.size()),
DISCRETE_NUMERICAL_FEATURE_FIELD,
DISCRETE_NUMERICAL_FEATURE_VALUES.get(i % DISCRETE_NUMERICAL_FEATURE_VALUES.size()),
"@timestamp",
Instant.now().toEpochMilli()
);
IndexRequest indexRequest = new IndexRequest(sourceIndex).source(source.toArray()).opType(DocWriteRequest.OpType.CREATE);
bulkRequestBuilder.add(indexRequest);
}
BulkResponse bulkResponse = bulkRequestBuilder.get();
if (bulkResponse.hasFailures()) {
fail("Failed to index data: " + bulkResponse.buildFailureMessage());
}
}
private static Map<String, Object> getDestDoc(DataFrameAnalyticsConfig config, SearchHit hit) {
GetResponse destDocGetResponse = client().prepareGet().setIndex(config.getDest().getIndex()).setId(hit.getId()).get();
assertThat(destDocGetResponse.isExists(), is(true));
Map<String, Object> sourceDoc = hit.getSourceAsMap();
Map<String, Object> destDoc = destDocGetResponse.getSource();
for (String field : sourceDoc.keySet()) {
assertThat(destDoc.containsKey(field), is(true));
assertThat(destDoc.get(field), equalTo(sourceDoc.get(field)));
}
return destDoc;
}
private static Map<String, Object> getMlResultsObjectFromDestDoc(Map<String, Object> destDoc) {
return getFieldValue(destDoc, "ml");
}
protected String stateDocId() {
return jobId + "_regression_state#1";
}
@Override
boolean supportsInference() {
return true;
}
}
|
RegressionIT
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/model/ResourceUriLoader.java
|
{
"start": 4764,
"end": 5258
}
|
class ____ implements ModelLoaderFactory<Uri, InputStream> {
private final Context context;
InputStreamFactory(Context context) {
this.context = context;
}
@NonNull
@Override
public ModelLoader<Uri, InputStream> build(@NonNull MultiModelLoaderFactory multiFactory) {
return new ResourceUriLoader<>(context, multiFactory.build(Integer.class, InputStream.class));
}
@Override
public void teardown() {}
}
private static final
|
InputStreamFactory
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/core/v2/event/service/ServiceEvent.java
|
{
"start": 2060,
"end": 2503
}
|
class ____ extends ServiceEvent {
private static final long serialVersionUID = -2645441445867337345L;
private final String clientId;
public ServiceSubscribedEvent(Service service, String clientId) {
super(service);
this.clientId = clientId;
}
public String getClientId() {
return clientId;
}
}
}
|
ServiceSubscribedEvent
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/io/DefaultResourceLoader.java
|
{
"start": 3101,
"end": 5880
}
|
class ____ resources with.
* <p>Will get passed to ClassPathResource's constructor for all
* ClassPathResource objects created by this resource loader.
* @see ClassPathResource
*/
@Override
public @Nullable ClassLoader getClassLoader() {
return (this.classLoader != null ? this.classLoader : ClassUtils.getDefaultClassLoader());
}
/**
* Register the given resolver with this resource loader, allowing for
* additional protocols to be handled.
* <p>Any such resolver will be invoked ahead of this loader's standard
* resolution rules. It may therefore also override any default rules.
* @since 4.3
* @see #getProtocolResolvers()
*/
public void addProtocolResolver(ProtocolResolver resolver) {
Assert.notNull(resolver, "ProtocolResolver must not be null");
this.protocolResolvers.add(resolver);
}
/**
* Return the collection of currently registered protocol resolvers,
* allowing for introspection as well as modification.
* @since 4.3
* @see #addProtocolResolver(ProtocolResolver)
*/
public Collection<ProtocolResolver> getProtocolResolvers() {
return this.protocolResolvers;
}
/**
* Obtain a cache for the given value type, keyed by {@link Resource}.
* @param valueType the value type, for example, an ASM {@code MetadataReader}
* @return the cache {@link Map}, shared at the {@code ResourceLoader} level
* @since 5.0
*/
@SuppressWarnings("unchecked")
public <T> Map<Resource, T> getResourceCache(Class<T> valueType) {
return (Map<Resource, T>) this.resourceCaches.computeIfAbsent(valueType, key -> new ConcurrentHashMap<>());
}
/**
* Clear all resource caches in this resource loader.
* @since 5.0
* @see #getResourceCache
*/
public void clearResourceCaches() {
this.resourceCaches.clear();
}
@Override
public Resource getResource(String location) {
Assert.notNull(location, "Location must not be null");
for (ProtocolResolver protocolResolver : getProtocolResolvers()) {
Resource resource = protocolResolver.resolve(location, this);
if (resource != null) {
return resource;
}
}
if (location.startsWith("/")) {
return getResourceByPath(location);
}
else if (location.startsWith(CLASSPATH_URL_PREFIX)) {
return new ClassPathResource(location.substring(CLASSPATH_URL_PREFIX.length()), getClassLoader());
}
else {
try {
// Try to parse the location as a URL...
URL url = ResourceUtils.toURL(location);
return (ResourceUtils.isFileURL(url) ? new FileUrlResource(url) : new UrlResource(url));
}
catch (MalformedURLException ex) {
// No URL -> resolve as resource path.
return getResourceByPath(location);
}
}
}
/**
* Return a Resource handle for the resource at the given path.
* <p>The default implementation supports
|
path
|
java
|
google__guava
|
android/guava/src/com/google/common/io/Closer.java
|
{
"start": 3573,
"end": 9183
}
|
class ____ implements Closeable {
/** Creates a new {@link Closer}. */
public static Closer create() {
return new Closer(SUPPRESSING_SUPPRESSOR);
}
@VisibleForTesting final Suppressor suppressor;
// only need space for 2 elements in most cases, so try to use the smallest array possible
private final Deque<Closeable> stack = new ArrayDeque<>(4);
private @Nullable Throwable thrown;
@VisibleForTesting
Closer(Suppressor suppressor) {
this.suppressor = checkNotNull(suppressor); // checkNotNull to satisfy null tests
}
/**
* Registers the given {@code closeable} to be closed when this {@code Closer} is {@linkplain
* #close closed}.
*
* @return the given {@code closeable}
*/
// close. this word no longer has any meaning to me.
@CanIgnoreReturnValue
@ParametricNullness
public <C extends @Nullable Closeable> C register(@ParametricNullness C closeable) {
if (closeable != null) {
stack.addFirst(closeable);
}
return closeable;
}
/**
* Stores the given throwable and rethrows it. It will be rethrown as is if it is an {@code
* IOException}, {@code RuntimeException} or {@code Error}. Otherwise, it will be rethrown wrapped
* in a {@code RuntimeException}. <b>Note:</b> Be sure to declare all of the checked exception
* types your try block can throw when calling an overload of this method so as to avoid losing
* the original exception type.
*
* <p>This method always throws, and as such should be called as {@code throw closer.rethrow(e);}
* to ensure the compiler knows that it will throw.
*
* @return this method does not return; it always throws
* @throws IOException when the given throwable is an IOException
*/
public RuntimeException rethrow(Throwable e) throws IOException {
checkNotNull(e);
thrown = e;
throwIfInstanceOf(e, IOException.class);
throwIfUnchecked(e);
throw new RuntimeException(e);
}
/**
* Stores the given throwable and rethrows it. It will be rethrown as is if it is an {@code
* IOException}, {@code RuntimeException}, {@code Error} or a checked exception of the given type.
* Otherwise, it will be rethrown wrapped in a {@code RuntimeException}. <b>Note:</b> Be sure to
* declare all of the checked exception types your try block can throw when calling an overload of
* this method so as to avoid losing the original exception type.
*
* <p>This method always throws, and as such should be called as {@code throw closer.rethrow(e,
* ...);} to ensure the compiler knows that it will throw.
*
* @return this method does not return; it always throws
* @throws IOException when the given throwable is an IOException
* @throws X when the given throwable is of the declared type X
*/
public <X extends Exception> RuntimeException rethrow(Throwable e, Class<X> declaredType)
throws IOException, X {
checkNotNull(e);
thrown = e;
throwIfInstanceOf(e, IOException.class);
throwIfInstanceOf(e, declaredType);
throwIfUnchecked(e);
throw new RuntimeException(e);
}
/**
* Stores the given throwable and rethrows it. It will be rethrown as is if it is an {@code
* IOException}, {@code RuntimeException}, {@code Error} or a checked exception of either of the
* given types. Otherwise, it will be rethrown wrapped in a {@code RuntimeException}. <b>Note:</b>
* Be sure to declare all of the checked exception types your try block can throw when calling an
* overload of this method so as to avoid losing the original exception type.
*
* <p>This method always throws, and as such should be called as {@code throw closer.rethrow(e,
* ...);} to ensure the compiler knows that it will throw.
*
* @return this method does not return; it always throws
* @throws IOException when the given throwable is an IOException
* @throws X1 when the given throwable is of the declared type X1
* @throws X2 when the given throwable is of the declared type X2
*/
public <X1 extends Exception, X2 extends Exception> RuntimeException rethrow(
Throwable e, Class<X1> declaredType1, Class<X2> declaredType2) throws IOException, X1, X2 {
checkNotNull(e);
thrown = e;
throwIfInstanceOf(e, IOException.class);
throwIfInstanceOf(e, declaredType1);
throwIfInstanceOf(e, declaredType2);
throwIfUnchecked(e);
throw new RuntimeException(e);
}
/**
* Closes all {@code Closeable} instances that have been added to this {@code Closer}. If an
* exception was thrown in the try block and passed to one of the {@code exceptionThrown} methods,
* any exceptions thrown when attempting to close a closeable will be suppressed. Otherwise, the
* <i>first</i> exception to be thrown from an attempt to close a closeable will be thrown and any
* additional exceptions that are thrown after that will be suppressed.
*/
@Override
public void close() throws IOException {
Throwable throwable = thrown;
// close closeables in LIFO order
while (!stack.isEmpty()) {
Closeable closeable = stack.removeFirst();
try {
closeable.close();
} catch (Throwable e) {
if (throwable == null) {
throwable = e;
} else {
suppressor.suppress(closeable, throwable, e);
}
}
}
if (thrown == null && throwable != null) {
throwIfInstanceOf(throwable, IOException.class);
throwIfUnchecked(throwable);
throw new AssertionError(throwable); // not possible
}
}
/** Suppression strategy interface. */
@VisibleForTesting
|
Closer
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/main/java/io/quarkus/rest/client/reactive/deployment/devservices/DevServicesRestClientHttpProxyProcessor.java
|
{
"start": 10438,
"end": 12232
}
|
class ____ implements Runnable {
private final AtomicReference<Set<RestClientHttpProxyBuildItem>> runningProxiesRef;
private final AtomicReference<Set<Closeable>> providerCloseablesRef;
private final AtomicReference<Set<DevServicesRestClientProxyProvider>> runningProvidersRef;
public CloseTask(AtomicReference<Set<RestClientHttpProxyBuildItem>> runningProxiesRef,
AtomicReference<Set<Closeable>> providerCloseablesRef,
AtomicReference<Set<DevServicesRestClientProxyProvider>> runningProvidersRef) {
this.runningProxiesRef = runningProxiesRef;
this.providerCloseablesRef = providerCloseablesRef;
this.runningProvidersRef = runningProvidersRef;
}
@Override
public void run() {
Set<RestClientHttpProxyBuildItem> restClientHttpProxyBuildItems = runningProxiesRef.get();
for (var bi : restClientHttpProxyBuildItems) {
closeRunningProxy(bi);
}
runningProxiesRef.set(new HashSet<>());
Set<Closeable> providerCloseables = providerCloseablesRef.get();
for (Closeable closeable : providerCloseables) {
try {
if (closeable != null) {
log.debug("Attempting to close provider");
closeable.close();
log.debug("Closed provider");
}
} catch (IOException e) {
throw new UncheckedException(e);
}
}
providerCloseablesRef.set(Collections.newSetFromMap(new IdentityHashMap<>()));
runningProvidersRef.set(Collections.newSetFromMap(new IdentityHashMap<>()));
}
}
}
|
CloseTask
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/web/WebServerNamespaceTests.java
|
{
"start": 878,
"end": 1706
}
|
class ____ {
@Test
void fromWhenValueHasText() {
assertThat(WebServerNamespace.from("management")).isEqualTo(WebServerNamespace.MANAGEMENT);
}
@Test
void fromWhenValueIsNull() {
assertThat(WebServerNamespace.from(null)).isEqualTo(WebServerNamespace.SERVER);
}
@Test
void fromWhenValueIsEmpty() {
assertThat(WebServerNamespace.from("")).isEqualTo(WebServerNamespace.SERVER);
}
@Test
void namespaceWithSameValueAreEqual() {
assertThat(WebServerNamespace.from("value")).isEqualTo(WebServerNamespace.from("value"));
}
@Test
void namespaceWithDifferentValuesAreNotEqual() {
assertThat(WebServerNamespace.from("value")).isNotEqualTo(WebServerNamespace.from("other"));
}
@Test
void toStringReturnsString() {
assertThat(WebServerNamespace.from("value")).hasToString("value");
}
}
|
WebServerNamespaceTests
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/catalog/SqlCatalogViewTable.java
|
{
"start": 1520,
"end": 2304
}
|
class ____ extends ExpandingPreparingTable {
private final CatalogView view;
private final List<String> viewPath;
public SqlCatalogViewTable(
@Nullable RelOptSchema relOptSchema,
RelDataType rowType,
Iterable<String> names,
FlinkStatistic statistic,
CatalogView view,
List<String> viewPath) {
super(relOptSchema, rowType, names, statistic);
this.view = view;
this.viewPath = viewPath;
}
@Override
public RelNode convertToRel(ToRelContext context) {
RelNode original =
context.expandView(rowType, view.getExpandedQuery(), viewPath, names).project();
return RelOptUtil.createCastRel(original, rowType, true);
}
}
|
SqlCatalogViewTable
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
|
{
"start": 37296,
"end": 39000
}
|
class ____ extends ParserRuleContext {
public TerminalNode ROW() { return getToken(EsqlBaseParser.ROW, 0); }
public FieldsContext fields() {
return getRuleContext(FieldsContext.class,0);
}
@SuppressWarnings("this-escape")
public RowCommandContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_rowCommand; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterRowCommand(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitRowCommand(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitRowCommand(this);
else return visitor.visitChildren(this);
}
}
public final RowCommandContext rowCommand() throws RecognitionException {
RowCommandContext _localctx = new RowCommandContext(_ctx, getState());
enterRule(_localctx, 14, RULE_rowCommand);
try {
enterOuterAlt(_localctx, 1);
{
setState(260);
match(ROW);
setState(261);
fields();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static
|
RowCommandContext
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/RestApiComponentBuilderFactory.java
|
{
"start": 1409,
"end": 1906
}
|
interface ____ {
/**
* REST API (camel-rest)
* Expose OpenAPI Specification of the REST services defined using Camel
* REST DSL.
*
* Category: core,rest
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-rest
*
* @return the dsl builder
*/
static RestApiComponentBuilder restApi() {
return new RestApiComponentBuilderImpl();
}
/**
* Builder for the REST API component.
*/
|
RestApiComponentBuilderFactory
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/TestContextAnnotationUtilsTests.java
|
{
"start": 26192,
"end": 26278
}
|
interface ____ extends InheritedAnnotationInterface {
}
|
SubInheritedAnnotationInterface
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/promql/PlaceholderRelation.java
|
{
"start": 880,
"end": 1900
}
|
class ____ extends LeafPlan {
public static final LogicalPlan INSTANCE = new PlaceholderRelation(Source.EMPTY);
public PlaceholderRelation(Source source) {
super(source);
}
@Override
public boolean expressionsResolved() {
return true;
}
@Override
protected NodeInfo<? extends LogicalPlan> info() {
return NodeInfo.create(this);
}
@Override
public int hashCode() {
return PlaceholderRelation.class.hashCode();
}
@Override
public boolean equals(Object obj) {
return obj instanceof PlaceholderRelation;
}
@Override
public List<Attribute> output() {
return List.of();
}
@Override
public String getWriteableName() {
throw new UnsupportedOperationException("does not support serialization");
}
@Override
public void writeTo(StreamOutput out) throws IOException {
throw new UnsupportedOperationException("does not support serialization");
}
}
|
PlaceholderRelation
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/JsonTypeInfoIgnored2968Test.java
|
{
"start": 1757,
"end": 4342
}
|
class ____ {
public SimpleBall value;
}
/*
/**********************************************************
/* Tests
/**********************************************************
*/
@Test
public void testDeserializeParentPositiveWithTypeId() throws Exception {
String json = a2q("{'_class': '_cat', 'name': 'Cat-in-the-hat'} ");
Animal cat = MAPPER.readValue(json, Animal.class);
assertEquals("Cat-in-the-hat", cat.name);
}
@Test
public void testDeserializeParentNegativeWithOutTypeId() throws Exception {
String json = a2q("{'name': 'cat'} ");
try {
MAPPER.readValue(json, Animal.class);
} catch (InvalidTypeIdException e) {
assertTrue(e.getMessage().contains("missing type id property '_class'"));
}
}
@Test
public void testDeserializedAsConcreteTypeSuccessfulWithOutPropertySet() throws Exception {
String json = a2q("{'name': 'cat'} ");
Cat cat = MAPPER.readValue(json, Cat.class);
assertEquals("cat", cat.name);
}
@Test
public void testDeserializationWrapperWithDefaultTyping() throws Exception {
final PolymorphicTypeValidator ptv = BasicPolymorphicTypeValidator.builder()
.allowIfBaseType(SimpleBall.class)
.build();
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(ptv, DefaultTyping.NON_FINAL)
.build();
final String defaultTypedJson = a2q(
"{'value':" +
"['"+getClass().getName()+"$BasketBall'," +
"{'size':42}]}");
BallValueWrapper wrapper = mapper.readValue(defaultTypedJson, BallValueWrapper.class);
assertEquals(42, wrapper.value.size);
assertEquals(BasketBall.class, wrapper.value.getClass());
}
@Test
public void testDeserializationBaseClassWithDefaultTyping() throws Exception {
final PolymorphicTypeValidator ptv = BasicPolymorphicTypeValidator.builder()
.allowIfBaseType(SimpleBall.class)
.build();
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(ptv, DefaultTyping.NON_FINAL)
.build();
final String concreteTypeJson = a2q("{'size': 42}");
try {
mapper.readValue(concreteTypeJson, SimpleBall.class);
} catch (MismatchedInputException | InvalidDefinitionException e) {
verifyException(e, "Unexpected token", "START_OBJECT", "expected", "START_ARRAY");
}
}
}
|
BallValueWrapper
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringCustomProcessorFactoryTest.java
|
{
"start": 1047,
"end": 1328
}
|
class ____ extends CustomProcessorFactoryTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/CustomProcessorFactoryTest.xml");
}
}
|
SpringCustomProcessorFactoryTest
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/factory/SecureHeadersGatewayFilterFactoryUnitTests.java
|
{
"start": 2750,
"end": 11996
}
|
class ____ {
private GatewayFilter filter;
private ServerWebExchange exchange;
private GatewayFilterChain filterChain;
private ArgumentCaptor<ServerWebExchange> captor;
@BeforeEach
public void setUp() {
MockServerHttpRequest request = MockServerHttpRequest.get("http://localhost").build();
exchange = MockServerWebExchange.from(request);
filterChain = mock(GatewayFilterChain.class);
captor = ArgumentCaptor.forClass(ServerWebExchange.class);
when(filterChain.filter(captor.capture())).thenReturn(Mono.empty());
}
@Test
public void addAllHeadersIfNothingIsDisabled() {
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(
new SecureHeadersProperties());
filter = filterFactory.apply(new Config());
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = exchange.getResponse();
assertThat(response.getHeaders().headerNames()).containsOnly(X_XSS_PROTECTION_HEADER,
STRICT_TRANSPORT_SECURITY_HEADER, X_FRAME_OPTIONS_HEADER, X_CONTENT_TYPE_OPTIONS_HEADER,
REFERRER_POLICY_HEADER, CONTENT_SECURITY_POLICY_HEADER, X_DOWNLOAD_OPTIONS_HEADER,
X_PERMITTED_CROSS_DOMAIN_POLICIES_HEADER);
}
@Test
public void doNotAddDisabledHeaders() {
SecureHeadersProperties properties = new SecureHeadersProperties();
properties.setDisable(asList("x-xss-protection", "strict-transport-security", "x-frame-options",
"x-content-type-options", "referrer-policy", "content-security-policy", "x-download-options",
"x-permitted-cross-domain-policies"));
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(properties);
filter = filterFactory.apply(new Config());
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = captor.getValue().getResponse();
assertThat(response.getHeaders().headerNames()).doesNotContain(X_XSS_PROTECTION_HEADER,
STRICT_TRANSPORT_SECURITY_HEADER, X_FRAME_OPTIONS_HEADER, X_CONTENT_TYPE_OPTIONS_HEADER,
REFERRER_POLICY_HEADER, CONTENT_SECURITY_POLICY_HEADER, X_DOWNLOAD_OPTIONS_HEADER,
X_PERMITTED_CROSS_DOMAIN_POLICIES_HEADER);
}
@Test
public void overrideSomeHeaders() {
SecureHeadersProperties properties = new SecureHeadersProperties();
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(
new SecureHeadersProperties());
Config config = new Config();
config.setStrictTransportSecurityHeaderValue("max-age=65535");
config.setReferrerPolicyHeaderValue("referrer");
filter = filterFactory.apply(config);
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = exchange.getResponse();
assertThat(response.getHeaders().headerNames()).contains(X_XSS_PROTECTION_HEADER,
STRICT_TRANSPORT_SECURITY_HEADER, X_FRAME_OPTIONS_HEADER, X_CONTENT_TYPE_OPTIONS_HEADER,
REFERRER_POLICY_HEADER, CONTENT_SECURITY_POLICY_HEADER, X_DOWNLOAD_OPTIONS_HEADER,
X_PERMITTED_CROSS_DOMAIN_POLICIES_HEADER);
assertThat(response.getHeaders().get(STRICT_TRANSPORT_SECURITY_HEADER)).containsOnly("max-age=65535");
assertThat(response.getHeaders().get(REFERRER_POLICY_HEADER)).containsOnly("referrer");
assertThat(response.getHeaders().get(X_XSS_PROTECTION_HEADER))
.containsOnly(properties.getXssProtectionHeader());
assertThat(response.getHeaders().get(X_FRAME_OPTIONS_HEADER)).containsOnly(properties.getFrameOptions());
assertThat(response.getHeaders().get(X_CONTENT_TYPE_OPTIONS_HEADER))
.containsOnly(properties.getContentTypeOptions());
assertThat(response.getHeaders().get(CONTENT_SECURITY_POLICY_HEADER))
.containsOnly(properties.getContentSecurityPolicy());
assertThat(response.getHeaders().get(X_DOWNLOAD_OPTIONS_HEADER)).containsOnly(properties.getDownloadOptions());
assertThat(response.getHeaders().get(X_PERMITTED_CROSS_DOMAIN_POLICIES_HEADER))
.containsOnly(properties.getPermittedCrossDomainPolicies());
}
@Test
public void doesNotDuplicateHeaders() {
String originalHeaderValue = "original-header-value";
SecureHeadersProperties secureHeadersProperties = new SecureHeadersProperties();
secureHeadersProperties.setDisable(Collections.emptyList());
secureHeadersProperties.setEnable(List.of(PERMISSIONS_POLICY_HEADER));
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(
secureHeadersProperties);
Config config = new Config();
String[] headers = { X_XSS_PROTECTION_HEADER, STRICT_TRANSPORT_SECURITY_HEADER, X_FRAME_OPTIONS_HEADER,
X_CONTENT_TYPE_OPTIONS_HEADER, REFERRER_POLICY_HEADER, CONTENT_SECURITY_POLICY_HEADER,
X_DOWNLOAD_OPTIONS_HEADER, X_PERMITTED_CROSS_DOMAIN_POLICIES_HEADER, PERMISSIONS_POLICY_HEADER };
for (String header : headers) {
filter = filterFactory.apply(config);
MockServerHttpRequest request = MockServerHttpRequest.get("http://localhost").build();
exchange = MockServerWebExchange.from(request);
exchange.getResponse().getHeaders().set(header, originalHeaderValue);
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = captor.getValue().getResponse();
assertThat(response.getHeaders().get(header)).containsOnly(originalHeaderValue);
}
}
@Test
public void toStringFormat() {
GatewayFilter filter = new SecureHeadersGatewayFilterFactory(new SecureHeadersProperties()).apply(new Config());
Assertions.assertThat(filter.toString()).contains("SecureHeaders");
}
@Test
public void doNotAddPermissionsPolicyWhenNotEnabled() {
SecureHeadersProperties properties = new SecureHeadersProperties();
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(properties);
filter = filterFactory.apply(new Config());
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = captor.getValue().getResponse();
assertThat(response.getHeaders().headerNames()).doesNotContain(PERMISSIONS_POLICY_HEADER);
}
@Test
public void addPermissionsPolicyWhenEnabled() {
SecureHeadersProperties properties = new SecureHeadersProperties();
properties.setEnable(List.of("permissions-policy"));
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(properties);
filter = filterFactory.apply(new Config());
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = captor.getValue().getResponse();
assertThat(response.getHeaders().headerNames()).contains(X_XSS_PROTECTION_HEADER,
STRICT_TRANSPORT_SECURITY_HEADER, X_FRAME_OPTIONS_HEADER, X_CONTENT_TYPE_OPTIONS_HEADER,
REFERRER_POLICY_HEADER, CONTENT_SECURITY_POLICY_HEADER, X_DOWNLOAD_OPTIONS_HEADER,
X_PERMITTED_CROSS_DOMAIN_POLICIES_HEADER);
assertThat(response.getHeaders().get(PERMISSIONS_POLICY_HEADER))
.containsExactly(SecureHeadersProperties.PERMISSIONS_POLICY_HEADER_OPT_IN_DEFAULT);
}
@Test
public void addPermissionsPolicyAndOverrideDefaults() {
SecureHeadersProperties properties = new SecureHeadersProperties();
properties.setEnable(List.of("permissions-policy"));
properties.setPermissionsPolicy("camera=*");
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(properties);
filter = filterFactory.apply(new Config());
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = captor.getValue().getResponse();
assertThat(response.getHeaders().get(PERMISSIONS_POLICY_HEADER)).containsExactly("camera=*");
}
@Test
public void applyCompositionWithDisabledHeadersAndPermissionPolicy() {
SecureHeadersProperties properties = new SecureHeadersProperties();
properties.setDisable(asList("x-xss-protection", "strict-transport-security", "x-frame-options",
"x-content-type-options", "referrer-policy", "content-security-policy", "x-download-options"));
properties.setEnable(List.of("permissions-policy"));
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(properties);
filter = filterFactory.apply(new Config());
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = captor.getValue().getResponse();
assertThat(response.getHeaders().headerNames()).containsOnly(X_PERMITTED_CROSS_DOMAIN_POLICIES_HEADER,
PERMISSIONS_POLICY_HEADER);
}
@Test
public void overrideDefaultInSecurityPropertiesWhenRouteConfigIsProvided() {
SecureHeadersGatewayFilterFactory filterFactory = new SecureHeadersGatewayFilterFactory(
new SecureHeadersProperties());
Config config = new Config();
config.setDisable(Set.of("strict-transport-security"));
config.setEnable(Set.of("permissions-policy"));
config.setPermissionsPolicy("camera=*");
filter = filterFactory.apply(config);
filter.filter(exchange, filterChain).block();
ServerHttpResponse response = exchange.getResponse();
assertThat(response.getHeaders().headerNames()).containsOnly(X_XSS_PROTECTION_HEADER, X_FRAME_OPTIONS_HEADER,
X_CONTENT_TYPE_OPTIONS_HEADER, REFERRER_POLICY_HEADER, CONTENT_SECURITY_POLICY_HEADER,
X_DOWNLOAD_OPTIONS_HEADER, X_PERMITTED_CROSS_DOMAIN_POLICIES_HEADER, PERMISSIONS_POLICY_HEADER);
assertThat(response.getHeaders().get(PERMISSIONS_POLICY_HEADER)).containsExactly("camera=*");
}
}
|
SecureHeadersGatewayFilterFactoryUnitTests
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/InstantSerializer.java
|
{
"start": 3778,
"end": 3969
}
|
class ____
extends SimpleTypeSerializerSnapshot<Instant> {
public InstantSerializerSnapshot() {
super(() -> INSTANCE);
}
}
}
|
InstantSerializerSnapshot
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/event/AnnotationDrivenEventListenerTests.java
|
{
"start": 27208,
"end": 27689
}
|
class ____ extends AbstractTestEventListener {
@EventListener
public void handle(TestEvent event) {
collectEvent(event);
}
@EventListener
public void handleString(String content) {
collectEvent(content);
}
@EventListener({Boolean.class, Integer.class})
public void handleBooleanOrInteger(Serializable content) {
collectEvent(content);
}
}
@EventListener(id = "foo")
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@
|
TestEventListener
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BindableRuntimeHintsRegistrarTests.java
|
{
"start": 23253,
"end": 24612
}
|
class ____ implements Consumer<TypeHint> {
private final Class<?> type;
private Constructor<?> constructor;
private List<String> expectedMethods = Collections.emptyList();
private List<String> expectedFields = Collections.emptyList();
private JavaBeanBinding(Class<?> type) {
this.type = type;
this.constructor = this.type.getDeclaredConstructors()[0];
}
@Override
public void accept(TypeHint entry) {
assertThat(entry.getType()).isEqualTo(TypeReference.of(this.type));
assertThat(entry.constructors()).singleElement().satisfies(match(this.constructor));
assertThat(entry.getMemberCategories()).isEmpty();
assertThat(entry.methods()).as("Methods requiring reflection")
.extracting(ExecutableHint::getName)
.containsExactlyInAnyOrderElementsOf(this.expectedMethods);
assertThat(entry.fields()).as("Fields requiring reflection")
.extracting(FieldHint::getName)
.containsExactlyInAnyOrderElementsOf(this.expectedFields);
}
private JavaBeanBinding constructor(Constructor<?> constructor) {
this.constructor = constructor;
return this;
}
private JavaBeanBinding methods(String... methods) {
this.expectedMethods = List.of(methods);
return this;
}
private JavaBeanBinding fields(String... fields) {
this.expectedFields = List.of(fields);
return this;
}
}
}
|
JavaBeanBinding
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/InternalPriorityQueueTestBase.java
|
{
"start": 2146,
"end": 13583
}
|
class ____ {
protected static final KeyGroupRange KEY_GROUP_RANGE = new KeyGroupRange(0, 2);
protected static final KeyExtractorFunction<TestElement> KEY_EXTRACTOR_FUNCTION =
TestElement::getKey;
protected static final PriorityComparator<TestElement> TEST_ELEMENT_PRIORITY_COMPARATOR =
(left, right) -> Long.compare(left.getPriority(), right.getPriority());
protected static final Comparator<TestElement> TEST_ELEMENT_COMPARATOR =
new TestElementComparator();
protected Comparator<Long> getTestElementPriorityComparator() {
return Long::compareTo;
}
private long getHighestPriorityValueForComparator() {
return getTestElementPriorityComparator().compare(-1L, 1L) > 0
? Long.MAX_VALUE
: Long.MIN_VALUE;
}
protected static void insertRandomElements(
@Nonnull InternalPriorityQueue<TestElement> priorityQueue,
@Nonnull Set<TestElement> checkSet,
int count) {
ThreadLocalRandom localRandom = ThreadLocalRandom.current();
final int numUniqueKeys = Math.max(count / 4, 64);
long duplicatePriority = Long.MIN_VALUE;
final boolean checkEndSizes = priorityQueue.isEmpty();
for (int i = 0; i < count; ++i) {
TestElement element;
do {
long elementPriority;
if (duplicatePriority == Long.MIN_VALUE) {
elementPriority = localRandom.nextLong();
} else {
elementPriority = duplicatePriority;
duplicatePriority = Long.MIN_VALUE;
}
element = new TestElement(localRandom.nextInt(numUniqueKeys), elementPriority);
} while (!checkSet.add(element));
if (localRandom.nextInt(10) == 0) {
duplicatePriority = element.getPriority();
}
final boolean headChangedIndicated = priorityQueue.add(element);
if (element.equals(priorityQueue.peek())) {
assertThat(headChangedIndicated).isTrue();
}
}
if (checkEndSizes) {
assertThat(priorityQueue.size()).isEqualTo(count);
}
}
@Test
void testPeekPollOrder() {
final int initialCapacity = 4;
final int testSize = 1000;
final Comparator<Long> comparator = getTestElementPriorityComparator();
InternalPriorityQueue<TestElement> priorityQueue = newPriorityQueue(initialCapacity);
HashSet<TestElement> checkSet = new HashSet<>(testSize);
insertRandomElements(priorityQueue, checkSet, testSize);
long lastPriorityValue = getHighestPriorityValueForComparator();
int lastSize = priorityQueue.size();
assertThat(lastSize).isEqualTo(testSize);
TestElement testElement;
while ((testElement = priorityQueue.peek()) != null) {
assertThat(priorityQueue.isEmpty()).isFalse();
assertThat(priorityQueue.size()).isEqualTo(lastSize);
assertThat(priorityQueue.poll()).isEqualTo(testElement);
assertThat(checkSet.remove(testElement)).isTrue();
assertThat(testElement.getPriority()).isGreaterThanOrEqualTo(lastPriorityValue);
lastPriorityValue = testElement.getPriority();
--lastSize;
}
assertThat(priorityQueue.isEmpty()).isTrue();
assertThat(priorityQueue.size()).isZero();
assertThat(checkSet).isEmpty();
}
@Test
void testRemoveInsertMixKeepsOrder() {
InternalPriorityQueue<TestElement> priorityQueue = newPriorityQueue(3);
final Comparator<Long> comparator = getTestElementPriorityComparator();
final ThreadLocalRandom random = ThreadLocalRandom.current();
final int testSize = 300;
final int addCounterMax = testSize / 4;
int iterationsTillNextAdds = random.nextInt(addCounterMax);
HashSet<TestElement> checkSet = new HashSet<>(testSize);
insertRandomElements(priorityQueue, checkSet, testSize);
// check that the whole set is still in order
while (!checkSet.isEmpty()) {
final long highestPrioValue = getHighestPriorityValueForComparator();
Iterator<TestElement> iterator = checkSet.iterator();
TestElement element = iterator.next();
iterator.remove();
final boolean removesHead = element.equals(priorityQueue.peek());
if (removesHead) {
assertThat(priorityQueue.remove(element)).isTrue();
} else {
priorityQueue.remove(element);
}
long currentPriorityWatermark;
// test some bulk polling from time to time
if (removesHead) {
currentPriorityWatermark = element.getPriority();
} else {
currentPriorityWatermark = highestPrioValue;
}
while ((element = priorityQueue.poll()) != null) {
assertThat(element.getPriority()).isGreaterThanOrEqualTo(currentPriorityWatermark);
currentPriorityWatermark = element.getPriority();
if (--iterationsTillNextAdds == 0) {
// some random adds
iterationsTillNextAdds = random.nextInt(addCounterMax);
insertRandomElements(
priorityQueue, new HashSet<>(checkSet), 1 + random.nextInt(3));
currentPriorityWatermark = priorityQueue.peek().getPriority();
}
}
assertThat(priorityQueue.isEmpty()).isTrue();
priorityQueue.addAll(checkSet);
}
}
@Test
void testPoll() {
InternalPriorityQueue<TestElement> priorityQueue = newPriorityQueue(3);
final Comparator<Long> comparator = getTestElementPriorityComparator();
assertThat(priorityQueue.poll()).isNull();
final int testSize = 345;
HashSet<TestElement> checkSet = new HashSet<>(testSize);
insertRandomElements(priorityQueue, checkSet, testSize);
long lastPriorityValue = getHighestPriorityValueForComparator();
while (!priorityQueue.isEmpty()) {
TestElement removed = priorityQueue.poll();
assertThat(removed).isNotNull();
assertThat(checkSet.remove(removed)).isTrue();
assertThat(removed.getPriority()).isGreaterThanOrEqualTo(lastPriorityValue);
lastPriorityValue = removed.getPriority();
}
assertThat(checkSet).isEmpty();
assertThat(priorityQueue.poll()).isNull();
}
@Test
void testIsEmpty() {
InternalPriorityQueue<TestElement> priorityQueue = newPriorityQueue(1);
assertThat(priorityQueue.isEmpty()).isTrue();
assertThat(priorityQueue.add(new TestElement(4711L, 42L))).isTrue();
assertThat(priorityQueue.isEmpty()).isFalse();
priorityQueue.poll();
assertThat(priorityQueue.isEmpty()).isTrue();
}
@Test
void testBulkAddRestoredElements() throws Exception {
final int testSize = 10;
HashSet<TestElement> elementSet = new HashSet<>(testSize);
for (int i = 0; i < testSize; ++i) {
elementSet.add(new TestElement(i, i));
}
List<TestElement> twoTimesElementSet = new ArrayList<>(elementSet.size() * 2);
for (TestElement testElement : elementSet) {
twoTimesElementSet.add(testElement.deepCopy());
twoTimesElementSet.add(testElement.deepCopy());
}
InternalPriorityQueue<TestElement> priorityQueue = newPriorityQueue(1);
priorityQueue.addAll(twoTimesElementSet);
priorityQueue.addAll(elementSet);
final int expectedSize =
testSetSemanticsAgainstDuplicateElements()
? elementSet.size()
: 3 * elementSet.size();
assertThat(priorityQueue.size()).isEqualTo(expectedSize);
try (final CloseableIterator<TestElement> iterator = priorityQueue.iterator()) {
while (iterator.hasNext()) {
if (testSetSemanticsAgainstDuplicateElements()) {
assertThat(elementSet.remove(iterator.next())).isTrue();
} else {
assertThat(elementSet).contains(iterator.next());
}
}
}
if (testSetSemanticsAgainstDuplicateElements()) {
assertThat(elementSet).isEmpty();
}
}
@Test
void testIterator() throws Exception {
    InternalPriorityQueue<TestElement> queue = newPriorityQueue(1);

    // An iterator over an empty queue yields nothing, and next() must fail.
    try (CloseableIterator<TestElement> emptyIterator = queue.iterator()) {
        assertThat(emptyIterator).isExhausted();
        assertThatThrownBy(emptyIterator::next).isInstanceOf(NoSuchElementException.class);
    }

    // Fill the queue and verify the iterator visits every inserted element exactly once.
    final int elementCount = 10;
    HashSet<TestElement> expected = new HashSet<>(elementCount);
    insertRandomElements(queue, expected, elementCount);

    try (CloseableIterator<TestElement> iterator = queue.iterator()) {
        while (iterator.hasNext()) {
            assertThat(expected.remove(iterator.next())).isTrue();
        }
        assertThat(expected).isEmpty();
    }
}
@Test
void testAdd() {
    InternalPriorityQueue<TestElement> queue = newPriorityQueue(1);

    final List<TestElement> elements =
        Arrays.asList(new TestElement(4711L, 42L), new TestElement(815L, 23L));
    // Order the pair from lowest to highest polling priority (the queue head comes last).
    elements.sort((l, r) -> getTestElementPriorityComparator().compare(r.priority, l.priority));

    assertThat(queue.add(elements.get(0))).isTrue();
    if (testSetSemanticsAgainstDuplicateElements()) {
        // Under set semantics a duplicate copy must be deduplicated.
        queue.add(elements.get(0).deepCopy());
    }
    assertThat(queue.size()).isOne();

    assertThat(queue.add(elements.get(1))).isTrue();
    assertThat(queue.size()).isEqualTo(2);

    // Polling returns elements in priority order and shrinks the queue step by step.
    assertThat(queue.poll()).isEqualTo(elements.get(1));
    assertThat(queue.size()).isOne();
    assertThat(queue.poll()).isEqualTo(elements.get(0));
    assertThat(queue.size()).isZero();
}
@Test
void testRemove() {
    InternalPriorityQueue<TestElement> queue = newPriorityQueue(1);
    final TestElement element = new TestElement(4711L, 42L);

    if (testSetSemanticsAgainstDuplicateElements()) {
        // Removing from an empty queue must report that nothing was removed.
        assertThat(queue.remove(element)).isFalse();
    }

    assertThat(queue.add(element)).isTrue();
    assertThat(queue.remove(element)).isTrue();

    if (testSetSemanticsAgainstDuplicateElements()) {
        // A second removal of the same element must fail under set semantics.
        assertThat(queue.remove(element)).isFalse();
    }
    assertThat(queue.isEmpty()).isTrue();
}
/** Creates the {@link InternalPriorityQueue} implementation under test. */
protected abstract InternalPriorityQueue<TestElement> newPriorityQueue(int initialCapacity);

/** Whether the implementation deduplicates equal elements (set semantics). */
protected abstract boolean testSetSemanticsAgainstDuplicateElements();
/** Payload for usage in the test. */
protected static
|
InternalPriorityQueueTestBase
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_filteredOn_in_Test.java
|
{
"start": 1099,
"end": 4073
}
|
/**
 * Tests for {@code filteredOn(String, FilterOperator)} with the {@code in(...)} filter,
 * applied to iterables/streams of employees provided by the base test fixture.
 */
class ____ extends IterableAssert_filtered_baseTest {

    @Test
    void should_apply_in_filter() {
        assertThat(employees).filteredOn("age", in(800, 26)).containsOnly(yoda, obiwan, luke);
        assertThat(employees).filteredOn("age", in(800)).containsOnly(yoda, obiwan);
    }

    @Test
    void should_filter_iterable_under_test_on_property_not_backed_by_a_field_values() {
        // "adult" is a computed property (getter only), not a stored field.
        assertThat(employees).filteredOn("adult", in(false)).containsOnly(noname);
        assertThat(employees).filteredOn("adult", in(true)).containsOnly(yoda, obiwan, luke);
    }

    @Test
    void should_filter_iterable_under_test_on_public_field_values() {
        assertThat(employees).filteredOn("id", 1L).containsOnly(yoda);
    }

    @Test
    void should_filter_iterable_under_test_on_private_field_values() {
        // Private fields are readable because private-field extraction is enabled by default.
        assertThat(employees).filteredOn("city", in("New York")).containsOnly(yoda, obiwan, luke, noname);
        assertThat(employees).filteredOn("city", in("Paris")).isEmpty();
    }

    @Test
    void should_fail_if_filter_is_on_private_field_and_reading_private_field_is_disabled() {
        // Disable private-field extraction, then restore it so other tests are unaffected.
        setAllowExtractingPrivateFields(false);
        try {
            assertThatExceptionOfType(IntrospectionError.class).isThrownBy(() -> {
                assertThat(employees).filteredOn("city", in("New York")).isEmpty();
            });
        } finally {
            setAllowExtractingPrivateFields(true);
        }
    }

    @Test
    void should_filter_stream_under_test_on_property_values() {
        assertThat(employees.stream()).filteredOn("age", in(800))
                                      .containsOnly(yoda, obiwan);
    }

    @Test
    void should_filter_iterable_under_test_on_nested_property_values() {
        assertThat(employees).filteredOn("name.first", in("Luke")).containsOnly(luke);
    }

    @Test
    void should_filter_iterable_under_test_on_nested_mixed_property_and_field_values() {
        assertThat(employees).filteredOn("name.last", in("Vader")).isEmpty();
        assertThat(employees).filteredOn("name.last", in("Skywalker")).containsOnly(luke);
    }

    @Test
    void should_fail_if_given_property_or_field_name_is_null() {
        assertThatIllegalArgumentException().isThrownBy(() -> assertThat(employees).filteredOn((String) null, in(800)))
                                            .withMessage("The property/field name to filter on should not be null or empty");
    }

    @Test
    void should_fail_if_given_property_or_field_name_is_empty() {
        assertThatIllegalArgumentException().isThrownBy(() -> assertThat(employees).filteredOn("", in(800)))
                                            .withMessage("The property/field name to filter on should not be null or empty");
    }

    @Test
    void should_fail_if_on_of_the_iterable_element_does_not_have_given_property_or_field() {
        assertThatExceptionOfType(IntrospectionError.class).isThrownBy(() -> assertThat(employees).filteredOn("secret", in("???")))
                                                           .withMessageContaining("Can't find any field or property with name 'secret'");
    }
}
|
IterableAssert_filteredOn_in_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/manytoonewithformula/ContractId.java
|
{
"start": 282,
"end": 2494
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private Integer companyCode;
private Long contractNumber;
private Integer contractSequenceNumber;
public ContractId() {
}
public ContractId(Integer companyCode, Long contractNumber,
Integer contractSequenceNumber) {
this.companyCode = companyCode;
this.contractNumber = contractNumber;
this.contractSequenceNumber = contractSequenceNumber;
}
@Column(name = "CDT_COMPANY_CODE")
public Integer getCompanyCode() {
return companyCode;
}
public void setCompanyCode(Integer companyCode) {
this.companyCode = companyCode;
}
@Column(name="CDT_NBR")
public Long getContractNumber() {
return contractNumber;
}
public void setContractNumber(Long contractNumber) {
this.contractNumber = contractNumber;
}
@Column(name="CDT_SEQ_NBR")
public Integer getContractSequenceNumber() {
return contractSequenceNumber;
}
public void setContractSequenceNumber(Integer contractSequenceNumber) {
this.contractSequenceNumber = contractSequenceNumber;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((companyCode == null) ? 0 : companyCode.hashCode());
result = prime * result
+ ((contractNumber == null) ? 0 : contractNumber.hashCode());
result = prime
* result
+ ((contractSequenceNumber == null) ? 0
: contractSequenceNumber.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ContractId other = (ContractId) obj;
if (companyCode == null) {
if (other.companyCode != null)
return false;
} else if (!companyCode.equals(other.companyCode))
return false;
if (contractNumber == null) {
if (other.contractNumber != null)
return false;
} else if (!contractNumber.equals(other.contractNumber))
return false;
if (contractSequenceNumber == null) {
if (other.contractSequenceNumber != null)
return false;
} else if (!contractSequenceNumber.equals(other.contractSequenceNumber))
return false;
return true;
}
}
|
ContractId
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/event/jfr/NoOpEventRecorder.java
|
{
"start": 146,
"end": 444
}
|
/**
 * No-op {@link EventRecorder} singleton that also serves as its own
 * {@link EventRecorder.RecordableEvent}: all record/start calls discard the event.
 */
enum ____ implements EventRecorder, EventRecorder.RecordableEvent {

    INSTANCE;

    @Override
    public void record(Event event) {
        // intentionally empty: events are discarded
    }

    @Override
    public RecordableEvent start(Event event) {
        // returns itself so the eventual record() completion call is also a no-op
        return this;
    }

    @Override
    public void record() {
        // intentionally empty: nothing was started, nothing to record
    }
}
|
NoOpEventRecorder
|
java
|
apache__camel
|
components/camel-elasticsearch/src/main/java/org/apache/camel/component/es/ElasticsearchComponent.java
|
{
"start": 1346,
"end": 9376
}
|
class ____ extends DefaultComponent {
@Metadata(label = "advanced", autowired = true)
private RestClient client;
@Metadata
private String hostAddresses;
@Metadata(defaultValue = "" + ElasticsearchConstants.DEFAULT_SOCKET_TIMEOUT)
private int socketTimeout = ElasticsearchConstants.DEFAULT_SOCKET_TIMEOUT;
@Metadata(defaultValue = "" + ElasticsearchConstants.MAX_RETRY_TIMEOUT)
private int maxRetryTimeout = ElasticsearchConstants.MAX_RETRY_TIMEOUT;
@Metadata(defaultValue = "" + ElasticsearchConstants.DEFAULT_CONNECTION_TIMEOUT)
private int connectionTimeout = ElasticsearchConstants.DEFAULT_CONNECTION_TIMEOUT;
@Metadata(defaultValue = "false")
private boolean enableDocumentOnlyMode;
@Metadata(label = "security", secret = true)
private String user;
@Metadata(label = "security", secret = true)
private String password;
@Metadata(label = "security")
private boolean enableSSL;
@Metadata(label = "security")
private String certificatePath;
@Metadata(label = "advanced")
private boolean enableSniffer;
@Metadata(label = "advanced", defaultValue = "" + ElasticsearchConstants.DEFAULT_SNIFFER_INTERVAL)
private int snifferInterval = ElasticsearchConstants.DEFAULT_SNIFFER_INTERVAL;
@Metadata(label = "advanced", defaultValue = "" + ElasticsearchConstants.DEFAULT_AFTER_FAILURE_DELAY)
private int sniffAfterFailureDelay = ElasticsearchConstants.DEFAULT_AFTER_FAILURE_DELAY;
public ElasticsearchComponent() {
this(null);
}
public ElasticsearchComponent(CamelContext context) {
super(context);
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
ElasticsearchConfiguration config = new ElasticsearchConfiguration();
config.setHostAddresses(this.getHostAddresses());
config.setSocketTimeout(this.getSocketTimeout());
config.setMaxRetryTimeout(this.getMaxRetryTimeout());
config.setConnectionTimeout(this.getConnectionTimeout());
config.setUser(this.getUser());
config.setEnableSSL(this.isEnableSSL());
config.setPassword(this.getPassword());
config.setCertificatePath(this.getCertificatePath());
config.setEnableSniffer(this.isEnableSniffer());
config.setSnifferInterval(this.getSnifferInterval());
config.setSniffAfterFailureDelay(this.getSniffAfterFailureDelay());
config.setClusterName(remaining);
config.setEnableDocumentOnlyMode(this.isEnableDocumentOnlyMode());
Endpoint endpoint = new ElasticsearchEndpoint(uri, this, config, client);
setProperties(endpoint, parameters);
config.setHostAddressesList(parseHostAddresses(config.getHostAddresses(), config));
return endpoint;
}
private List<HttpHost> parseHostAddresses(String ipsString, ElasticsearchConfiguration config) {
if (ipsString == null || ipsString.isEmpty()) {
return null;
}
List<String> addressesStr = Arrays.asList(ipsString.split(","));
List<HttpHost> addressesTrAd = new ArrayList<>(addressesStr.size());
for (String address : addressesStr) {
String[] split = address.split(":");
String hostname;
if (split.length > 0) {
hostname = split[0];
} else {
throw new IllegalArgumentException();
}
int port = split.length > 1 ? Integer.parseInt(split[1]) : ElasticsearchConstants.DEFAULT_PORT;
addressesTrAd.add(new HttpHost(hostname, port, config.isEnableSSL() ? "HTTPS" : "HTTP"));
}
return addressesTrAd;
}
public RestClient getClient() {
return client;
}
/**
* To use an existing configured Elasticsearch client, instead of creating a client per endpoint. This allows
* customizing the client with specific settings.
*/
public void setClient(RestClient client) {
this.client = client;
}
/**
* Comma separated list with ip:port formatted remote transport addresses to use. The ip and port options must be
* left blank for hostAddresses to be considered instead.
*/
public String getHostAddresses() {
return hostAddresses;
}
public void setHostAddresses(String hostAddresses) {
this.hostAddresses = hostAddresses;
}
/**
* The timeout in ms to wait before the socket will time out.
*/
public int getSocketTimeout() {
return socketTimeout;
}
public void setSocketTimeout(int socketTimeout) {
this.socketTimeout = socketTimeout;
}
/**
* The time in ms to wait before connection will time out.
*/
public int getConnectionTimeout() {
return connectionTimeout;
}
public void setConnectionTimeout(int connectionTimeout) {
this.connectionTimeout = connectionTimeout;
}
/**
* Basic authenticate user
*/
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
/**
* Password for authenticating
*/
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public boolean isEnableSSL() {
return enableSSL;
}
/**
* Enable SSL
*/
public void setEnableSSL(boolean enableSSL) {
this.enableSSL = enableSSL;
}
/**
* The path of the self-signed certificate to use to access to Elasticsearch.
*/
public String getCertificatePath() {
return certificatePath;
}
public void setCertificatePath(String certificatePath) {
this.certificatePath = certificatePath;
}
/**
* Indicates whether the body of the message contains only documents. By default, it is set to false to be able to
* do the same requests as what the Document API supports (see
* https://www.elastic.co/guide/en/elasticsearch/reference/current/docs.html for more details). To ease the
* migration of routes based on the legacy component camel-elasticsearch-rest, you should consider enabling the
* mode, especially if your routes do update operations.
*/
public boolean isEnableDocumentOnlyMode() {
return enableDocumentOnlyMode;
}
public void setEnableDocumentOnlyMode(boolean enableDocumentOnlyMode) {
this.enableDocumentOnlyMode = enableDocumentOnlyMode;
}
/**
* The time in ms before retry
*/
public int getMaxRetryTimeout() {
return maxRetryTimeout;
}
public void setMaxRetryTimeout(int maxRetryTimeout) {
this.maxRetryTimeout = maxRetryTimeout;
}
public boolean isEnableSniffer() {
return enableSniffer;
}
/**
* Enable automatically discover nodes from a running Elasticsearch cluster. If this option is used in conjunction
* with Spring Boot, then it's managed by the Spring Boot configuration (see: Disable Sniffer in Spring Boot).
*/
public void setEnableSniffer(boolean enableSniffer) {
this.enableSniffer = enableSniffer;
}
/**
* The interval between consecutive ordinary sniff executions in milliseconds. Will be honoured when sniffOnFailure
* is disabled or when there are no failures between consecutive sniff executions
*/
public int getSnifferInterval() {
return snifferInterval;
}
public void setSnifferInterval(int snifferInterval) {
this.snifferInterval = snifferInterval;
}
/**
* The delay of a sniff execution scheduled after a failure (in milliseconds)
*/
public int getSniffAfterFailureDelay() {
return sniffAfterFailureDelay;
}
public void setSniffAfterFailureDelay(int sniffAfterFailureDelay) {
this.sniffAfterFailureDelay = sniffAfterFailureDelay;
}
}
|
ElasticsearchComponent
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/record/LegacyRecord.java
|
{
"start": 1696,
"end": 22728
}
|
class ____ {
/**
* The current offset and size for all the fixed-length fields
*/
public static final int CRC_OFFSET = 0;
public static final int CRC_LENGTH = 4;
public static final int MAGIC_OFFSET = CRC_OFFSET + CRC_LENGTH;
public static final int MAGIC_LENGTH = 1;
public static final int ATTRIBUTES_OFFSET = MAGIC_OFFSET + MAGIC_LENGTH;
public static final int ATTRIBUTES_LENGTH = 1;
public static final int TIMESTAMP_OFFSET = ATTRIBUTES_OFFSET + ATTRIBUTES_LENGTH;
public static final int TIMESTAMP_LENGTH = 8;
public static final int KEY_SIZE_OFFSET_V0 = ATTRIBUTES_OFFSET + ATTRIBUTES_LENGTH;
public static final int KEY_SIZE_OFFSET_V1 = TIMESTAMP_OFFSET + TIMESTAMP_LENGTH;
public static final int KEY_SIZE_LENGTH = 4;
public static final int KEY_OFFSET_V0 = KEY_SIZE_OFFSET_V0 + KEY_SIZE_LENGTH;
public static final int KEY_OFFSET_V1 = KEY_SIZE_OFFSET_V1 + KEY_SIZE_LENGTH;
public static final int VALUE_SIZE_LENGTH = 4;
/**
* The size for the record header
*/
public static final int HEADER_SIZE_V0 = CRC_LENGTH + MAGIC_LENGTH + ATTRIBUTES_LENGTH;
public static final int HEADER_SIZE_V1 = CRC_LENGTH + MAGIC_LENGTH + ATTRIBUTES_LENGTH + TIMESTAMP_LENGTH;
/**
* The amount of overhead bytes in a record
*/
public static final int RECORD_OVERHEAD_V0 = HEADER_SIZE_V0 + KEY_SIZE_LENGTH + VALUE_SIZE_LENGTH;
/**
* The amount of overhead bytes in a record
*/
public static final int RECORD_OVERHEAD_V1 = HEADER_SIZE_V1 + KEY_SIZE_LENGTH + VALUE_SIZE_LENGTH;
/**
* Specifies the mask for the compression code. 3 bits to hold the compression codec. 0 is reserved to indicate no
* compression
*/
private static final byte COMPRESSION_CODEC_MASK = 0x07;
/**
* Specify the mask of timestamp type: 0 for CreateTime, 1 for LogAppendTime.
*/
private static final byte TIMESTAMP_TYPE_MASK = 0x08;
/**
* Timestamp value for records without a timestamp
*/
public static final long NO_TIMESTAMP = -1L;
private final ByteBuffer buffer;
private final Long wrapperRecordTimestamp;
private final TimestampType wrapperRecordTimestampType;
public LegacyRecord(ByteBuffer buffer) {
this(buffer, null, null);
}
public LegacyRecord(ByteBuffer buffer, Long wrapperRecordTimestamp, TimestampType wrapperRecordTimestampType) {
this.buffer = buffer;
this.wrapperRecordTimestamp = wrapperRecordTimestamp;
this.wrapperRecordTimestampType = wrapperRecordTimestampType;
}
/**
* Compute the checksum of the record from the record contents
*/
public long computeChecksum() {
return crc32(buffer, MAGIC_OFFSET, buffer.limit() - MAGIC_OFFSET);
}
/**
* Retrieve the previously computed CRC for this record
*/
public long checksum() {
return ByteUtils.readUnsignedInt(buffer, CRC_OFFSET);
}
/**
* Returns true if the crc stored with the record matches the crc computed off the record contents
*/
public boolean isValid() {
return sizeInBytes() >= RECORD_OVERHEAD_V0 && checksum() == computeChecksum();
}
/**
* Throw an CorruptRecordException if isValid is false for this record
*/
public void ensureValid() {
if (sizeInBytes() < RECORD_OVERHEAD_V0)
throw new CorruptRecordException("Record is corrupt (crc could not be retrieved as the record is too "
+ "small, size = " + sizeInBytes() + ")");
if (!isValid())
throw new CorruptRecordException("Record is corrupt (stored crc = " + checksum()
+ ", computed crc = " + computeChecksum() + ")");
}
/**
* The complete serialized size of this record in bytes (including crc, header attributes, etc), but
* excluding the log overhead (offset and record size).
* @return the size in bytes
*/
public int sizeInBytes() {
return buffer.limit();
}
/**
* The length of the key in bytes
* @return the size in bytes of the key (0 if the key is null)
*/
public int keySize() {
if (magic() == RecordBatch.MAGIC_VALUE_V0)
return buffer.getInt(KEY_SIZE_OFFSET_V0);
else
return buffer.getInt(KEY_SIZE_OFFSET_V1);
}
/**
* Does the record have a key?
* @return true if so, false otherwise
*/
public boolean hasKey() {
return keySize() >= 0;
}
/**
* The position where the value size is stored
*/
private int valueSizeOffset() {
if (magic() == RecordBatch.MAGIC_VALUE_V0)
return KEY_OFFSET_V0 + Math.max(0, keySize());
else
return KEY_OFFSET_V1 + Math.max(0, keySize());
}
/**
* The length of the value in bytes
* @return the size in bytes of the value (0 if the value is null)
*/
public int valueSize() {
return buffer.getInt(valueSizeOffset());
}
/**
* Check whether the value field of this record is null.
* @return true if the value is null, false otherwise
*/
public boolean hasNullValue() {
return valueSize() < 0;
}
/**
* The magic value (i.e. message format version) of this record
* @return the magic value
*/
public byte magic() {
return buffer.get(MAGIC_OFFSET);
}
/**
* The attributes stored with this record
* @return the attributes
*/
public byte attributes() {
return buffer.get(ATTRIBUTES_OFFSET);
}
/**
* When magic value is greater than 0, the timestamp of a record is determined in the following way:
* 1. wrapperRecordTimestampType = null and wrapperRecordTimestamp is null - Uncompressed message, timestamp is in the message.
* 2. wrapperRecordTimestampType = LOG_APPEND_TIME and WrapperRecordTimestamp is not null - Compressed message using LOG_APPEND_TIME
* 3. wrapperRecordTimestampType = CREATE_TIME and wrapperRecordTimestamp is not null - Compressed message using CREATE_TIME
*
* @return the timestamp as determined above
*/
public long timestamp() {
if (magic() == RecordBatch.MAGIC_VALUE_V0)
return RecordBatch.NO_TIMESTAMP;
else {
// case 2
if (wrapperRecordTimestampType == TimestampType.LOG_APPEND_TIME && wrapperRecordTimestamp != null)
return wrapperRecordTimestamp;
// Case 1, 3
else
return buffer.getLong(TIMESTAMP_OFFSET);
}
}
/**
* Get the timestamp type of the record.
*
* @return The timestamp type or {@link TimestampType#NO_TIMESTAMP_TYPE} if the magic is 0.
*/
public TimestampType timestampType() {
return timestampType(magic(), wrapperRecordTimestampType, attributes());
}
/**
* The compression type used with this record
*/
public CompressionType compressionType() {
return CompressionType.forId(buffer.get(ATTRIBUTES_OFFSET) & COMPRESSION_CODEC_MASK);
}
/**
* A ByteBuffer containing the value of this record
* @return the value or null if the value for this record is null
*/
public ByteBuffer value() {
return Utils.sizeDelimited(buffer, valueSizeOffset());
}
/**
* A ByteBuffer containing the message key
* @return the buffer or null if the key for this record is null
*/
public ByteBuffer key() {
if (magic() == RecordBatch.MAGIC_VALUE_V0)
return Utils.sizeDelimited(buffer, KEY_SIZE_OFFSET_V0);
else
return Utils.sizeDelimited(buffer, KEY_SIZE_OFFSET_V1);
}
/**
* Get the underlying buffer backing this record instance.
*
* @return the buffer
*/
public ByteBuffer buffer() {
return this.buffer;
}
public String toString() {
if (magic() > 0)
return String.format("Record(magic=%d, attributes=%d, compression=%s, crc=%d, %s=%d, key=%d bytes, value=%d bytes)",
magic(),
attributes(),
compressionType(),
checksum(),
timestampType(),
timestamp(),
key() == null ? 0 : key().limit(),
value() == null ? 0 : value().limit());
else
return String.format("Record(magic=%d, attributes=%d, compression=%s, crc=%d, key=%d bytes, value=%d bytes)",
magic(),
attributes(),
compressionType(),
checksum(),
key() == null ? 0 : key().limit(),
value() == null ? 0 : value().limit());
}
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
if (!other.getClass().equals(LegacyRecord.class))
return false;
LegacyRecord record = (LegacyRecord) other;
return this.buffer.equals(record.buffer);
}
public int hashCode() {
return buffer.hashCode();
}
/**
* Create a new record instance. If the record's compression type is not none, then
* its value payload should be already compressed with the specified type; the constructor
* would always write the value payload as is and will not do the compression itself.
*
* @param magic The magic value to use
* @param timestamp The timestamp of the record
* @param key The key of the record (null, if none)
* @param value The record value
* @param compressionType The compression type used on the contents of the record (if any)
* @param timestampType The timestamp type to be used for this record
*/
public static LegacyRecord create(byte magic,
long timestamp,
byte[] key,
byte[] value,
CompressionType compressionType,
TimestampType timestampType) {
int keySize = key == null ? 0 : key.length;
int valueSize = value == null ? 0 : value.length;
ByteBuffer buffer = ByteBuffer.allocate(recordSize(magic, keySize, valueSize));
write(buffer, magic, timestamp, wrapNullable(key), wrapNullable(value), compressionType, timestampType);
buffer.rewind();
return new LegacyRecord(buffer);
}
public static LegacyRecord create(byte magic, long timestamp, byte[] key, byte[] value) {
return create(magic, timestamp, key, value, CompressionType.NONE, TimestampType.CREATE_TIME);
}
/**
* Write the header for a compressed record set in-place (i.e. assuming the compressed record data has already
* been written at the value offset in a wrapped record). This lets you dynamically create a compressed message
* set, and then go back later and fill in its size and CRC, which saves the need for copying to another buffer.
*
* @param buffer The buffer containing the compressed record data positioned at the first offset of the
* @param magic The magic value of the record set
* @param recordSize The size of the record (including record overhead)
* @param timestamp The timestamp of the wrapper record
* @param compressionType The compression type used
* @param timestampType The timestamp type of the wrapper record
*/
public static void writeCompressedRecordHeader(ByteBuffer buffer,
byte magic,
int recordSize,
long timestamp,
CompressionType compressionType,
TimestampType timestampType) {
int recordPosition = buffer.position();
int valueSize = recordSize - recordOverhead(magic);
// write the record header with a null value (the key is always null for the wrapper)
write(buffer, magic, timestamp, null, null, compressionType, timestampType);
buffer.position(recordPosition);
// now fill in the value size
buffer.putInt(recordPosition + keyOffset(magic), valueSize);
// compute and fill the crc from the beginning of the message
long crc = crc32(buffer, MAGIC_OFFSET, recordSize - MAGIC_OFFSET);
ByteUtils.writeUnsignedInt(buffer, recordPosition + CRC_OFFSET, crc);
}
private static void write(ByteBuffer buffer,
byte magic,
long timestamp,
ByteBuffer key,
ByteBuffer value,
CompressionType compressionType,
TimestampType timestampType) {
try (DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer))) {
write(out, magic, timestamp, key, value, compressionType, timestampType);
} catch (IOException e) {
throw new KafkaException(e);
}
}
/**
* Write the record data with the given compression type and return the computed crc.
*
* @param out The output stream to write to
* @param magic The magic value to be used
* @param timestamp The timestamp of the record
* @param key The record key
* @param value The record value
* @param compressionType The compression type
* @param timestampType The timestamp type
* @return the computed CRC for this record.
* @throws IOException for any IO errors writing to the output stream.
*/
public static long write(DataOutputStream out,
byte magic,
long timestamp,
byte[] key,
byte[] value,
CompressionType compressionType,
TimestampType timestampType) throws IOException {
return write(out, magic, timestamp, wrapNullable(key), wrapNullable(value), compressionType, timestampType);
}
public static long write(DataOutputStream out,
byte magic,
long timestamp,
ByteBuffer key,
ByteBuffer value,
CompressionType compressionType,
TimestampType timestampType) throws IOException {
byte attributes = computeAttributes(magic, compressionType, timestampType);
long crc = computeChecksum(magic, attributes, timestamp, key, value);
write(out, magic, crc, attributes, timestamp, key, value);
return crc;
}
/**
* Write a record using raw fields (without validation). This should only be used in testing.
*/
public static void write(DataOutputStream out,
byte magic,
long crc,
byte attributes,
long timestamp,
byte[] key,
byte[] value) throws IOException {
write(out, magic, crc, attributes, timestamp, wrapNullable(key), wrapNullable(value));
}
// Write a record to the buffer, if the record's compression type is none, then
// its value payload should be already compressed with the specified type
private static void write(DataOutputStream out,
byte magic,
long crc,
byte attributes,
long timestamp,
ByteBuffer key,
ByteBuffer value) throws IOException {
if (magic != RecordBatch.MAGIC_VALUE_V0 && magic != RecordBatch.MAGIC_VALUE_V1)
throw new IllegalArgumentException("Invalid magic value " + magic);
if (timestamp < 0 && timestamp != RecordBatch.NO_TIMESTAMP)
throw new IllegalArgumentException("Invalid message timestamp " + timestamp);
// write crc
out.writeInt((int) (crc & 0xffffffffL));
// write magic value
out.writeByte(magic);
// write attributes
out.writeByte(attributes);
// maybe write timestamp
if (magic > RecordBatch.MAGIC_VALUE_V0)
out.writeLong(timestamp);
// write the key
if (key == null) {
out.writeInt(-1);
} else {
int size = key.remaining();
out.writeInt(size);
Utils.writeTo(out, key, size);
}
// write the value
if (value == null) {
out.writeInt(-1);
} else {
int size = value.remaining();
out.writeInt(size);
Utils.writeTo(out, value, size);
}
}
static int recordSize(byte magic, ByteBuffer key, ByteBuffer value) {
return recordSize(magic, key == null ? 0 : key.limit(), value == null ? 0 : value.limit());
}
public static int recordSize(byte magic, int keySize, int valueSize) {
return recordOverhead(magic) + keySize + valueSize;
}
// visible only for testing
public static byte computeAttributes(byte magic, CompressionType type, TimestampType timestampType) {
byte attributes = 0;
if (type.id > 0)
attributes |= (byte) (COMPRESSION_CODEC_MASK & type.id);
if (magic > RecordBatch.MAGIC_VALUE_V0) {
if (timestampType == TimestampType.NO_TIMESTAMP_TYPE)
throw new IllegalArgumentException("Timestamp type must be provided to compute attributes for " +
"message format v1");
if (timestampType == TimestampType.LOG_APPEND_TIME)
attributes |= TIMESTAMP_TYPE_MASK;
}
return attributes;
}
// visible only for testing
public static long computeChecksum(byte magic, byte attributes, long timestamp, byte[] key, byte[] value) {
return computeChecksum(magic, attributes, timestamp, wrapNullable(key), wrapNullable(value));
}
private static long crc32(ByteBuffer buffer, int offset, int size) {
CRC32 crc = new CRC32();
Checksums.update(crc, buffer, offset, size);
return crc.getValue();
}
/**
* Compute the checksum of the record from the attributes, key and value payloads
*/
private static long computeChecksum(byte magic, byte attributes, long timestamp, ByteBuffer key, ByteBuffer value) {
CRC32 crc = new CRC32();
crc.update(magic);
crc.update(attributes);
if (magic > RecordBatch.MAGIC_VALUE_V0)
Checksums.updateLong(crc, timestamp);
// update for the key
if (key == null) {
Checksums.updateInt(crc, -1);
} else {
int size = key.remaining();
Checksums.updateInt(crc, size);
Checksums.update(crc, key, size);
}
// update for the value
if (value == null) {
Checksums.updateInt(crc, -1);
} else {
int size = value.remaining();
Checksums.updateInt(crc, size);
Checksums.update(crc, value, size);
}
return crc.getValue();
}
static int recordOverhead(byte magic) {
if (magic == 0)
return RECORD_OVERHEAD_V0;
else if (magic == 1)
return RECORD_OVERHEAD_V1;
throw new IllegalArgumentException("Invalid magic used in LegacyRecord: " + magic);
}
static int headerSize(byte magic) {
if (magic == 0)
return HEADER_SIZE_V0;
else if (magic == 1)
return HEADER_SIZE_V1;
throw new IllegalArgumentException("Invalid magic used in LegacyRecord: " + magic);
}
private static int keyOffset(byte magic) {
if (magic == 0)
return KEY_OFFSET_V0;
else if (magic == 1)
return KEY_OFFSET_V1;
throw new IllegalArgumentException("Invalid magic used in LegacyRecord: " + magic);
}
public static TimestampType timestampType(byte magic, TimestampType wrapperRecordTimestampType, byte attributes) {
if (magic == 0)
return TimestampType.NO_TIMESTAMP_TYPE;
else if (wrapperRecordTimestampType != null)
return wrapperRecordTimestampType;
else
return (attributes & TIMESTAMP_TYPE_MASK) == 0 ? TimestampType.CREATE_TIME : TimestampType.LOG_APPEND_TIME;
}
}
|
LegacyRecord
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/view/xml/MappingJackson2XmlViewTests.java
|
{
"start": 8985,
"end": 9350
}
|
/**
 * Custom Jackson serializer used by the tests: it ignores the actual value and
 * always writes the fixed object {@code {"testBeanSimple":"custom"}}.
 */
class ____ extends JsonSerializer<Object> {

    @Override
    public void serialize(Object value, JsonGenerator jgen, SerializerProvider provider) throws IOException {
        jgen.writeStartObject();
        jgen.writeFieldName("testBeanSimple");
        jgen.writeString("custom");
        jgen.writeEndObject();
    }
}
@SuppressWarnings("serial")
public static
|
TestBeanSimpleSerializer
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.