language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java
|
{
"start": 1248,
"end": 4522
}
|
class ____ extends RandomVectorScorer.AbstractRandomVectorScorer {
final int vectorByteSize;
final MemorySegmentAccessInput input;
final MemorySegment query;
final float scoreCorrectionConstant;
final float queryCorrection;
byte[] scratch;
/** Return an optional whose value, if present, is the scorer. Otherwise, an empty optional is returned. */
public static Optional<RandomVectorScorer> create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) {
checkDimensions(queryVector.length, values.dimension());
var input = values.getSlice();
if (input == null) {
return Optional.empty();
}
input = FilterIndexInput.unwrapOnlyTest(input);
if (input instanceof MemorySegmentAccessInput == false) {
return Optional.empty();
}
MemorySegmentAccessInput msInput = (MemorySegmentAccessInput) input;
checkInvariants(values.size(), values.dimension(), input);
ScalarQuantizer scalarQuantizer = values.getScalarQuantizer();
// TODO assert scalarQuantizer.getBits() == 7 or 8 ?
byte[] quantizedQuery = new byte[queryVector.length];
float queryCorrection = ScalarQuantizedVectorScorer.quantizeQuery(queryVector, quantizedQuery, sim, scalarQuantizer);
return switch (sim) {
case COSINE, DOT_PRODUCT -> Optional.of(new DotProductScorer(msInput, values, quantizedQuery, queryCorrection));
case EUCLIDEAN -> Optional.of(new EuclideanScorer(msInput, values, quantizedQuery, queryCorrection));
case MAXIMUM_INNER_PRODUCT -> Optional.of(new MaxInnerProductScorer(msInput, values, quantizedQuery, queryCorrection));
};
}
Int7SQVectorScorer(MemorySegmentAccessInput input, QuantizedByteVectorValues values, byte[] queryVector, float queryCorrection) {
super(values);
this.input = input;
assert queryVector.length == values.getVectorByteLength();
this.vectorByteSize = values.getVectorByteLength();
this.query = MemorySegment.ofArray(queryVector);
this.queryCorrection = queryCorrection;
this.scoreCorrectionConstant = values.getScalarQuantizer().getConstantMultiplier();
}
final MemorySegment getSegment(int ord) throws IOException {
checkOrdinal(ord);
long byteOffset = (long) ord * (vectorByteSize + Float.BYTES);
MemorySegment seg = input.segmentSliceOrNull(byteOffset, vectorByteSize);
if (seg == null) {
if (scratch == null) {
scratch = new byte[vectorByteSize];
}
input.readBytes(byteOffset, scratch, 0, vectorByteSize);
seg = MemorySegment.ofArray(scratch);
}
return seg;
}
static void checkInvariants(int maxOrd, int vectorByteLength, IndexInput input) {
if (input.length() < (long) vectorByteLength * maxOrd) {
throw new IllegalArgumentException("input length is less than expected vector data");
}
}
final void checkOrdinal(int ord) {
if (ord < 0 || ord >= maxOrd()) {
throw new IllegalArgumentException("illegal ordinal: " + ord);
}
}
public static final
|
Int7SQVectorScorer
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/authentication/CachingUserDetailsService.java
|
{
"start": 1688,
"end": 2272
}
|
class ____ defining a {@link org.springframework.context.annotation.Bean}
* that encapsulates an actual implementation of {@link UserDetailsService} and providing
* a {@link UserCache} implementation.
* </p>
* For example: <pre>
* @Bean
* public CachingUserDetailsService cachingUserDetailsService(UserCache userCache) {
* UserDetailsService delegate = ...;
* CachingUserDetailsService service = new CachingUserDetailsService(delegate);
* service.setUserCache(userCache);
* return service;
* }
* </pre>
*
* @author Luke Taylor
* @since 2.0
*/
public
|
by
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/VarCheckerTest.java
|
{
"start": 1737,
"end": 2125
}
|
class ____ {
// BUG: Diagnostic contains: public void x(@Var int y) {
public void x(int y) {
y++;
}
}
""")
.doTest();
}
@Test
public void negativeParam() {
compilationHelper
.addSourceLines(
"Test.java",
// TODO(b/21633565): force line break
"
|
Test
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/request/ErrorRequestCoordinator.java
|
{
"start": 273,
"end": 5568
}
|
class ____ implements RequestCoordinator, Request {
private final Object requestLock;
@Nullable private final RequestCoordinator parent;
private volatile Request primary;
private volatile Request error;
@GuardedBy("requestLock")
private RequestState primaryState = RequestState.CLEARED;
@GuardedBy("requestLock")
private RequestState errorState = RequestState.CLEARED;
public ErrorRequestCoordinator(Object requestLock, @Nullable RequestCoordinator parent) {
this.requestLock = requestLock;
this.parent = parent;
}
public void setRequests(Request primary, Request error) {
this.primary = primary;
this.error = error;
}
@Override
public void begin() {
synchronized (requestLock) {
if (primaryState != RequestState.RUNNING) {
primaryState = RequestState.RUNNING;
primary.begin();
}
}
}
@Override
public void clear() {
synchronized (requestLock) {
primaryState = RequestState.CLEARED;
primary.clear();
// Don't check primary's failed state here because it will have been reset by the clear call
// immediately before this.
if (errorState != RequestState.CLEARED) {
errorState = RequestState.CLEARED;
error.clear();
}
}
}
@Override
public void pause() {
synchronized (requestLock) {
if (primaryState == RequestState.RUNNING) {
primaryState = RequestState.PAUSED;
primary.pause();
}
if (errorState == RequestState.RUNNING) {
errorState = RequestState.PAUSED;
error.pause();
}
}
}
@Override
public boolean isRunning() {
synchronized (requestLock) {
return primaryState == RequestState.RUNNING || errorState == RequestState.RUNNING;
}
}
@Override
public boolean isComplete() {
synchronized (requestLock) {
return primaryState == RequestState.SUCCESS || errorState == RequestState.SUCCESS;
}
}
@Override
public boolean isCleared() {
synchronized (requestLock) {
return primaryState == RequestState.CLEARED && errorState == RequestState.CLEARED;
}
}
@Override
public boolean isEquivalentTo(Request o) {
if (o instanceof ErrorRequestCoordinator) {
ErrorRequestCoordinator other = (ErrorRequestCoordinator) o;
return primary.isEquivalentTo(other.primary) && error.isEquivalentTo(other.error);
}
return false;
}
@Override
public boolean canSetImage(Request request) {
synchronized (requestLock) {
// Only one of primary or error runs at a time, so if we've reached this point and nothing
// else is broken, we should have nothing else to enforce.
return parentCanSetImage();
}
}
@GuardedBy("requestLock")
private boolean parentCanSetImage() {
return parent == null || parent.canSetImage(this);
}
@Override
public boolean canNotifyStatusChanged(Request request) {
synchronized (requestLock) {
return parentCanNotifyStatusChanged() && isValidRequestForStatusChanged(request);
}
}
@Override
public boolean canNotifyCleared(Request request) {
synchronized (requestLock) {
return parentCanNotifyCleared() && request.equals(primary);
}
}
@GuardedBy("requestLock")
private boolean parentCanNotifyCleared() {
return parent == null || parent.canNotifyCleared(this);
}
@GuardedBy("requestLock")
private boolean parentCanNotifyStatusChanged() {
return parent == null || parent.canNotifyStatusChanged(this);
}
@GuardedBy("requestLock")
private boolean isValidRequestForStatusChanged(Request request) {
if (primaryState != RequestState.FAILED) {
return request.equals(primary);
} else {
return request.equals(error)
// We don't want to call onLoadStarted once for the primary request and then again
// if it fails and the error request starts. It's already running, so we might as well
// avoid the duplicate notification by only notifying about the error state when it's
// final.
&& (errorState == RequestState.SUCCESS || errorState == RequestState.FAILED);
}
}
@Override
public boolean isAnyResourceSet() {
synchronized (requestLock) {
return primary.isAnyResourceSet() || error.isAnyResourceSet();
}
}
@Override
public void onRequestSuccess(Request request) {
synchronized (requestLock) {
if (request.equals(primary)) {
primaryState = RequestState.SUCCESS;
} else if (request.equals(error)) {
errorState = RequestState.SUCCESS;
}
if (parent != null) {
parent.onRequestSuccess(this);
}
}
}
@Override
public void onRequestFailed(Request request) {
synchronized (requestLock) {
if (!request.equals(error)) {
primaryState = RequestState.FAILED;
if (errorState != RequestState.RUNNING) {
errorState = RequestState.RUNNING;
error.begin();
}
return;
}
errorState = RequestState.FAILED;
if (parent != null) {
parent.onRequestFailed(this);
}
}
}
@Override
public RequestCoordinator getRoot() {
synchronized (requestLock) {
return parent != null ? parent.getRoot() : this;
}
}
}
|
ErrorRequestCoordinator
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/oracle/OracleDropUserTest.java
|
{
"start": 1014,
"end": 2373
}
|
class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"DROP USER sidney; ";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("DROP USER sidney;",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
assertEquals(0, visitor.getTables().size());
}
public void test_1() throws Exception {
String sql = //
"DROP USER sidney CASCADE; ";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("DROP USER sidney CASCADE;",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
assertEquals(0, visitor.getTables().size());
}
}
|
OracleDropUserTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/DocIdBatchedDocumentIterator.java
|
{
"start": 589,
"end": 1162
}
|
class ____ extends BatchedDocumentsIterator<String> {
private final QueryBuilder query;
public DocIdBatchedDocumentIterator(OriginSettingClient client, String index, QueryBuilder query) {
super(client, index);
this.query = Objects.requireNonNull(query);
}
@Override
protected QueryBuilder getQuery() {
return query;
}
@Override
protected String map(SearchHit hit) {
return hit.getId();
}
@Override
protected boolean shouldFetchSource() {
return false;
}
}
|
DocIdBatchedDocumentIterator
|
java
|
apache__camel
|
components/camel-cometd/src/test/java/org/apache/camel/component/cometd/SslContextParametersInUriCometdProducerConsumerTest.java
|
{
"start": 1563,
"end": 3881
}
|
class ____ extends CamelTestSupport {
private int port;
private String uri;
@BindToRegistry("sslContextParameters")
public SSLContextParameters addSslContextParameters() {
KeyStoreParameters ksp = new KeyStoreParameters();
ksp.setResource("jsse/localhost.p12");
ksp.setPassword("changeit");
KeyManagersParameters kmp = new KeyManagersParameters();
kmp.setKeyPassword("changeit");
kmp.setKeyStore(ksp);
TrustManagersParameters tmp = new TrustManagersParameters();
tmp.setKeyStore(ksp);
SSLContextParameters sslContextParameters = new SSLContextParameters();
sslContextParameters.setKeyManagers(kmp);
sslContextParameters.setTrustManagers(tmp);
return sslContextParameters;
}
@Test
void testProducer() {
Person person = new Person("David", "Greco");
template.requestBody("direct:input", person);
MockEndpoint ep = context.getEndpoint("mock:test", MockEndpoint.class);
List<Exchange> exchanges = ep.getReceivedExchanges();
for (Exchange exchange : exchanges) {
Person person1 = (Person) exchange.getIn().getBody();
assertEquals("David", person1.getName());
assertEquals("Greco", person1.getSurname());
}
}
@Override
public void doPreSetup() {
port = AvailablePortFinder.getNextAvailable();
uri = "cometds://127.0.0.1:" + port + "/service/test?baseResource=file:./target/test-classes/webapp&"
+ "timeout=240000&interval=0&maxInterval=30000&multiFrameInterval=1500&jsonCommented=true&logLevel=2";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// setup SSL on the component
CometdComponent cometds = context.getComponent("cometds", CometdComponent.class);
cometds.setSslContextParameters(
context.getRegistry().lookupByNameAndType("sslContextParameters", SSLContextParameters.class));
from("direct:input").to(uri);
from(uri).to("mock:test");
}
};
}
public static
|
SslContextParametersInUriCometdProducerConsumerTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
|
{
"start": 39218,
"end": 39984
}
|
class ____ {
public static void main(String[] args) {
switch (args[0]) {
case String s when s.startsWith("a sale") -> System.out.println("it all starts with a sale");
case "one" -> System.out.println("one");
case "two", "three" -> System.out.println("two or three");
case String s -> System.out.println("some other string");
}
}
}
""")
.setArgs("-XepOpt:StatementSwitchToExpressionSwitch:EnableDirectConversion=true")
.doTest(TEXT_MATCH);
}
@Test
public void unnecessaryBreaks() {
refactoringHelper
.addInputLines(
"Test.java",
"""
public
|
Test
|
java
|
quarkusio__quarkus
|
extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/graal/MongoClientSubstitutions.java
|
{
"start": 1550,
"end": 2475
}
|
class ____ {
@Substitute
private List<MongoCompressor> buildCompressors(final String compressors, @Nullable final Integer zlibCompressionLevel) {
List<MongoCompressor> compressorsList = new ArrayList<>();
for (String cur : compressors.split(",")) {
if (cur.equals("zlib")) {
MongoCompressor zlibCompressor = MongoCompressor.createZlibCompressor();
zlibCompressor = zlibCompressor.withProperty(MongoCompressor.LEVEL, zlibCompressionLevel);
compressorsList.add(zlibCompressor);
} else if (cur.equals("snappy")) {
// DO NOTHING
} else if (!cur.isEmpty()) {
throw new IllegalArgumentException("Unsupported compressor '" + cur + "'");
}
}
return unmodifiableList(compressorsList);
}
}
@TargetClass(UnixServerAddress.class)
final
|
ConnectionStringSubstitution
|
java
|
netty__netty
|
codec-http3/src/main/java/io/netty/handler/codec/http3/HttpConversionUtil.java
|
{
"start": 4811,
"end": 27266
}
|
enum ____ {
/**
* HTTP extension header which will identify the stream id from the HTTP/3 event(s) responsible for
* generating an {@code HttpObject}
* <p>
* {@code "x-http3-stream-id"}
*/
STREAM_ID("x-http3-stream-id"),
/**
* HTTP extension header which will identify the scheme pseudo header from the HTTP/3 event(s) responsible for
* generating an {@code HttpObject}
* <p>
* {@code "x-http3-scheme"}
*/
SCHEME("x-http3-scheme"),
/**
* HTTP extension header which will identify the path pseudo header from the HTTP/3 event(s) responsible for
* generating an {@code HttpObject}
* <p>
* {@code "x-http3-path"}
*/
PATH("x-http3-path"),
/**
* HTTP extension header which will identify the stream id used to create this stream in an HTTP/3 push promise
* frame
* <p>
* {@code "x-http3-stream-promise-id"}
*/
STREAM_PROMISE_ID("x-http3-stream-promise-id");
private final AsciiString text;
ExtensionHeaderNames(String text) {
this.text = AsciiString.cached(text);
}
public AsciiString text() {
return text;
}
}
/**
* Apply HTTP/3 rules while translating status code to {@link HttpResponseStatus}
*
* @param status The status from an HTTP/3 frame
* @return The HTTP/1.x status
* @throws Http3Exception If there is a problem translating from HTTP/3 to HTTP/1.x
*/
private static HttpResponseStatus parseStatus(long streamId, @Nullable CharSequence status) throws Http3Exception {
HttpResponseStatus result;
try {
result = parseLine(status);
if (result == HttpResponseStatus.SWITCHING_PROTOCOLS) {
throw streamError(streamId, Http3ErrorCode.H3_MESSAGE_ERROR,
"Invalid HTTP/3 status code '" + status + "'", null);
}
} catch (Http3Exception e) {
throw e;
} catch (Throwable t) {
throw streamError(streamId, Http3ErrorCode.H3_MESSAGE_ERROR, "Unrecognized HTTP status code '"
+ status + "' encountered in translation to HTTP/1.x" + status, null);
}
return result;
}
/**
* Create a new object to contain the response data
*
* @param streamId The stream associated with the response
* @param http3Headers The initial set of HTTP/3 headers to create the response with
* @param alloc The {@link ByteBufAllocator} to use to generate the content of the message
* @param validateHttpHeaders <ul>
* <li>{@code true} to validate HTTP headers in the http-codec</li>
* <li>{@code false} not to validate HTTP headers in the http-codec</li>
* </ul>
* @return A new response object which represents headers/data
* @throws Http3Exception
*/
static FullHttpResponse toFullHttpResponse(long streamId, Http3Headers http3Headers, ByteBufAllocator alloc,
boolean validateHttpHeaders) throws Http3Exception {
ByteBuf content = alloc.buffer();
HttpResponseStatus status = parseStatus(streamId, http3Headers.status());
// HTTP/3 does not define a way to carry the version or reason phrase that is included in an
// HTTP/1.1 status line.
FullHttpResponse msg = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, content,
validateHttpHeaders);
try {
addHttp3ToHttpHeaders(streamId, http3Headers, msg, false);
} catch (Http3Exception e) {
msg.release();
throw e;
} catch (Throwable t) {
msg.release();
throw streamError(streamId, Http3ErrorCode.H3_MESSAGE_ERROR,
"HTTP/3 to HTTP/1.x headers conversion error", t);
}
return msg;
}
private static CharSequence extractPath(CharSequence method, Http3Headers headers) {
if (HttpMethod.CONNECT.asciiName().contentEqualsIgnoreCase(method)) {
// See https://tools.ietf.org/html/rfc7231#section-4.3.6
return checkNotNull(headers.authority(),
"authority header cannot be null in the conversion to HTTP/1.x");
} else {
return checkNotNull(headers.path(),
"path header cannot be null in conversion to HTTP/1.x");
}
}
/**
* Create a new object to contain the request data
*
* @param streamId The stream associated with the request
* @param http3Headers The initial set of HTTP/3 headers to create the request with
* @param alloc The {@link ByteBufAllocator} to use to generate the content of the message
* @param validateHttpHeaders <ul>
* <li>{@code true} to validate HTTP headers in the http-codec</li>
* <li>{@code false} not to validate HTTP headers in the http-codec</li>
* </ul>
* @return A new request object which represents headers/data
* @throws Http3Exception
*/
static FullHttpRequest toFullHttpRequest(long streamId, Http3Headers http3Headers, ByteBufAllocator alloc,
boolean validateHttpHeaders) throws Http3Exception {
ByteBuf content = alloc.buffer();
// HTTP/3 does not define a way to carry the version identifier that is included in the HTTP/1.1 request line.
final CharSequence method = checkNotNull(http3Headers.method(),
"method header cannot be null in conversion to HTTP/1.x");
final CharSequence path = extractPath(method, http3Headers);
FullHttpRequest msg = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.valueOf(method
.toString()), path.toString(), content, validateHttpHeaders);
try {
addHttp3ToHttpHeaders(streamId, http3Headers, msg, false);
} catch (Http3Exception e) {
msg.release();
throw e;
} catch (Throwable t) {
msg.release();
throw streamError(streamId, Http3ErrorCode.H3_MESSAGE_ERROR,
"HTTP/3 to HTTP/1.x headers conversion error", t);
}
return msg;
}
/**
* Create a new object to contain the request data.
*
* @param streamId The stream associated with the request
* @param http3Headers The initial set of HTTP/3 headers to create the request with
* @param validateHttpHeaders <ul>
* <li>{@code true} to validate HTTP headers in the http-codec</li>
* <li>{@code false} not to validate HTTP headers in the http-codec</li>
* </ul>
* @return A new request object which represents headers for a chunked request
* @throws Http3Exception
*/
static HttpRequest toHttpRequest(long streamId, Http3Headers http3Headers, boolean validateHttpHeaders)
throws Http3Exception {
// HTTP/3 does not define a way to carry the version identifier that is included in the HTTP/1.1 request line.
final CharSequence method = checkNotNull(http3Headers.method(),
"method header cannot be null in conversion to HTTP/1.x");
final CharSequence path = extractPath(method, http3Headers);
HttpRequest msg = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.valueOf(method.toString()),
path.toString(), validateHttpHeaders);
try {
addHttp3ToHttpHeaders(streamId, http3Headers, msg.headers(), msg.protocolVersion(), false, true);
} catch (Http3Exception e) {
throw e;
} catch (Throwable t) {
throw streamError(streamId, Http3ErrorCode.H3_MESSAGE_ERROR,
"HTTP/3 to HTTP/1.x headers conversion error", t);
}
return msg;
}
/**
* Create a new object to contain the response data.
*
* @param streamId The stream associated with the response
* @param http3Headers The initial set of HTTP/3 headers to create the response with
* @param validateHttpHeaders <ul>
* <li>{@code true} to validate HTTP headers in the http-codec</li>
* <li>{@code false} not to validate HTTP headers in the http-codec</li>
* </ul>
* @return A new response object which represents headers for a chunked response
* @throws Http3Exception
*/
static HttpResponse toHttpResponse(final long streamId,
final Http3Headers http3Headers,
final boolean validateHttpHeaders) throws Http3Exception {
final HttpResponseStatus status = parseStatus(streamId, http3Headers.status());
// HTTP/3 does not define a way to carry the version or reason phrase that is included in an
// HTTP/1.1 status line.
final HttpResponse msg = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status, validateHttpHeaders);
try {
addHttp3ToHttpHeaders(streamId, http3Headers, msg.headers(), msg.protocolVersion(), false, false);
} catch (final Http3Exception e) {
throw e;
} catch (final Throwable t) {
throw streamError(streamId, Http3ErrorCode.H3_MESSAGE_ERROR,
"HTTP/3 to HTTP/1.x headers conversion error", t);
}
return msg;
}
/**
* Translate and add HTTP/3 headers to HTTP/1.x headers.
*
* @param streamId The stream associated with {@code sourceHeaders}.
* @param inputHeaders The HTTP/3 headers to convert.
* @param destinationMessage The object which will contain the resulting HTTP/1.x headers.
* @param addToTrailer {@code true} to add to trailing headers. {@code false} to add to initial headers.
* @throws Http3Exception If not all HTTP/3 headers can be translated to HTTP/1.x.
*/
private static void addHttp3ToHttpHeaders(long streamId, Http3Headers inputHeaders,
FullHttpMessage destinationMessage, boolean addToTrailer) throws Http3Exception {
addHttp3ToHttpHeaders(streamId, inputHeaders,
addToTrailer ? destinationMessage.trailingHeaders() : destinationMessage.headers(),
destinationMessage.protocolVersion(), addToTrailer, destinationMessage instanceof HttpRequest);
}
/**
* Translate and add HTTP/3 headers to HTTP/1.x headers.
*
* @param streamId The stream associated with {@code sourceHeaders}.
* @param inputHeaders The HTTP/3 headers to convert.
* @param outputHeaders The object which will contain the resulting HTTP/1.x headers..
* @param httpVersion What HTTP/1.x version {@code outputHeaders} should be treated as when doing the conversion.
* @param isTrailer {@code true} if {@code outputHeaders} should be treated as trailing headers.
* {@code false} otherwise.
* @param isRequest {@code true} if the {@code outputHeaders} will be used in a request message.
* {@code false} for response message.
* @throws Http3Exception If not all HTTP/3 headers can be translated to HTTP/1.x.
*/
static void addHttp3ToHttpHeaders(long streamId, Http3Headers inputHeaders, HttpHeaders outputHeaders,
HttpVersion httpVersion, boolean isTrailer, boolean isRequest) throws Http3Exception {
Http3ToHttpHeaderTranslator translator = new Http3ToHttpHeaderTranslator(streamId, outputHeaders, isRequest);
try {
translator.translateHeaders(inputHeaders);
} catch (Http3Exception ex) {
throw ex;
} catch (Throwable t) {
throw streamError(streamId, Http3ErrorCode.H3_MESSAGE_ERROR,
"HTTP/3 to HTTP/1.x headers conversion error", t);
}
outputHeaders.remove(HttpHeaderNames.TRANSFER_ENCODING);
outputHeaders.remove(HttpHeaderNames.TRAILER);
if (!isTrailer) {
outputHeaders.set(ExtensionHeaderNames.STREAM_ID.text(), streamId);
HttpUtil.setKeepAlive(outputHeaders, httpVersion, true);
}
}
/**
* Converts the given HTTP/1.x headers into HTTP/3 headers.
* The following headers are only used if they can not be found in from the {@code HOST} header or the
* {@code Request-Line} as defined by <a href="https://tools.ietf.org/html/rfc7230">rfc7230</a>
* <ul>
* <li>{@link ExtensionHeaderNames#SCHEME}</li>
* </ul>
* {@link ExtensionHeaderNames#PATH} is ignored and instead extracted from the {@code Request-Line}.
*/
static Http3Headers toHttp3Headers(HttpMessage in, boolean validateHeaders) {
HttpHeaders inHeaders = in.headers();
final Http3Headers out = new DefaultHttp3Headers(validateHeaders, inHeaders.size());
if (in instanceof HttpRequest) {
HttpRequest request = (HttpRequest) in;
URI requestTargetUri = URI.create(request.uri());
out.path(toHttp3Path(requestTargetUri));
out.method(request.method().asciiName());
setHttp3Scheme(inHeaders, requestTargetUri, out);
// Attempt to take from HOST header before taking from the request-line
String host = inHeaders.getAsString(HttpHeaderNames.HOST);
if (host != null && !host.isEmpty()) {
setHttp3Authority(host, out);
} else {
if (!isOriginForm(request.uri()) && !isAsteriskForm(request.uri())) {
setHttp3Authority(requestTargetUri.getAuthority(), out);
}
}
} else if (in instanceof HttpResponse) {
HttpResponse response = (HttpResponse) in;
out.status(response.status().codeAsText());
}
// Add the HTTP headers which have not been consumed above
toHttp3Headers(inHeaders, out);
return out;
}
static Http3Headers toHttp3Headers(HttpHeaders inHeaders, boolean validateHeaders) {
if (inHeaders.isEmpty()) {
return new DefaultHttp3Headers();
}
final Http3Headers out = new DefaultHttp3Headers(validateHeaders, inHeaders.size());
toHttp3Headers(inHeaders, out);
return out;
}
private static CharSequenceMap<AsciiString> toLowercaseMap(Iterator<? extends CharSequence> valuesIter,
int arraySizeHint) {
UnsupportedValueConverter<AsciiString> valueConverter = UnsupportedValueConverter.<AsciiString>instance();
CharSequenceMap<AsciiString> result = new CharSequenceMap<AsciiString>(true, valueConverter, arraySizeHint);
while (valuesIter.hasNext()) {
AsciiString lowerCased = AsciiString.of(valuesIter.next()).toLowerCase();
try {
int index = lowerCased.forEachByte(FIND_COMMA);
if (index != -1) {
int start = 0;
do {
result.add(lowerCased.subSequence(start, index, false).trim(), EMPTY_STRING);
start = index + 1;
} while (start < lowerCased.length() &&
(index = lowerCased.forEachByte(start, lowerCased.length() - start, FIND_COMMA)) != -1);
result.add(lowerCased.subSequence(start, lowerCased.length(), false).trim(), EMPTY_STRING);
} else {
result.add(lowerCased.trim(), EMPTY_STRING);
}
} catch (Exception e) {
// This is not expect to happen because FIND_COMMA never throws but must be caught
// because of the ByteProcessor interface.
throw new IllegalStateException(e);
}
}
return result;
}
/**
* Filter the {@link HttpHeaderNames#TE} header according to the
* <a href="https://quicwg.org/base-drafts/draft-ietf-quic-http.html#section-4.1.1">
* special rules in the HTTP/3 RFC</a>.
* @param entry An entry whose name is {@link HttpHeaderNames#TE}.
* @param out the resulting HTTP/3 headers.
*/
private static void toHttp3HeadersFilterTE(Entry<CharSequence, CharSequence> entry,
Http3Headers out) {
if (indexOf(entry.getValue(), ',', 0) == -1) {
if (contentEqualsIgnoreCase(trim(entry.getValue()), TRAILERS)) {
out.add(TE, TRAILERS);
}
} else {
List<CharSequence> teValues = unescapeCsvFields(entry.getValue());
for (CharSequence teValue : teValues) {
if (contentEqualsIgnoreCase(trim(teValue), TRAILERS)) {
out.add(TE, TRAILERS);
break;
}
}
}
}
static void toHttp3Headers(HttpHeaders inHeaders, Http3Headers out) {
Iterator<Entry<CharSequence, CharSequence>> iter = inHeaders.iteratorCharSequence();
// Choose 8 as a default size because it is unlikely we will see more than 4 Connection headers values, but
// still allowing for "enough" space in the map to reduce the chance of hash code collision.
CharSequenceMap<AsciiString> connectionBlacklist =
toLowercaseMap(inHeaders.valueCharSequenceIterator(CONNECTION), 8);
while (iter.hasNext()) {
Entry<CharSequence, CharSequence> entry = iter.next();
final AsciiString aName = AsciiString.of(entry.getKey()).toLowerCase();
if (!HTTP_TO_HTTP3_HEADER_BLACKLIST.contains(aName) && !connectionBlacklist.contains(aName)) {
// https://quicwg.org/base-drafts/draft-ietf-quic-http.html#section-4.1.1 makes a special exception
// for TE
if (aName.contentEqualsIgnoreCase(TE)) {
toHttp3HeadersFilterTE(entry, out);
} else if (aName.contentEqualsIgnoreCase(COOKIE)) {
AsciiString value = AsciiString.of(entry.getValue());
// split up cookies to allow for better compression
try {
int index = value.forEachByte(FIND_SEMI_COLON);
if (index != -1) {
int start = 0;
do {
out.add(COOKIE, value.subSequence(start, index, false));
// skip 2 characters "; " (see https://tools.ietf.org/html/rfc6265#section-4.2.1)
start = index + 2;
} while (start < value.length() &&
(index = value.forEachByte(start, value.length() - start, FIND_SEMI_COLON)) != -1);
if (start >= value.length()) {
throw new IllegalArgumentException("cookie value is of unexpected format: " + value);
}
out.add(COOKIE, value.subSequence(start, value.length(), false));
} else {
out.add(COOKIE, value);
}
} catch (Exception e) {
// This is not expect to happen because FIND_SEMI_COLON never throws but must be caught
// because of the ByteProcessor interface.
throw new IllegalStateException(e);
}
} else {
out.add(aName, entry.getValue());
}
}
}
}
/**
* Generate an HTTP/3 {code :path} from a URI in accordance with
* <a href="https://quicwg.org/base-drafts/draft-ietf-quic-http.html#section-4.1.1.1">HTTP3 spec</a>.
*/
private static AsciiString toHttp3Path(URI uri) {
StringBuilder pathBuilder = new StringBuilder(length(uri.getRawPath()) +
length(uri.getRawQuery()) + length(uri.getRawFragment()) + 2);
if (!isNullOrEmpty(uri.getRawPath())) {
pathBuilder.append(uri.getRawPath());
}
if (!isNullOrEmpty(uri.getRawQuery())) {
pathBuilder.append('?');
pathBuilder.append(uri.getRawQuery());
}
if (!isNullOrEmpty(uri.getRawFragment())) {
pathBuilder.append('#');
pathBuilder.append(uri.getRawFragment());
}
String path = pathBuilder.toString();
return path.isEmpty() ? EMPTY_REQUEST_PATH : new AsciiString(path);
}
// package-private for testing only
static void setHttp3Authority(@Nullable String authority, Http3Headers out) {
// The authority MUST NOT include the deprecated "userinfo" subcomponent
if (authority != null) {
if (authority.isEmpty()) {
out.authority(EMPTY_STRING);
} else {
int start = authority.indexOf('@') + 1;
int length = authority.length() - start;
if (length == 0) {
throw new IllegalArgumentException("authority: " + authority);
}
out.authority(new AsciiString(authority, start, length));
}
}
}
private static void setHttp3Scheme(HttpHeaders in, URI uri, Http3Headers out) {
String value = uri.getScheme();
if (value != null) {
out.scheme(new AsciiString(value));
return;
}
// Consume the Scheme extension header if present
CharSequence cValue = in.get(ExtensionHeaderNames.SCHEME.text());
if (cValue != null) {
out.scheme(AsciiString.of(cValue));
return;
}
if (uri.getPort() == HTTPS.port()) {
out.scheme(HTTPS.name());
} else if (uri.getPort() == HTTP.port()) {
out.scheme(HTTP.name());
} else {
throw new IllegalArgumentException(":scheme must be specified. " +
"see https://quicwg.org/base-drafts/draft-ietf-quic-http.html#section-4.1.1.1");
}
}
/**
* Utility which translates HTTP/3 headers to HTTP/1 headers.
*/
private static final
|
ExtensionHeaderNames
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
|
{
"start": 34450,
"end": 34708
}
|
class ____ implements HealthMonitor.Callback {
@Override
public void enteredState(HealthMonitor.State newState) {
setLastHealthState(newState);
recheckElectability();
}
}
/**
* Callbacks for HAServiceStatus
*/
|
HealthCallbacks
|
java
|
quarkusio__quarkus
|
test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusIntegrationTest.java
|
{
"start": 1737,
"end": 1810
}
|
interface ____ {
/**
* If used as a field of
|
QuarkusIntegrationTest
|
java
|
apache__camel
|
components/camel-dns/src/test/java/org/apache/camel/component/dns/WikipediaEndpointTest.java
|
{
"start": 1363,
"end": 2900
}
|
class ____ extends CamelTestSupport {
private static final String RESPONSE_MONKEY
= "\"A monkey is a nonhuman " + "primate mammal with the exception usually of the lemurs and "
+ "tarsiers. More specifically, the term monkey refers to a subset "
+ "of monkeys: any of the smaller longer-tailed catarrhine or "
+ "platyrrhine primates as contrasted with the apes.\" " + "\" http://en.wikipedia.org/wiki/Monkey\"";
@EndpointInject("mock:result")
protected MockEndpoint resultEndpoint;
@Produce("direct:start")
protected ProducerTemplate template;
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("dns:wikipedia").to("mock:result");
}
};
}
@Test
@Disabled("Testing behind nat produces timeouts")
void testWikipediaForMonkey() throws Exception {
resultEndpoint.expectedMessageCount(1);
resultEndpoint.expectedMessagesMatches(new Predicate() {
public boolean matches(Exchange exchange) {
String str = (String) exchange.getIn().getBody();
return RESPONSE_MONKEY.equals(str);
}
});
Map<String, Object> headers = new HashMap<>();
headers.put("term", "monkey");
template.sendBodyAndHeaders(null, headers);
resultEndpoint.assertIsSatisfied();
}
}
|
WikipediaEndpointTest
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/graph/ProjectSelector.java
|
{
"start": 1484,
"end": 8005
}
|
class ____ {
private static final Logger LOGGER = LoggerFactory.getLogger(ProjectSelector.class);
public Set<MavenProject> getActiveProjects(
MavenExecutionRequest request,
List<MavenProject> projects,
List<ProjectActivation.ProjectActivationSettings> projectSelectors)
throws MavenExecutionException {
Set<MavenProject> resolvedOptionalProjects = new LinkedHashSet<>();
Set<ProjectActivation.ProjectActivationSettings> unresolvedSelectors = new HashSet<>();
File baseDirectory = getBaseDirectoryFromRequest(request);
for (ProjectActivation.ProjectActivationSettings activation : projectSelectors) {
if (activation.activationSettings().active()) {
String selector = activation.selector();
Optional<MavenProject> optSelectedProject =
findOptionalProjectBySelector(projects, baseDirectory, selector);
if (optSelectedProject.isPresent()) {
resolvedOptionalProjects.add(optSelectedProject.get());
if (activation.activationSettings().recurse()) {
resolvedOptionalProjects.addAll(getChildProjects(optSelectedProject.get(), request));
}
} else {
unresolvedSelectors.add(activation);
}
}
}
if (!unresolvedSelectors.isEmpty()) {
String requiredSelectors = unresolvedSelectors.stream()
.filter(pas -> !pas.activationSettings().optional())
.map(ProjectActivation.ProjectActivationSettings::selector)
.collect(Collectors.joining(", "));
if (!requiredSelectors.isEmpty()) {
throw new MavenExecutionException(
"The requested required projects " + requiredSelectors + " do not exist.", request.getPom());
} else {
String optionalSelectors = unresolvedSelectors.stream()
.map(ProjectActivation.ProjectActivationSettings::selector)
.collect(Collectors.joining(", "));
LOGGER.info("The requested optional projects {} do not exist.", optionalSelectors);
}
}
return resolvedOptionalProjects;
}
/**
* @deprecated use {@link #getActiveProjects(MavenExecutionRequest, List, List)}
*/
@Deprecated(since = "4.0.0")
public Set<MavenProject> getRequiredProjectsBySelectors(
MavenExecutionRequest request, List<MavenProject> projects, Set<String> projectSelectors)
throws MavenExecutionException {
Set<MavenProject> selectedProjects = new LinkedHashSet<>();
File baseDirectory = getBaseDirectoryFromRequest(request);
for (String selector : projectSelectors) {
Optional<MavenProject> optSelectedProject =
findOptionalProjectBySelector(projects, baseDirectory, selector);
if (!optSelectedProject.isPresent()) {
String message = "Could not find the selected project in the reactor: " + selector;
throw new MavenExecutionException(message, request.getPom());
}
MavenProject selectedProject = optSelectedProject.get();
selectedProjects.add(selectedProject);
selectedProjects.addAll(getChildProjects(selectedProject, request));
}
return selectedProjects;
}
/**
* @deprecated use {@link #getActiveProjects(MavenExecutionRequest, List, List)}
*/
@Deprecated(since = "4.0.0")
public Set<MavenProject> getOptionalProjectsBySelectors(
MavenExecutionRequest request, List<MavenProject> projects, Set<String> projectSelectors) {
Set<MavenProject> resolvedOptionalProjects = new LinkedHashSet<>();
Set<String> unresolvedOptionalSelectors = new HashSet<>();
File baseDirectory = getBaseDirectoryFromRequest(request);
for (String selector : projectSelectors) {
Optional<MavenProject> optSelectedProject =
findOptionalProjectBySelector(projects, baseDirectory, selector);
if (optSelectedProject.isPresent()) {
resolvedOptionalProjects.add(optSelectedProject.get());
resolvedOptionalProjects.addAll(getChildProjects(optSelectedProject.get(), request));
} else {
unresolvedOptionalSelectors.add(selector);
}
}
if (!unresolvedOptionalSelectors.isEmpty()) {
LOGGER.info("The requested optional projects {} do not exist.", unresolvedOptionalSelectors);
}
return resolvedOptionalProjects;
}
private List<MavenProject> getChildProjects(MavenProject parent, MavenExecutionRequest request) {
final List<MavenProject> children = parent.getCollectedProjects();
if (children != null && request.isRecursive()) {
return children;
} else {
return new ArrayList<>();
}
}
private Optional<MavenProject> findOptionalProjectBySelector(
List<MavenProject> projects, File reactorDirectory, String selector) {
return projects.stream()
.filter(project -> isMatchingProject(project, selector, reactorDirectory))
.findFirst();
}
File getBaseDirectoryFromRequest(MavenExecutionRequest request) {
return request.getBaseDirectory() != null ? new File(request.getBaseDirectory()) : null;
}
boolean isMatchingProject(MavenProject project, String selector, File reactorDirectory) {
// [groupId]:artifactId
if (selector.contains(":")) {
String id = ':' + project.getArtifactId();
if (id.equals(selector)) {
return true;
}
id = project.getGroupId() + id;
return id.equals(selector);
}
// relative path, e.g. "sub", "../sub" or "."
else if (reactorDirectory != null) {
File selectedProject =
new File(new File(reactorDirectory, selector).toURI().normalize());
if (selectedProject.isFile()) {
return selectedProject.equals(project.getFile());
} else if (selectedProject.isDirectory()) {
return selectedProject.equals(project.getBasedir());
}
}
return false;
}
}
|
ProjectSelector
|
java
|
micronaut-projects__micronaut-core
|
inject/src/main/java/io/micronaut/context/AnnotationReflectionUtils.java
|
{
"start": 14938,
"end": 18225
}
|
class ____ interface.
*
* @param cl The class
* @return A stream of supertypes
*/
private static Stream<AnnotatedType> getSupertypes(Class<?> cl) {
Stream<AnnotatedType> itf = Stream.of(cl.getAnnotatedInterfaces());
if (cl.isInterface()) {
return itf;
}
return Stream.concat(Stream.of(cl.getAnnotatedSuperclass()), itf);
}
/**
* Get the raw type of a given complex type.
*
* @param type The complex type
* @return The raw type
*/
private static Class<?> getRawType(Type type) {
if (type instanceof Class<?> cl) {
return cl;
} else if (type instanceof ParameterizedType ptype) {
return getRawType(ptype.getRawType());
} else if (type instanceof TypeVariable<?> tv) {
return getRawType(tv.getBounds()[0]);
} else if (type instanceof WildcardType wt) {
return getRawType(wt.getUpperBounds()[0]);
} else if (type instanceof GenericArrayType gat) {
Class<?> rawComponentType = getRawType(gat.getGenericComponentType());
return Array.newInstance(rawComponentType, 0).getClass();
} else {
throw new IllegalArgumentException("Unsupported type " + type.getClass().getName());
}
}
/**
* Wrapper around a {@link AnnotatedType} to signals that certain {@link TypeVariable}s should
* be substituted lazily. For example, if {@code actual} is {@code List<T>} and
* {@code substitutions} is {@code T -> @Ann1 String}, users should treat this type as
* {@code List<@Ann1 String>}.
*
* @param actual The type to delegate to
* @param substitutions Substitutions to apply to the type
*/
private record LazySubstitutingType(AnnotatedType actual,
Map<TypeVariable<?>, AnnotatedType> substitutions) implements AnnotatedType {
@Override
public Type getType() {
return actual.getType();
}
@Override
public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
return actual.getAnnotation(annotationClass);
}
@Override
public Annotation[] getAnnotations() {
return actual.getAnnotations();
}
@Override
public Annotation[] getDeclaredAnnotations() {
return actual.getDeclaredAnnotations();
}
}
/**
* Simple, annotation-less {@link AnnotatedType} implementation.
*
* @param actual The type
*/
private record SimpleAnnotatedType(Type actual) implements AnnotatedType {
@Override
public Type getType() {
return actual;
}
@Override
public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
return null;
}
@Override
public Annotation[] getAnnotations() {
return new Annotation[0];
}
@Override
public Annotation[] getDeclaredAnnotations() {
return new Annotation[0];
}
}
/**
* This record represents an {@link AnnotatedType} that merges the annotations of multiple
* different types. e.g. when {@code
|
or
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java
|
{
"start": 68464,
"end": 68637
}
|
class ____<T> {
private @Nullable T bar;
@Nullable T getBar() {
return this.bar;
}
void setBar(@Nullable T bar) {
this.bar = bar;
}
}
static
|
AGenericClass
|
java
|
elastic__elasticsearch
|
x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java
|
{
"start": 2718,
"end": 46823
}
|
class ____ extends AggregatorTestCase {
/**
* Script to return the {@code _value} provided by aggs framework.
*/
public static final String ADD_HALF_SCRIPT = "add_one";
public static final String TERM_FILTERING = "term_filtering";
@Override
protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
return new TTestAggregationBuilder("foo").a(
new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName)
.setFilter(QueryBuilders.rangeQuery(fieldName).lt(10))
.build()
)
.b(
new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName)
.setFilter(QueryBuilders.rangeQuery(fieldName).gte(10))
.build()
);
} else if (fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE)
|| fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
return new TTestAggregationBuilder("foo").a(
new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName)
.setFilter(QueryBuilders.rangeQuery(fieldName).lt(DateUtils.toInstant(10)))
.build()
)
.b(
new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName)
.setFilter(QueryBuilders.rangeQuery(fieldName).gte(DateUtils.toInstant(10)))
.build()
);
} else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) {
return new TTestAggregationBuilder("foo").a(
new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName)
.setFilter(QueryBuilders.rangeQuery(fieldName).lt("true"))
.build()
)
.b(
new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName)
.setFilter(QueryBuilders.rangeQuery(fieldName).gte("false"))
.build()
);
}
// if it's "unsupported" just use matchall filters to avoid parsing issues
return new TTestAggregationBuilder("foo").a(
new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName).setFilter(QueryBuilders.matchAllQuery()).build()
).b(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName).setFilter(QueryBuilders.matchAllQuery()).build());
}
@Override
protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.DATE);
}
@Override
protected ScriptService getMockScriptService() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
scripts.put(ADD_HALF_SCRIPT, vars -> {
LeafDocLookup leafDocLookup = (LeafDocLookup) vars.get("doc");
String fieldname = (String) vars.get("fieldname");
ScriptDocValues<?> scriptDocValues = leafDocLookup.get(fieldname);
return ((Number) scriptDocValues.get(0)).doubleValue() + 0.5;
});
scripts.put(TERM_FILTERING, vars -> {
LeafDocLookup leafDocLookup = (LeafDocLookup) vars.get("doc");
int term = (Integer) vars.get("term");
ScriptDocValues<?> termDocValues = leafDocLookup.get("term");
int currentTerm = ((Number) termDocValues.get(0)).intValue();
if (currentTerm == term) {
return ((Number) leafDocLookup.get("field").get(0)).doubleValue();
}
return null;
});
MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap());
Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
return new ScriptService(
Settings.EMPTY,
engines,
ScriptModule.CORE_CONTEXTS,
() -> 1L,
TestProjectResolvers.singleProject(randomProjectIdOrDefault())
);
}
public void testNoMatchingField() throws IOException {
testCase(new MatchAllDocsQuery(), randomFrom(TTestType.values()), iw -> {
iw.addDocument(asList(new NumericDocValuesField("wrong_a", 102), new NumericDocValuesField("wrong_b", 89)));
iw.addDocument(asList(new NumericDocValuesField("wrong_a", 99), new NumericDocValuesField("wrong_b", 93)));
}, tTest -> assertEquals(Double.NaN, tTest.getValue(), 0));
}
public void testNotEnoughRecords() throws IOException {
testCase(new MatchAllDocsQuery(), randomFrom(TTestType.values()), iw -> {
iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89)));
}, tTest -> assertEquals(Double.NaN, tTest.getValue(), 0));
}
public void testSameValues() throws IOException {
TTestType tTestType = randomFrom(TTestType.values());
testCase(new MatchAllDocsQuery(), tTestType, iw -> {
iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 102)));
iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 99)));
iw.addDocument(asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 111)));
iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 97)));
iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 101)));
}, tTest -> assertEquals(tTestType == TTestType.PAIRED ? Double.NaN : 1, tTest.getValue(), 0));
}
public void testMatchesSortedNumericDocValues() throws IOException {
testCase(new MatchAllDocsQuery(), TTestType.PAIRED, iw -> {
iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("b", 89)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 111), new SortedNumericDocValuesField("b", 72)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 97), new SortedNumericDocValuesField("b", 98)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 101), new SortedNumericDocValuesField("b", 102)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 98)));
}, tTest -> assertEquals(0.09571844217 * 2, tTest.getValue(), 0.000001));
}
public void testMultiplePairedValues() {
IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> testCase(new MatchAllDocsQuery(), TTestType.PAIRED, iw -> {
iw.addDocument(
asList(
new SortedNumericDocValuesField("a", 102),
new SortedNumericDocValuesField("a", 103),
new SortedNumericDocValuesField("b", 89)
)
);
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93)));
}, tTest -> fail("Should have thrown exception"))
);
assertEquals(
"Encountered more than one value for a single document. Use a script to combine multiple values per doc into a single value.",
ex.getMessage()
);
}
public void testSameFieldAndNoFilters() {
TTestType tTestType = randomFrom(TTestType.values());
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.INTEGER);
TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(
new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setMissing(100).build()
).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setMissing(100).build()).testType(tTestType);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> testCase(iw -> {
iw.addDocument(singleton(new SortedNumericDocValuesField("field", 102)));
iw.addDocument(singleton(new SortedNumericDocValuesField("field", 99)));
}, tTest -> fail("Should have thrown exception"), new AggTestConfig(aggregationBuilder, fieldType)));
assertEquals("The same field [field] is used for both population but no filters are specified.", ex.getMessage());
}
public void testMultipleUnpairedValues() throws IOException {
TTestType tTestType = randomFrom(TTestType.HETEROSCEDASTIC, TTestType.HOMOSCEDASTIC);
testCase(new MatchAllDocsQuery(), tTestType, iw -> {
iw.addDocument(
asList(
new SortedNumericDocValuesField("a", 102),
new SortedNumericDocValuesField("a", 103),
new SortedNumericDocValuesField("b", 89)
)
);
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93)));
}, tTest -> assertEquals(tTestType == TTestType.HETEROSCEDASTIC ? 0.0607303911 : 0.01718374671, tTest.getValue(), 0.000001));
}
public void testUnpairedValuesWithFilters() throws IOException {
TTestType tTestType = randomFrom(TTestType.HETEROSCEDASTIC, TTestType.HOMOSCEDASTIC);
testCase(new MatchAllDocsQuery(), tTestType, iw -> {
iw.addDocument(
asList(
new SortedNumericDocValuesField("a", 102),
new SortedNumericDocValuesField("a", 103),
new SortedNumericDocValuesField("b", 89)
)
);
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93)));
}, tTest -> assertEquals(tTestType == TTestType.HETEROSCEDASTIC ? 0.0607303911 : 0.01718374671, tTest.getValue(), 0.000001));
}
public void testMissingValues() throws IOException {
TTestType tTestType = randomFrom(TTestType.values());
testCase(new MatchAllDocsQuery(), tTestType, iw -> {
iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("b", 89)));
iw.addDocument(asList(new SortedNumericDocValuesField("a1", 99), new SortedNumericDocValuesField("b", 93)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 111), new SortedNumericDocValuesField("b1", 72)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 97), new SortedNumericDocValuesField("b", 98)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 101), new SortedNumericDocValuesField("b", 102)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 98)));
}, tTest -> {
switch (tTestType) {
case PAIRED -> assertEquals(0.4385093524, tTest.getValue(), 0.000001);
case HOMOSCEDASTIC -> assertEquals(0.1066843841, tTest.getValue(), 0.000001);
case HETEROSCEDASTIC -> assertEquals(0.1068382282, tTest.getValue(), 0.000001);
default -> fail("unknown t-test type " + tTestType);
}
});
}
public void testUnmappedWithMissingField() throws IOException {
TTestType tTestType = randomFrom(TTestType.values());
boolean missA = randomBoolean();
boolean missB = missA == false || randomBoolean(); // at least one of the fields should be missing
MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType(missA ? "not_a" : "a", NumberFieldMapper.NumberType.INTEGER);
MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(missB ? "not_b" : "b", NumberFieldMapper.NumberType.INTEGER);
TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(
new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setMissing(100).build()
).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").setMissing(100).build()).testType(tTestType);
testCase(iw -> {
iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89)));
iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93)));
}, (Consumer<InternalTTest>) tTest -> {
if (missA && missB) {
assertEquals(Double.NaN, tTest.getValue(), 0);
} else {
if (missA) {
switch (tTestType) {
case PAIRED -> assertEquals(0.1392089745, tTest.getValue(), 0.000001);
case HOMOSCEDASTIC -> assertEquals(0.04600190799, tTest.getValue(), 0.000001);
case HETEROSCEDASTIC -> assertEquals(0.1392089745, tTest.getValue(), 0.000001);
default -> fail("unknown t-test type " + tTestType);
}
} else {
switch (tTestType) {
case PAIRED -> assertEquals(0.7951672353, tTest.getValue(), 0.000001);
case HOMOSCEDASTIC -> assertEquals(0.7705842661, tTest.getValue(), 0.000001);
case HETEROSCEDASTIC -> assertEquals(0.7951672353, tTest.getValue(), 0.000001);
default -> fail("unknown t-test type " + tTestType);
}
}
}
}, new AggTestConfig(aggregationBuilder, fieldType1, fieldType2));
}
public void testUnsupportedType() {
TTestType tTestType = randomFrom(TTestType.values());
boolean wrongA = randomBoolean();
boolean wrongB = wrongA == false || randomBoolean(); // at least one of the fields should have unsupported type
MappedFieldType fieldType1;
if (wrongA) {
fieldType1 = new KeywordFieldMapper.KeywordFieldType("a");
} else {
fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
}
MappedFieldType fieldType2;
if (wrongB) {
fieldType2 = new KeywordFieldMapper.KeywordFieldType("b");
} else {
fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
}
TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(
new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()
).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(tTestType);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> testCase(iw -> {
iw.addDocument(
asList(
new SortedNumericDocValuesField("a", 102),
new SortedNumericDocValuesField("a", 103),
new SortedNumericDocValuesField("b", 89)
)
);
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93)));
}, tTest -> fail("Should have thrown exception"), new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)));
assertEquals("Expected numeric type on field [" + (wrongA ? "a" : "b") + "], but got [keyword]", ex.getMessage());
}
public void testBadMissingField() {
TTestType tTestType = randomFrom(TTestType.values());
boolean missA = randomBoolean();
boolean missB = missA == false || randomBoolean(); // at least one of the fields should be have bad missing
MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
MultiValuesSourceFieldConfig.Builder a = new MultiValuesSourceFieldConfig.Builder().setFieldName("a");
if (missA) {
a.setMissing("bad_number");
}
MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
MultiValuesSourceFieldConfig.Builder b = new MultiValuesSourceFieldConfig.Builder().setFieldName("b");
if (missB) {
b.setMissing("bad_number");
}
TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(a.build()).b(b.build()).testType(tTestType);
NumberFormatException ex = expectThrows(NumberFormatException.class, () -> testCase(iw -> {
iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("b", 89)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93)));
}, tTest -> fail("Should have thrown exception"), new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)));
assertEquals("For input string: \"bad_number\"", ex.getMessage());
}
public void testUnmappedWithBadMissingField() {
TTestType tTestType = randomFrom(TTestType.values());
boolean missA = randomBoolean();
boolean missB = missA == false || randomBoolean(); // at least one of the fields should be have bad missing
MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
MultiValuesSourceFieldConfig.Builder a = new MultiValuesSourceFieldConfig.Builder();
if (missA) {
a.setFieldName("not_a").setMissing("bad_number");
} else {
a.setFieldName("a");
}
MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(missB ? "not_b" : "b", NumberFieldMapper.NumberType.INTEGER);
MultiValuesSourceFieldConfig.Builder b = new MultiValuesSourceFieldConfig.Builder();
if (missB) {
b.setFieldName("not_b").setMissing("bad_number");
} else {
b.setFieldName("b");
}
TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(a.build()).b(b.build()).testType(tTestType);
NumberFormatException ex = expectThrows(NumberFormatException.class, () -> testCase(iw -> {
iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("b", 89)));
iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93)));
}, tTest -> fail("Should have thrown exception"), new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)));
assertEquals("For input string: \"bad_number\"", ex.getMessage());
}
public void testEmptyBucket() throws IOException {
TTestType tTestType = randomFrom(TTestType.values());
MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
MappedFieldType fieldTypePart = new NumberFieldMapper.NumberFieldType("part", NumberFieldMapper.NumberType.INTEGER);
HistogramAggregationBuilder histogram = new HistogramAggregationBuilder("histo").field("part")
.interval(10)
.minDocCount(0)
.subAggregation(
new TTestAggregationBuilder("t_test").a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build())
.b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build())
.testType(tTestType)
);
testCase(iw -> {
iw.addDocument(
asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89), new NumericDocValuesField("part", 1))
);
iw.addDocument(
asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93), new NumericDocValuesField("part", 1))
);
iw.addDocument(
asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72), new NumericDocValuesField("part", 1))
);
iw.addDocument(
asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98), new NumericDocValuesField("part", 21))
);
iw.addDocument(
asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102), new NumericDocValuesField("part", 21))
);
iw.addDocument(
asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98), new NumericDocValuesField("part", 21))
);
}, (Consumer<InternalHistogram>) histo -> {
assertEquals(3, histo.getBuckets().size());
assertNotNull(histo.getBuckets().get(0).getAggregations().get("t_test"));
InternalTTest tTest = histo.getBuckets().get(0).getAggregations().get("t_test");
assertEquals(
tTestType == TTestType.PAIRED ? 0.1939778614 : tTestType == TTestType.HOMOSCEDASTIC ? 0.05878871029 : 0.07529006595,
tTest.getValue(),
0.000001
);
assertNotNull(histo.getBuckets().get(1).getAggregations().get("t_test"));
tTest = histo.getBuckets().get(1).getAggregations().get("t_test");
assertEquals(Double.NaN, tTest.getValue(), 0.000001);
assertNotNull(histo.getBuckets().get(2).getAggregations().get("t_test"));
tTest = histo.getBuckets().get(2).getAggregations().get("t_test");
assertEquals(
tTestType == TTestType.PAIRED ? 0.6666666667 : tTestType == TTestType.HOMOSCEDASTIC ? 0.8593081179 : 0.8594865044,
tTest.getValue(),
0.000001
);
}, new AggTestConfig(histogram, fieldType1, fieldType2, fieldTypePart));
}
public void testFormatter() throws IOException {
TTestType tTestType = randomFrom(TTestType.values());
MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(
new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()
).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(tTestType).format("0.00%");
testCase(iw -> {
iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89)));
iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93)));
iw.addDocument(asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72)));
}, (Consumer<InternalTTest>) tTest -> {
assertEquals(
tTestType == TTestType.PAIRED ? 0.1939778614 : tTestType == TTestType.HOMOSCEDASTIC ? 0.05878871029 : 0.07529006595,
tTest.getValue(),
0.000001
);
assertEquals(
tTestType == TTestType.PAIRED ? "19.40%" : tTestType == TTestType.HOMOSCEDASTIC ? "5.88%" : "7.53%",
tTest.getValueAsString()
);
}, new AggTestConfig(aggregationBuilder, fieldType1, fieldType2));
}
public void testGetProperty() throws IOException {
MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation(
new TTestAggregationBuilder("t_test").a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build())
.b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build())
.testType(TTestType.PAIRED)
);
testCase(iw -> {
iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89)));
iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93)));
iw.addDocument(asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72)));
}, (Consumer<InternalGlobal>) global -> {
assertEquals(3, global.getDocCount());
assertTrue(AggregationInspectionHelper.hasValue(global));
assertNotNull(global.getAggregations().get("t_test"));
InternalTTest tTest = global.getAggregations().get("t_test");
assertEquals(tTest, global.getProperty("t_test"));
assertEquals(0.1939778614, (Double) global.getProperty("t_test.value"), 0.000001);
}, new AggTestConfig(globalBuilder, fieldType1, fieldType2));
}
// One side of the t-test reads the field directly, the other side reads it through a mock
// script (ADD_HALF_SCRIPT) that shifts each value by 0.5; which side gets the script is
// chosen at random, as is the test type.
public void testScript() throws IOException {
    boolean fieldInA = randomBoolean();
    TTestType tTestType = randomFrom(TTestType.values());
    MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.INTEGER);
    MultiValuesSourceFieldConfig a = new MultiValuesSourceFieldConfig.Builder().setFieldName("field").build();
    MultiValuesSourceFieldConfig b = new MultiValuesSourceFieldConfig.Builder().setScript(
        new Script(ScriptType.INLINE, MockScriptEngine.NAME, ADD_HALF_SCRIPT, Collections.singletonMap("fieldname", "field"))
    ).build();
    TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(fieldInA ? a : b)
        .b(fieldInA ? b : a)
        .testType(tTestType);
    testCase(iw -> {
        iw.addDocument(singleton(new NumericDocValuesField("field", 1)));
        iw.addDocument(singleton(new NumericDocValuesField("field", 2)));
        iw.addDocument(singleton(new NumericDocValuesField("field", 3)));
    },
        (Consumer<InternalTTest>) tTest -> {
            // Per the assertion below, the paired variant yields 0 for this data; the
            // unpaired variants yield ~0.5734 — presumably because each pair differs by a
            // constant 0.5 (TODO confirm against the t-test math).
            assertEquals(tTestType == TTestType.PAIRED ? 0 : 0.5733922538, tTest.getValue(), 0.000001);
        },
        new AggTestConfig(aggregationBuilder, fieldType)
    );
}
// Paired t-test over six (a, b) document pairs. A one-tailed p-value is half the
// two-tailed one, so the expected value is scaled by the randomly chosen tail count.
public void testPaired() throws IOException {
    MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
    MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
    TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(
        new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()
    ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(TTestType.PAIRED);
    int tails = randomIntBetween(1, 2);
    // tails == 2 is the default, so when two tails are chosen it is sometimes left implicit
    // to exercise both the explicit and the defaulted code path.
    if (tails == 1 || randomBoolean()) {
        aggregationBuilder.tails(tails);
    }
    testCase(iw -> {
        iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89)));
        iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93)));
        iw.addDocument(asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72)));
        iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98)));
        iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102)));
        iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98)));
    },
        (Consumer<InternalTTest>) ttest -> { assertEquals(0.09571844217 * tails, ttest.getValue(), 0.00001); },
        new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)
    );
}
// Homoscedastic (equal-variance, unpaired) t-test over the same six document pairs used
// by testPaired; the expected p-value scales with the randomly chosen tail count.
public void testHomoscedastic() throws IOException {
    MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
    MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
    TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(
        new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()
    ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(TTestType.HOMOSCEDASTIC);
    int tails = randomIntBetween(1, 2);
    // Two tails is the default; sometimes leave it implicit to cover both code paths.
    if (tails == 1 || randomBoolean()) {
        aggregationBuilder.tails(tails);
    }
    testCase(iw -> {
        iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89)));
        iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93)));
        iw.addDocument(asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72)));
        iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98)));
        iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102)));
        iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98)));
    },
        (Consumer<InternalTTest>) ttest -> { assertEquals(0.03928288693 * tails, ttest.getValue(), 0.00001); },
        new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)
    );
}
// Heteroscedastic (Welch's, unequal-variance) t-test. HETEROSCEDASTIC is the builder's
// default test type, so it is only set explicitly half of the time to cover both paths.
public void testHeteroscedastic() throws IOException {
    MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
    MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
    TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(
        new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()
    ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build());
    if (randomBoolean()) {
        aggregationBuilder.testType(TTestType.HETEROSCEDASTIC);
    }
    int tails = randomIntBetween(1, 2);
    // Two tails is the default; sometimes leave it implicit to cover both code paths.
    if (tails == 1 || randomBoolean()) {
        aggregationBuilder.tails(tails);
    }
    testCase(iw -> {
        iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89)));
        iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93)));
        iw.addDocument(asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72)));
        iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98)));
        iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102)));
        iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98)));
    },
        (Consumer<InternalTTest>) ttest -> { assertEquals(0.04538666214 * tails, ttest.getValue(), 0.00001); },
        new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)
    );
}
// Both t-test populations come from the same field "a", split by per-source term filters
// on "b" (b==1 vs b==2); docs with b==3 are noise that must be excluded by the filters.
// The PAIRED variant must reject filters with an IllegalArgumentException.
public void testFiltered() throws IOException {
    TTestType tTestType = randomFrom(TTestType.values());
    MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
    MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
    TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(
        new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setFilter(QueryBuilders.termQuery("b", 1)).build()
    )
        .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setFilter(QueryBuilders.termQuery("b", 2)).build())
        .testType(tTestType);
    int tails = randomIntBetween(1, 2);
    // Two tails is the default; sometimes leave it implicit to cover both code paths.
    if (tails == 1 || randomBoolean()) {
        aggregationBuilder.tails(tails);
    }
    CheckedConsumer<RandomIndexWriter, IOException> buildIndex = iw -> {
        iw.addDocument(asList(new NumericDocValuesField("a", 102), new IntPoint("b", 1)));
        iw.addDocument(asList(new NumericDocValuesField("a", 99), new IntPoint("b", 1)));
        iw.addDocument(asList(new NumericDocValuesField("a", 111), new IntPoint("b", 1)));
        iw.addDocument(asList(new NumericDocValuesField("a", 97), new IntPoint("b", 1)));
        iw.addDocument(asList(new NumericDocValuesField("a", 101), new IntPoint("b", 1)));
        iw.addDocument(asList(new NumericDocValuesField("a", 99), new IntPoint("b", 1)));
        iw.addDocument(asList(new NumericDocValuesField("a", 89), new IntPoint("b", 2)));
        iw.addDocument(asList(new NumericDocValuesField("a", 93), new IntPoint("b", 2)));
        iw.addDocument(asList(new NumericDocValuesField("a", 72), new IntPoint("b", 2)));
        iw.addDocument(asList(new NumericDocValuesField("a", 98), new IntPoint("b", 2)));
        iw.addDocument(asList(new NumericDocValuesField("a", 102), new IntPoint("b", 2)));
        iw.addDocument(asList(new NumericDocValuesField("a", 98), new IntPoint("b", 2)));
        // b == 3 documents match neither filter and must not affect the result.
        iw.addDocument(asList(new NumericDocValuesField("a", 189), new IntPoint("b", 3)));
        iw.addDocument(asList(new NumericDocValuesField("a", 193), new IntPoint("b", 3)));
        iw.addDocument(asList(new NumericDocValuesField("a", 172), new IntPoint("b", 3)));
        iw.addDocument(asList(new NumericDocValuesField("a", 198), new IntPoint("b", 3)));
        iw.addDocument(asList(new NumericDocValuesField("a", 1102), new IntPoint("b", 3)));
        iw.addDocument(asList(new NumericDocValuesField("a", 198), new IntPoint("b", 3)));
    };
    if (tTestType == TTestType.PAIRED) {
        IllegalArgumentException ex = expectThrows(
            IllegalArgumentException.class,
            () -> testCase(
                buildIndex,
                tTest -> fail("Should have thrown exception"),
                new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)
            )
        );
        assertEquals("Paired t-test doesn't support filters", ex.getMessage());
    } else {
        // Same expected values as testHomoscedastic/testHeteroscedastic, since the
        // filtered populations match the data those tests index directly.
        testCase(buildIndex, (Consumer<InternalTTest>) ttest -> {
            if (tTestType == TTestType.HOMOSCEDASTIC) {
                assertEquals(0.03928288693 * tails, ttest.getValue(), 0.00001);
            } else {
                assertEquals(0.04538666214 * tails, ttest.getValue(), 0.00001);
            }
        }, new AggTestConfig(aggregationBuilder, fieldType1, fieldType2));
    }
}
// Same filtered t-test as testFiltered, but nested under a histogram on field "h" with a
// random number of buckets; every bucket indexes an identical data set, so every bucket
// must produce the same p-value. PAIRED must still reject filters.
public void testFilteredAsSubAgg() throws IOException {
    TTestType tTestType = randomFrom(TTestType.values());
    MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("h", NumberFieldMapper.NumberType.INTEGER);
    MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
    MappedFieldType fieldType3 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
    TTestAggregationBuilder ttestAggregationBuilder = new TTestAggregationBuilder("t_test").a(
        new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setFilter(QueryBuilders.termQuery("b", 1)).build()
    )
        .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setFilter(QueryBuilders.termQuery("b", 2)).build())
        .testType(tTestType);
    int tails = randomIntBetween(1, 2);
    // Two tails is the default; sometimes leave it implicit to cover both code paths.
    if (tails == 1 || randomBoolean()) {
        ttestAggregationBuilder.tails(tails);
    }
    HistogramAggregationBuilder aggregationBuilder = new HistogramAggregationBuilder("h").field("h")
        .interval(1)
        .subAggregation(ttestAggregationBuilder);
    int buckets = randomInt(100);
    CheckedConsumer<RandomIndexWriter, IOException> buildIndex = iw -> {
        // Each histogram bucket i receives the same 18 documents: six with b==1, six with
        // b==2, and six b==3 outliers that match neither filter.
        for (int i = 0; i < buckets; i++) {
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 102), new IntPoint("b", 1)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 99), new IntPoint("b", 1)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 111), new IntPoint("b", 1)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 97), new IntPoint("b", 1)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 101), new IntPoint("b", 1)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 99), new IntPoint("b", 1)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 89), new IntPoint("b", 2)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 93), new IntPoint("b", 2)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 72), new IntPoint("b", 2)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 98), new IntPoint("b", 2)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 102), new IntPoint("b", 2)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 98), new IntPoint("b", 2)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 189), new IntPoint("b", 3)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 193), new IntPoint("b", 3)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 172), new IntPoint("b", 3)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 198), new IntPoint("b", 3)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 1102), new IntPoint("b", 3)));
            iw.addDocument(asList(new NumericDocValuesField("h", i), new NumericDocValuesField("a", 198), new IntPoint("b", 3)));
        }
    };
    if (tTestType == TTestType.PAIRED) {
        IllegalArgumentException ex = expectThrows(
            IllegalArgumentException.class,
            () -> testCase(
                buildIndex,
                tTest -> fail("Should have thrown exception"),
                new AggTestConfig(aggregationBuilder, fieldType1, fieldType2, fieldType3)
            )
        );
        assertEquals("Paired t-test doesn't support filters", ex.getMessage());
    } else {
        testCase(buildIndex, (Consumer<InternalHistogram>) histogram -> {
            if (tTestType == TTestType.HOMOSCEDASTIC) {
                assertEquals(buckets, histogram.getBuckets().size());
                for (int i = 0; i < buckets; i++) {
                    InternalTTest ttest = histogram.getBuckets().get(i).getAggregations().get("t_test");
                    assertEquals(0.03928288693 * tails, ttest.getValue(), 0.00001);
                }
            } else {
                assertEquals(buckets, histogram.getBuckets().size());
                for (int i = 0; i < buckets; i++) {
                    InternalTTest ttest = histogram.getBuckets().get(i).getAggregations().get("t_test");
                    assertEquals(0.04538666214 * tails, ttest.getValue(), 0.00001);
                }
            }
        }, new AggTestConfig(aggregationBuilder, fieldType1, fieldType2, fieldType3));
    }
}
// One population is selected with a term filter, the other with a mock filtering script
// (TERM_FILTERING); which population uses which mechanism is randomized, as is which term
// each mechanism selects — the resulting p-value must be identical in all combinations.
public void testFilterByFilterOrScript() throws IOException {
    boolean fieldInA = randomBoolean();
    // PAIRED is excluded: filters are not supported by the paired t-test.
    TTestType tTestType = randomFrom(TTestType.HOMOSCEDASTIC, TTestType.HETEROSCEDASTIC);
    MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.INTEGER);
    MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("term", NumberFieldMapper.NumberType.INTEGER);
    boolean filterTermOne = randomBoolean();
    MultiValuesSourceFieldConfig.Builder a = new MultiValuesSourceFieldConfig.Builder().setFieldName("field")
        .setFilter(QueryBuilders.termQuery("term", filterTermOne ? 1 : 2));
    MultiValuesSourceFieldConfig.Builder b = new MultiValuesSourceFieldConfig.Builder().setScript(
        new Script(ScriptType.INLINE, MockScriptEngine.NAME, TERM_FILTERING, Collections.singletonMap("term", filterTermOne ? 2 : 1))
    );
    TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(fieldInA ? a.build() : b.build())
        .b(fieldInA ? b.build() : a.build())
        .testType(tTestType);
    testCase(iw -> {
        // "term" is indexed both as a point (for the filter query) and as doc values
        // (for the filtering script).
        iw.addDocument(asList(new NumericDocValuesField("field", 1), new IntPoint("term", 1), new NumericDocValuesField("term", 1)));
        iw.addDocument(asList(new NumericDocValuesField("field", 2), new IntPoint("term", 1), new NumericDocValuesField("term", 1)));
        iw.addDocument(asList(new NumericDocValuesField("field", 3), new IntPoint("term", 1), new NumericDocValuesField("term", 1)));
        iw.addDocument(asList(new NumericDocValuesField("field", 4), new IntPoint("term", 2), new NumericDocValuesField("term", 2)));
        iw.addDocument(asList(new NumericDocValuesField("field", 5), new IntPoint("term", 2), new NumericDocValuesField("term", 2)));
        iw.addDocument(asList(new NumericDocValuesField("field", 6), new IntPoint("term", 2), new NumericDocValuesField("term", 2)));
    },
        (Consumer<InternalTTest>) tTest -> { assertEquals(0.02131164113, tTest.getValue(), 0.000001); },
        new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)
    );
}
/**
 * Runs a t-test aggregation of the given {@code type} over integer fields "a" and "b",
 * restricted by {@code query}, and hands the resulting {@link InternalTTest} to
 * {@code verify}.
 */
private void testCase(
    Query query,
    TTestType type,
    CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
    Consumer<InternalTTest> verify
) throws IOException {
    MappedFieldType aField = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER);
    MappedFieldType bField = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER);
    MultiValuesSourceFieldConfig sourceA = new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build();
    MultiValuesSourceFieldConfig sourceB = new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build();
    TTestAggregationBuilder builder = new TTestAggregationBuilder("t_test").a(sourceA).b(sourceB);
    // HETEROSCEDASTIC is the default test type, so it is sometimes left implicit to
    // exercise both the explicit and the defaulted configuration path.
    boolean setTypeExplicitly = type != TTestType.HETEROSCEDASTIC || randomBoolean();
    if (setTypeExplicitly) {
        builder.testType(type);
    }
    testCase(buildIndex, verify, new AggTestConfig(builder, aField, bField).withQuery(query));
}
@Override
protected List<SearchPlugin> getSearchPlugins() {
    // The t_test aggregation is registered by the analytics plugin.
    return List.of(new AnalyticsPlugin());
}
}
|
TTestAggregatorTests
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java
|
{
"start": 19310,
"end": 21465
}
|
// Script helper exposing dotProduct() between a query vector and each document's
// dense_vector field, dispatching on the field's element type at construction time.
class ____ {
    // Element-type-specific implementation chosen once per script invocation.
    private final DotProductInterface function;
    @SuppressWarnings("unchecked")
    public DotProduct(ScoreScript scoreScript, Object queryVector, String fieldName) {
        DenseVectorDocValuesField field = (DenseVectorDocValuesField) scoreScript.field(fieldName);
        function = switch (field.getElementType()) {
            // BIT and BYTE query vectors may be given as a numeric list or a hex string;
            // FLOAT/BFLOAT16 accept only a numeric list.
            case BIT -> {
                if (queryVector instanceof List) {
                    yield new BitDotProduct(scoreScript, field, (List<Number>) queryVector);
                } else if (queryVector instanceof String s) {
                    byte[] parsedQueryVector = HexFormat.of().parseHex(s);
                    yield new BitDotProduct(scoreScript, field, parsedQueryVector);
                }
                throw new IllegalArgumentException("Unsupported input object for bit vectors: " + queryVector.getClass().getName());
            }
            case BYTE -> {
                if (queryVector instanceof List) {
                    yield new ByteDotProduct(scoreScript, field, (List<Number>) queryVector);
                } else if (queryVector instanceof String s) {
                    byte[] parsedQueryVector = HexFormat.of().parseHex(s);
                    yield new ByteDotProduct(scoreScript, field, parsedQueryVector);
                }
                throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName());
            }
            case FLOAT, BFLOAT16 -> {
                if (queryVector instanceof List) {
                    yield new FloatDotProduct(scoreScript, field, (List<Number>) queryVector);
                }
                throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName());
            }
        };
    }
    // Delegates to the element-type-specific implementation selected above.
    public double dotProduct() {
        return function.dotProduct();
    }
}
// Calculate cosine similarity between a query's dense vector and documents' dense vectors
public
|
DotProduct
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/actionqueue/JtaCustomAfterCompletionTest.java
|
{
"start": 1817,
"end": 4787
}
|
class ____ {
// Truncate all mapped tables after each test so the next test observes an empty schema.
@AfterEach
public void afterEach(EntityManagerFactoryScope scope) {
    scope.getEntityManagerFactory().unwrap( SessionFactoryImplementor.class ).getSchemaManager()
        .truncateMappedObjects();
}
// A custom AfterTransactionCompletionProcess registered on the session's action queue
// must run after the JTA transaction commits, and the persisted entity must be visible
// in a subsequent transaction.
@Test
public void success(EntityManagerFactoryScope scope) {
    AtomicBoolean called = new AtomicBoolean( false );
    try {
        TestingJtaPlatformImpl.INSTANCE.getTransactionManager().begin();
        scope.inEntityManager( session -> {
            session.unwrap( SessionImplementor.class ).getActionQueue()
                .registerCallback( new AfterTransactionCompletionProcess() {
                    @Override
                    public void doAfterTransactionCompletion(boolean success, SharedSessionContractImplementor session) {
                        called.set( true );
                    }
                } );
            // The callback must not fire before the transaction completes.
            assertFalse( called.get() );
            session.persist( new SimpleEntity( "jack" ) );
        } );
        TestingJtaPlatformImpl.INSTANCE.getTransactionManager().commit();
        assertTrue( called.get() );
        TestingJtaPlatformImpl.INSTANCE.getTransactionManager().begin();
        // Check that the transaction was committed
        scope.inEntityManager( session -> {
            long count = session.createQuery( "select count(*) from SimpleEntity", Long.class )
                .getSingleResult();
            assertEquals( 1L, count );
        } );
        TestingJtaPlatformImpl.INSTANCE.getTransactionManager().commit();
    }
    catch (Exception e) {
        // TestingJtaPlatformImpl.INSTANCE.getTransactionManager().getTransaction().rollback();
        fail( "Should not have thrown an exception" );
    }
}
// The after-completion callback must also fire on rollback (with success == false per
// the interface — not asserted here), and the rolled-back entity must not be persisted.
@Test
public void rollback(EntityManagerFactoryScope scope) {
    try {
        AtomicBoolean called = new AtomicBoolean( false );
        TestingJtaPlatformImpl.INSTANCE.getTransactionManager().begin();
        scope.inEntityManager( session -> {
            session.unwrap( SessionImplementor.class ).getActionQueue()
                .registerCallback( new AfterTransactionCompletionProcess() {
                    @Override
                    public void doAfterTransactionCompletion(boolean success, SharedSessionContractImplementor session) {
                        called.set( true );
                    }
                } );
            // The callback must not fire before the transaction completes.
            assertFalse( called.get() );
            scope.inEntityManager( theSession -> {
                theSession.persist( new SimpleEntity( "jack" ) );
                theSession.getTransaction().setRollbackOnly();
            } );
        } );
        TestingJtaPlatformImpl.INSTANCE.getTransactionManager().rollback();
        assertTrue( called.get() );
        TestingJtaPlatformImpl.INSTANCE.getTransactionManager().begin();
        // Check that the transaction was not committed
        scope.inEntityManager( session -> {
            long count = session.createQuery( "select count(*) from SimpleEntity", Long.class )
                .getSingleResult();
            assertEquals( 0L, count );
        } );
        TestingJtaPlatformImpl.INSTANCE.getTransactionManager().commit();
    }
    catch (Exception e) {
        // TestingJtaPlatformImpl.INSTANCE.getTransactionManager().getTransaction().rollback();
        fail( "Should not have thrown an exception", e );
    }
}
@Entity(name = "SimpleEntity")
public static
|
JtaCustomAfterCompletionTest
|
java
|
quarkusio__quarkus
|
devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/config-yaml-codestart/java/src/main/java/org/acme/GreetingConfig.java
|
{
"start": 140,
"end": 217
}
|
// Config mapping for the greeting message; binds the "message" property.
// NOTE(review): assumes an enclosing @ConfigMapping supplies the property prefix — confirm.
interface ____ {
    @WithName("message")
    String message();
}
|
GreetingConfig
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
|
{
"start": 20202,
"end": 20616
}
|
/**
 * A no-op {@link Checksum}: all input is discarded and the reported value is always
 * zero. Presumably used where checksum verification is disabled but the Checksum
 * interface is still required — confirm against DataChecksum call sites.
 */
class ____ implements Checksum {
    public ChecksumNull() {}

    @Override
    public void update(int b) {
        // input intentionally discarded
    }

    @Override
    public void update(byte[] b, int off, int len) {
        // input intentionally discarded
    }

    @Override
    public long getValue() {
        return 0;
    }

    @Override
    public void reset() {
        // nothing to reset
    }
};
/**
* Holds constructor handle to let it be initialized on demand.
*/
private static
|
ChecksumNull
|
java
|
square__moshi
|
moshi/src/test/java/com/squareup/moshi/MoshiTest.java
|
{
"start": 51736,
"end": 52590
}
|
/**
 * JsonAdapter.Factory that handles {@code String} fields annotated with
 * {@code @Uppercase}: it delegates to the next String adapter and upper-cases the
 * value in both directions.
 */
class ____ implements JsonAdapter.Factory {
    @Override
    public JsonAdapter<?> create(Type type, Set<? extends Annotation> annotations, Moshi moshi) {
        // Only applies to plain String fields carrying the @Uppercase annotation.
        if (!type.equals(String.class)) return null;
        if (!Util.isAnnotationPresent(annotations, Uppercase.class)) return null;
        final JsonAdapter<String> stringAdapter =
            moshi.nextAdapter(this, String.class, Util.NO_ANNOTATIONS);
        return new JsonAdapter<String>() {
            @Override
            public String fromJson(JsonReader reader) throws IOException {
                // NOTE(review): throws NPE if the delegate returns null (JSON null) —
                // presumably never exercised here; confirm before hardening.
                String s = stringAdapter.fromJson(reader);
                return s.toUpperCase(Locale.US);
            }

            @Override
            public void toJson(JsonWriter writer, String value) throws IOException {
                // Fix: use Locale.US to match fromJson; the bare toUpperCase() used the
                // default locale, which is locale-sensitive (e.g. Turkish dotless i).
                stringAdapter.toJson(writer, value.toUpperCase(Locale.US));
            }
        };
    }
}
|
UppercaseAdapterFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java
|
{
"start": 2736,
"end": 18724
}
|
class ____ extends ESTestCase {
private ThreadContext threadContext;
private ClusterService clusterService;
private ApiKeyService apiKeyService;
private AuthenticationService authenticationService;
private CrossClusterAccessAuthenticationService crossClusterAccessAuthenticationService;
private CrossClusterApiKeySignatureManager.Verifier verifier;
private CrossClusterApiKeySignatureManager.Signer signer;
// Builds the service under test around mocked collaborators. The cluster service uses
// deep stubs so the chained state()/threadPool() calls below can be stubbed directly.
@Before
public void init() throws Exception {
    this.threadContext = new ThreadContext(Settings.EMPTY);
    this.apiKeyService = mock(ApiKeyService.class);
    this.authenticationService = mock(AuthenticationService.class);
    this.verifier = mock(CrossClusterApiKeySignatureManager.Verifier.class);
    this.signer = mock(CrossClusterApiKeySignatureManager.Signer.class);
    this.clusterService = mock(ClusterService.class, Mockito.RETURNS_DEEP_STUBS);
    when(clusterService.state().getMinTransportVersion()).thenReturn(TransportVersion.current());
    when(clusterService.threadPool().getThreadContext()).thenReturn(threadContext);
    crossClusterAccessAuthenticationService = new CrossClusterAccessAuthenticationService(
        clusterService,
        apiKeyService,
        authenticationService,
        verifier
    );
}
// Happy path: a successful API-key authentication must be converted to a cross-cluster
// access authentication (carrying the subject info from the headers) and reported to the
// auditable request exactly once.
public void testAuthenticationSuccessOnSuccessfulAuthentication() throws IOException, ExecutionException, InterruptedException {
    final var crossClusterAccessHeaders = new CrossClusterAccessHeaders(
        CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(),
        AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo()
    );
    crossClusterAccessHeaders.writeToContext(threadContext, null);
    final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class);
    final ArgumentCaptor<Authentication> authenticationCapture = ArgumentCaptor.forClass(Authentication.class);
    doNothing().when(auditableRequest).authenticationSuccess(authenticationCapture.capture());
    // newContext must be handed the credentials parsed from the cross-cluster headers.
    doAnswer(invocationOnMock -> {
        AuthenticationToken authenticationToken = (AuthenticationToken) invocationOnMock.getArguments()[2];
        assertThat(authenticationToken.principal(), is(crossClusterAccessHeaders.credentials().principal()));
        assertThat(authenticationToken.credentials(), is(crossClusterAccessHeaders.credentials().credentials()));
        return new Authenticator.Context(
            threadContext,
            auditableRequest,
            mock(Realms.class),
            (AuthenticationToken) invocationOnMock.getArguments()[2]
        );
    }).when(authenticationService).newContext(anyString(), any(), any());
    @SuppressWarnings("unchecked")
    final ArgumentCaptor<ActionListener<Authentication>> listenerCaptor = ArgumentCaptor.forClass(ActionListener.class);
    doAnswer(i -> null).when(authenticationService).authenticate(any(Authenticator.Context.class), listenerCaptor.capture());
    final PlainActionFuture<Authentication> future = new PlainActionFuture<>();
    crossClusterAccessAuthenticationService.authenticate("action", mock(TransportRequest.class), future);
    // Complete the captured listener as if the inner API-key authentication succeeded.
    final Authentication apiKeyAuthentication = AuthenticationTestHelper.builder().apiKey().build(false);
    listenerCaptor.getValue().onResponse(apiKeyAuthentication);
    future.get();
    final Authentication expectedAuthentication = apiKeyAuthentication.toCrossClusterAccess(
        crossClusterAccessHeaders.getCleanAndValidatedSubjectInfo()
    );
    verify(auditableRequest).authenticationSuccess(expectedAuthentication);
    verifyNoMoreInteractions(auditableRequest);
}
// Subject info carrying an unexpected internal user (XPACK_USER) must be rejected after
// the inner authentication succeeds, surfacing via exceptionProcessingRequest with an
// IllegalArgumentException cause.
public void testExceptionProcessingRequestOnInvalidCrossClusterAccessSubjectInfo() throws IOException {
    final var crossClusterAccessHeaders = new CrossClusterAccessHeaders(
        CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(),
        new CrossClusterAccessSubjectInfo(
            // Invalid internal user
            AuthenticationTestHelper.builder().internal(InternalUsers.XPACK_USER).build(),
            new RoleDescriptorsIntersection(
                new RoleDescriptor("invalid_role", new String[] { "all" }, null, null, null, null, null, null, null, null, null, null)
            )
        )
    );
    crossClusterAccessHeaders.writeToContext(threadContext, null);
    final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class);
    final ArgumentCaptor<Authentication> authenticationCapture = ArgumentCaptor.forClass(Authentication.class);
    doNothing().when(auditableRequest).authenticationSuccess(authenticationCapture.capture());
    // Wrap whatever exception the service reports so the original is kept as the cause.
    when(auditableRequest.exceptionProcessingRequest(any(), any())).thenAnswer(
        i -> new ElasticsearchSecurityException("potato", (Exception) i.getArguments()[0])
    );
    doAnswer(invocationOnMock -> {
        AuthenticationToken authenticationToken = (AuthenticationToken) invocationOnMock.getArguments()[2];
        assertThat(authenticationToken.principal(), is(crossClusterAccessHeaders.credentials().principal()));
        assertThat(authenticationToken.credentials(), is(crossClusterAccessHeaders.credentials().credentials()));
        return new Authenticator.Context(
            threadContext,
            auditableRequest,
            mock(Realms.class),
            (AuthenticationToken) invocationOnMock.getArguments()[2]
        );
    }).when(authenticationService).newContext(anyString(), any(), any());
    @SuppressWarnings("unchecked")
    final ArgumentCaptor<ActionListener<Authentication>> listenerCaptor = ArgumentCaptor.forClass(ActionListener.class);
    doAnswer(i -> null).when(authenticationService).authenticate(any(Authenticator.Context.class), listenerCaptor.capture());
    final PlainActionFuture<Authentication> future = new PlainActionFuture<>();
    crossClusterAccessAuthenticationService.authenticate("action", mock(TransportRequest.class), future);
    // The inner API-key authentication succeeds; the failure happens when validating the
    // subject info afterwards.
    final Authentication apiKeyAuthentication = AuthenticationTestHelper.builder().apiKey().build(false);
    listenerCaptor.getValue().onResponse(apiKeyAuthentication);
    final ExecutionException actual = expectThrows(ExecutionException.class, future::get);
    assertThat(actual.getCause().getCause(), instanceOf(IllegalArgumentException.class));
    assertThat(
        actual.getCause().getCause().getMessage(),
        containsString("received cross cluster request from an unexpected internal user [" + InternalUsers.XPACK_USER.principal() + "]")
    );
    verify(auditableRequest).exceptionProcessingRequest(
        any(Exception.class),
        credentialsArgMatches(crossClusterAccessHeaders.credentials())
    );
    verifyNoMoreInteractions(auditableRequest);
}
// Like the happy-path test, but the headers are signed; with the verifier stubbed to
// accept the signature, authentication must complete and be audited as a success.
public void testAuthenticationSuccessfulCrossClusterApiKeySignature() throws IOException, GeneralSecurityException, ExecutionException,
    InterruptedException {
    var subjectInfo = AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo();
    var apiKeyHeader = CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader();
    var certs = PemUtils.readCertificates(List.of(getDataPath("/org/elasticsearch/xpack/security/signature/signing_rsa.crt")))
        .stream()
        .map(cert -> (X509Certificate) cert)
        .toArray(X509Certificate[]::new);
    final var crossClusterAccessHeaders = new CrossClusterAccessHeaders(apiKeyHeader, subjectInfo);
    // The signer attaches a certificate-bearing signature when the headers are written.
    when(signer.sign(anyString(), anyString())).thenReturn(new X509CertificateSignature(certs, "", mock(BytesReference.class)));
    crossClusterAccessHeaders.writeToContext(threadContext, signer);
    when(verifier.verify(any(X509CertificateSignature.class), anyString(), anyString())).thenReturn(true);
    final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class);
    final ArgumentCaptor<Authentication> authenticationCapture = ArgumentCaptor.forClass(Authentication.class);
    doNothing().when(auditableRequest).authenticationSuccess(authenticationCapture.capture());
    var authContext = new Authenticator.Context(
        threadContext,
        auditableRequest,
        mock(Realms.class),
        crossClusterAccessHeaders.credentials()
    );
    var action = "action";
    var request = mock(TransportRequest.class);
    when(authenticationService.newContext(anyString(), any(TransportRequest.class), any(ApiKeyService.ApiKeyCredentials.class)))
        .thenReturn(authContext);
    @SuppressWarnings("unchecked")
    final ArgumentCaptor<ActionListener<Authentication>> listenerCaptor = ArgumentCaptor.forClass(ActionListener.class);
    doAnswer(i -> null).when(authenticationService).authenticate(any(Authenticator.Context.class), listenerCaptor.capture());
    final PlainActionFuture<Authentication> future = new PlainActionFuture<>();
    crossClusterAccessAuthenticationService.authenticate(action, request, future);
    // Complete the captured listener as if the inner API-key authentication succeeded.
    final Authentication apiKeyAuthentication = AuthenticationTestHelper.builder().apiKey().build(false);
    listenerCaptor.getValue().onResponse(apiKeyAuthentication);
    future.get();
    final Authentication expectedAuthentication = apiKeyAuthentication.toCrossClusterAccess(
        crossClusterAccessHeaders.getCleanAndValidatedSubjectInfo()
    );
    verify(auditableRequest).authenticationSuccess(expectedAuthentication);
    verifyNoMoreInteractions(auditableRequest);
}
// Signature verification failure: the verifier either throws (bad certificate) or
// returns false (invalid signature); in both cases authentication must fail with an
// ElasticsearchSecurityException whose message identifies the offending certificate.
public void testAuthenticationExceptionOnBadCrossClusterApiKeySignature() throws IOException, GeneralSecurityException {
    var subjectInfo = AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo();
    var apiKeyHeader = CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader();
    var certs = PemUtils.readCertificates(List.of(getDataPath("/org/elasticsearch/xpack/security/signature/signing_rsa.crt")))
        .stream()
        .map(cert -> (X509Certificate) cert)
        .toArray(X509Certificate[]::new);
    final var crossClusterAccessHeaders = new CrossClusterAccessHeaders(apiKeyHeader, subjectInfo);
    var verifyMock = when(verifier.verify(any(X509CertificateSignature.class), anyString(), anyString()));
    // Randomly pick which of the two failure modes the verifier exhibits.
    boolean badCert = randomBoolean();
    if (badCert) {
        verifyMock.thenThrow(new GeneralSecurityException("bad certificate"));
    } else {
        verifyMock.thenReturn(false);
    }
    when(signer.sign(anyString(), anyString())).thenReturn(new X509CertificateSignature(certs, "", mock(BytesReference.class)));
    crossClusterAccessHeaders.writeToContext(threadContext, signer);
    var auditableRequest = mock(AuthenticationService.AuditableRequest.class);
    // Pass the reported exception straight through so it surfaces on the future.
    doAnswer(invocationOnMock -> invocationOnMock.getArguments()[0]).when(auditableRequest).exceptionProcessingRequest(any(), any());
    var authContext = new Authenticator.Context(
        threadContext,
        auditableRequest,
        mock(Realms.class),
        crossClusterAccessHeaders.credentials()
    );
    var action = "action";
    var request = mock(TransportRequest.class);
    when(authenticationService.newContext(anyString(), any(TransportRequest.class), any(ApiKeyService.ApiKeyCredentials.class)))
        .thenReturn(authContext);
    final PlainActionFuture<Authentication> future = new PlainActionFuture<>();
    crossClusterAccessAuthenticationService.authenticate(action, request, future);
    final ExecutionException actual = expectThrows(ExecutionException.class, future::get);
    assertThat(actual.getCause(), instanceOf(ElasticsearchSecurityException.class));
    assertThat(
        actual.getMessage(),
        containsString(
            (badCert
                ? "Failed to verify cross cluster api key signature certificate from ["
                : "Invalid cross cluster api key signature from [") + X509CertificateSignature.certificateToString(certs[0]) + "]"
        )
    );
}
    /**
     * When the delegated authentication fails, the failure must propagate unchanged to the caller
     * and the {@code AuditableRequest} must not be touched at all (no success/failure audit events).
     */
    public void testNoInteractionWithAuditableRequestOnInitialAuthenticationFailure() throws IOException {
        final var crossClusterAccessHeaders = new CrossClusterAccessHeaders(
            CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(),
            AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo()
        );
        crossClusterAccessHeaders.writeToContext(threadContext, null);
        final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class);
        // Verify the context is created with exactly the credentials taken from the request headers.
        doAnswer(invocationOnMock -> {
            AuthenticationToken authenticationToken = (AuthenticationToken) invocationOnMock.getArguments()[2];
            assertThat(authenticationToken.principal(), is(crossClusterAccessHeaders.credentials().principal()));
            assertThat(authenticationToken.credentials(), is(crossClusterAccessHeaders.credentials().credentials()));
            return new Authenticator.Context(
                threadContext,
                auditableRequest,
                mock(Realms.class),
                (AuthenticationToken) invocationOnMock.getArguments()[2]
            );
        }).when(authenticationService).newContext(anyString(), any(), any());
        @SuppressWarnings("unchecked")
        final ArgumentCaptor<ActionListener<Authentication>> listenerCaptor = ArgumentCaptor.forClass(ActionListener.class);
        doAnswer(i -> null).when(authenticationService).authenticate(any(Authenticator.Context.class), listenerCaptor.capture());
        final PlainActionFuture<Authentication> future = new PlainActionFuture<>();
        crossClusterAccessAuthenticationService.authenticate("action", mock(TransportRequest.class), future);
        // Simulate the downstream authenticator failing and assert the exact exception bubbles up.
        final ElasticsearchSecurityException authenticationFailure = new ElasticsearchSecurityException("authentication failure");
        listenerCaptor.getValue().onFailure(authenticationFailure);
        final ExecutionException actual = expectThrows(ExecutionException.class, future::get);
        assertThat(actual.getCause(), equalTo(authenticationFailure));
        verifyNoInteractions(auditableRequest);
    }
    /**
     * A terminal {@code AuthenticationResult} from the API key service must fail the future with
     * the exact exception carried by the terminate result.
     */
    public void testTerminateExceptionBubblesUpWithAuthenticateHeaders() {
        @SuppressWarnings("unchecked")
        final ArgumentCaptor<ActionListener<AuthenticationResult<User>>> listenerCaptor = ArgumentCaptor.forClass(ActionListener.class);
        doAnswer(i -> null).when(apiKeyService)
            .tryAuthenticate(any(), any(ApiKeyService.ApiKeyCredentials.class), listenerCaptor.capture());
        final PlainActionFuture<Void> future = new PlainActionFuture<>();
        crossClusterAccessAuthenticationService.tryAuthenticate(
            new ApiKeyService.ApiKeyCredentials(UUIDs.randomBase64UUID(), UUIDs.randomBase64UUIDSecureString(), ApiKey.Type.CROSS_CLUSTER),
            future
        );
        // terminate(...) wraps the cause; the service must unwrap and propagate it as-is.
        Exception ex = new IllegalArgumentException("terminator");
        listenerCaptor.getValue().onResponse(AuthenticationResult.terminate("authentication failure", ex));
        final ExecutionException actual = expectThrows(ExecutionException.class, future::get);
        assertThat(actual.getCause(), equalTo(ex));
    }
    /**
     * Mockito matcher for an {@link AuthenticationToken} whose principal and credentials equal the
     * given token's — compared field-by-field since token instances may not implement equals.
     */
    private static AuthenticationToken credentialsArgMatches(AuthenticationToken credentials) {
        return argThat(arg -> arg.principal().equals(credentials.principal()) && arg.credentials().equals(credentials.credentials()));
    }
}
|
CrossClusterAccessAuthenticationServiceTests
|
java
|
dropwizard__dropwizard
|
dropwizard-jersey/src/main/java/io/dropwizard/jersey/DropwizardResourceConfig.java
|
{
"start": 8619,
"end": 14676
}
|
/**
 * Jersey {@link ApplicationEventListener} that, once application initialization finishes, records
 * the registered resources/providers and logs a human-readable listing of all endpoint paths.
 */
class ____ implements ApplicationEventListener {
    private final DropwizardResourceConfig config;
    // Populated on INITIALIZATION_APP_FINISHED; empty until then.
    private List<Resource> resources = Collections.emptyList();
    private Set<Class<?>> providers = Collections.emptySet();
    ComponentLoggingListener(DropwizardResourceConfig config) {
        this.config = config;
    }
    @Override
    @SuppressWarnings("Slf4jFormatShouldBeConst")
    public void onEvent(ApplicationEvent event) {
        if (event.getType() == ApplicationEvent.Type.INITIALIZATION_APP_FINISHED) {
            resources = event.getResourceModel().getResources();
            providers = event.getProviders();
            // NOTE(review): the joined class lists are built even when DEBUG is disabled —
            // consider guarding with LOGGER.isDebugEnabled(); confirm before changing.
            final String resourceClasses = resources.stream()
                .map(x -> x.getClass().getCanonicalName())
                .collect(Collectors.joining(", "));
            final String providerClasses = providers.stream()
                .map(Class::getCanonicalName)
                .collect(Collectors.joining(", "));
            LOGGER.debug("resources = {}", resourceClasses);
            LOGGER.debug("providers = {}", providerClasses);
            LOGGER.info(getEndpointsInfo());
        }
    }
    /**
     * Collects one log line per HTTP method on the resource (OPTIONS is skipped), recursing into
     * sub-resource locators by resolving their response type into a nested {@link Resource}.
     */
    private List<EndpointLogLine> logMethodLines(Resource resource, String contextPath) {
        final List<EndpointLogLine> methodLines = new ArrayList<>();
        for (ResourceMethod method : resource.getAllMethods()) {
            if ("OPTIONS".equalsIgnoreCase(method.getHttpMethod())) {
                continue;
            }
            final String path = mergePaths(contextPath, resource.getPath());
            final Class<?> handler = method.getInvocable().getHandler().getHandlerClass();
            switch (method.getType()) {
                case RESOURCE_METHOD:
                    methodLines.add(new EndpointLogLine(method.getHttpMethod(), path, handler));
                    break;
                case SUB_RESOURCE_LOCATOR:
                    final ResolvedType responseType = TYPE_RESOLVER
                            .resolve(method.getInvocable().getResponseType());
                    // For generic return types (e.g. Class<X>), use the first type binding.
                    final Class<?> erasedType = !responseType.getTypeBindings().isEmpty() ?
                            responseType.getTypeBindings().getBoundType(0).getErasedType() :
                            responseType.getErasedType();
                    final Resource res = Resource.from(erasedType);
                    if (res == null) {
                        methodLines.add(new EndpointLogLine(method.getHttpMethod(), path, handler));
                    } else {
                        methodLines.addAll(logResourceLines(res, path));
                    }
                    break;
                default:
                    break;
            }
        }
        return methodLines;
    }
    /**
     * Joins a context path and sub-path with exactly one slash, trimming the sub-path's leading
     * and trailing slashes, then normalizes via {@code cleanUpPath}.
     */
    private static String mergePaths(@NotNull String context, String subPath) {
        if (subPath == null || subPath.isEmpty()) {
            return cleanUpPath(context);
        }
        final StringBuilder path = new StringBuilder(context);
        if (!context.endsWith("/")) {
            path.append('/');
        }
        if (!"/".equals(subPath)) {
            final int startIndex = subPath.startsWith("/") ? 1 : 0;
            final int endIndex = subPath.endsWith("/") ? subPath.length() - 1 : subPath.length();
            path.append(subPath, startIndex, endIndex);
        }
        return cleanUpPath(path.toString());
    }
    /** Recursively collects endpoint lines for a resource and all of its child resources. */
    private List<EndpointLogLine> logResourceLines(Resource resource, String contextPath) {
        final List<EndpointLogLine> resourceLines = new ArrayList<>();
        for (Resource child : resource.getChildResources()) {
            resourceLines.addAll(logResourceLines(child, mergePaths(contextPath, resource.getPath())));
        }
        resourceLines.addAll(logMethodLines(resource, contextPath));
        return resourceLines;
    }
    /**
     * Builds the multi-line endpoint report: normalizes the context path and servlet url-pattern
     * into a base path, then lists every endpoint (sorted/deduplicated via EndpointComparator),
     * or " NONE" when nothing is registered.
     */
    String getEndpointsInfo() {
        final StringBuilder msg = new StringBuilder(1024);
        final Set<EndpointLogLine> endpointLogLines = new TreeSet<>(new EndpointComparator());
        final String contextPath = config.getContextPath();
        // "" and "/" collapse to no prefix; otherwise ensure a single leading slash.
        final String normalizedContextPath = contextPath.isEmpty() || contextPath.equals("/") ? "" :
                contextPath.startsWith("/") ? contextPath : "/" + contextPath;
        // Strip a trailing "*" from patterns like "/api/*".
        final String pattern = config.getUrlPattern().endsWith("/*") ?
                config.getUrlPattern().substring(0, config.getUrlPattern().length() - 1) :
                config.getUrlPattern();
        final String path = mergePaths(normalizedContextPath, pattern);
        msg.append("The following paths were found for the configured resources:");
        msg.append(NEWLINE).append(NEWLINE);
        for (Resource resource : resources) {
            endpointLogLines.addAll(logResourceLines(resource, path));
        }
        // Providers may also carry resource annotations (e.g. @Path-annotated providers).
        final List<EndpointLogLine> providerLines = providers.stream()
            .map(Resource::from)
            .filter(Objects::nonNull)
            .flatMap(res -> logResourceLines(res, path).stream())
            .collect(Collectors.toList());
        endpointLogLines.addAll(providerLines);
        if (!endpointLogLines.isEmpty()) {
            for (EndpointLogLine line : endpointLogLines) {
                msg.append(line).append(NEWLINE);
            }
        } else {
            msg.append("    NONE").append(NEWLINE);
        }
        return msg.toString();
    }
    @Override
    @Nullable
    public RequestEventListener onRequest(RequestEvent requestEvent) {
        // Per-request events are not needed; only the application-level event is observed.
        return null;
    }
}
static final
|
ComponentLoggingListener
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryAsyncTest.java
|
{
"start": 1846,
"end": 2180
}
|
class ____ {
int test() {
var ai = new AtomicInteger();
ai.set(1);
return ai.get();
}
}
""")
.addOutputLines(
"Test.java",
"""
import java.util.concurrent.atomic.AtomicInteger;
|
Test
|
java
|
quarkusio__quarkus
|
extensions/smallrye-graphql/deployment/src/main/java/io/quarkus/smallrye/graphql/deployment/OverridableIndex.java
|
{
"start": 606,
"end": 5806
}
|
class ____ implements IndexView {
private final IndexView original;
private final IndexView override;
    /** Use {@link #create(IndexView, IndexView)} to construct instances. */
    private OverridableIndex(IndexView original, IndexView override) {
        this.original = original;
        this.override = override;
    }
    /** Builds a view that merges {@code original} with {@code override} on every lookup. */
    public static OverridableIndex create(IndexView original, IndexView override) {
        return new OverridableIndex(original, override);
    }
    /** Merged known classes from both indexes (precedence per {@code overrideCollection}). */
    @Override
    public Collection<ClassInfo> getKnownClasses() {
        return overrideCollection(original.getKnownClasses(), override.getKnownClasses(), classInfoComparator);
    }
    /** Class lookup against both indexes; see {@code overrideObject} for which result wins. */
    @Override
    public ClassInfo getClassByName(DotName dn) {
        return overrideObject(original.getClassByName(dn), override.getClassByName(dn));
    }
    /** Merged direct subclasses of {@code dn} from both indexes. */
    @Override
    public Collection<ClassInfo> getKnownDirectSubclasses(DotName dn) {
        return overrideCollection(original.getKnownDirectSubclasses(dn), override.getKnownDirectSubclasses(dn),
                classInfoComparator);
    }
    /** Merged transitive subclasses of {@code dn} from both indexes. */
    @Override
    public Collection<ClassInfo> getAllKnownSubclasses(DotName dn) {
        return overrideCollection(original.getAllKnownSubclasses(dn), override.getAllKnownSubclasses(dn), classInfoComparator);
    }
    /** Merged direct subinterfaces of {@code dn} from both indexes. */
    @Override
    public Collection<ClassInfo> getKnownDirectSubinterfaces(DotName dn) {
        return overrideCollection(original.getKnownDirectSubinterfaces(dn), override.getKnownDirectSubinterfaces(dn),
                classInfoComparator);
    }
    /** Merged transitive subinterfaces of {@code dn} from both indexes. */
    @Override
    public Collection<ClassInfo> getAllKnownSubinterfaces(DotName dn) {
        return overrideCollection(original.getAllKnownSubinterfaces(dn), override.getAllKnownSubinterfaces(dn),
                classInfoComparator);
    }
    /** Merged direct implementations of interface {@code dn} from both indexes. */
    @Override
    public Collection<ClassInfo> getKnownDirectImplementations(DotName dn) {
        return overrideCollection(original.getKnownDirectImplementations(dn), override.getKnownDirectImplementations(dn),
                classInfoComparator);
    }
    /** Merged transitive implementations of interface {@code dn} from both indexes. */
    @Override
    public Collection<ClassInfo> getAllKnownImplementations(DotName dn) {
        return overrideCollection(original.getAllKnownImplementations(dn), override.getAllKnownImplementations(dn),
                classInfoComparator);
    }
    /** Merged direct implementors of {@code dn} from both indexes. */
    @Override
    public Collection<ClassInfo> getKnownDirectImplementors(DotName dn) {
        return overrideCollection(original.getKnownDirectImplementors(dn), override.getKnownDirectImplementors(dn),
                classInfoComparator);
    }
    /** Merged transitive implementors of {@code dn} from both indexes. */
    @Override
    public Collection<ClassInfo> getAllKnownImplementors(DotName dn) {
        return overrideCollection(original.getAllKnownImplementors(dn), override.getAllKnownImplementors(dn),
                classInfoComparator);
    }
    /** Merged annotation instances of {@code dn} from both indexes. */
    @Override
    public Collection<AnnotationInstance> getAnnotations(DotName dn) {
        return overrideCollection(original.getAnnotations(dn), override.getAnnotations(dn), annotationInstanceComparator);
    }
    /** Merged annotation instances (including repeatable containers) from both indexes. */
    @Override
    public Collection<AnnotationInstance> getAnnotationsWithRepeatable(DotName dn, IndexView iv) {
        return overrideCollection(original.getAnnotationsWithRepeatable(dn, iv), override.getAnnotationsWithRepeatable(dn, iv),
                annotationInstanceComparator);
    }
    /** Merged known modules from both indexes. */
    @Override
    public Collection<ModuleInfo> getKnownModules() {
        return overrideCollection(original.getKnownModules(), override.getKnownModules(), moduleInfoComparator);
    }
    /** Module lookup against both indexes; see {@code overrideObject} for which result wins. */
    @Override
    public ModuleInfo getModuleByName(DotName dn) {
        return overrideObject(original.getModuleByName(dn), override.getModuleByName(dn));
    }
    /** Merged set of classes referencing {@code dn}, from both indexes. */
    @Override
    public Collection<ClassInfo> getKnownUsers(DotName dn) {
        return overrideCollection(original.getKnownUsers(dn), override.getKnownUsers(dn), classInfoComparator);
    }
    /** Merged classes in package {@code pn} from both indexes. */
    @Override
    public Collection<ClassInfo> getClassesInPackage(DotName pn) {
        return overrideCollection(original.getClassesInPackage(pn), override.getClassesInPackage(pn), classInfoComparator);
    }
    /** Merged subpackages of {@code pn}; wrapped in a HashSet to satisfy the Set return type. */
    @Override
    public Set<DotName> getSubpackages(DotName pn) {
        return new HashSet<>(overrideCollection(original.getSubpackages(pn), override.getSubpackages(pn),
                Comparator.naturalOrder()));
    }
private Comparator<ClassInfo> classInfoComparator = new Comparator<ClassInfo>() {
@Override
public int compare(ClassInfo t, ClassInfo t1) {
return t.name().toString().compareTo(t1.name().toString());
}
};
private Comparator<Type> typeComparator = new Comparator<Type>() {
@Override
public int compare(Type t, Type t1) {
return t.name().toString().compareTo(t1.name().toString());
}
};
private Comparator<ModuleInfo> moduleInfoComparator = new Comparator<ModuleInfo>() {
@Override
public int compare(ModuleInfo t, ModuleInfo t1) {
return t.name().toString().compareTo(t1.name().toString());
}
};
private Comparator<FieldInfo> fieldInfoComparator = new Comparator<FieldInfo>() {
@Override
public int compare(FieldInfo t, FieldInfo t1) {
if (classInfoComparator.compare(t.declaringClass(), t1.declaringClass()) == 0) { // Same
|
OverridableIndex
|
java
|
apache__kafka
|
storage/src/main/java/org/apache/kafka/storage/internals/log/UnifiedLog.java
|
{
"start": 110583,
"end": 151202
}
|
/**
 * Predicate deciding whether a log segment is eligible for deletion, given the segment and
 * (if present) its successor segment.
 */
interface ____ {
    boolean execute(LogSegment segment, Optional<LogSegment> nextSegmentOpt) throws IOException;
}
    /**
     * Deletes segments whose largest timestamp is older than the (local) retention time.
     *
     * @return the number of segments deleted; 0 when retention by time is disabled (negative ms)
     */
    private int deleteRetentionMsBreachedSegments() throws IOException {
        long retentionMs = UnifiedLog.localRetentionMs(config(), remoteLogEnabledAndRemoteCopyEnabled());
        if (retentionMs < 0) return 0;
        long startMs = time().milliseconds();
        DeletionCondition shouldDelete = (segment, nextSegmentOpt) -> {
            // A segment with a future timestamp can never satisfy "startMs - largestTimestamp > retentionMs".
            if (startMs < segment.largestTimestamp()) {
                futureTimestampLogger.warn("{} contains future timestamp(s), making it ineligible to be deleted", segment);
            }
            boolean delete = startMs - segment.largestTimestamp() > retentionMs;
            logger.debug("{} retentionMs breached: {}, startMs={}, retentionMs={}",
                segment, delete, startMs, retentionMs);
            return delete;
        };
        return deleteOldSegments(shouldDelete, toDelete -> {
            long localRetentionMs = UnifiedLog.localRetentionMs(config(), remoteLogEnabledAndRemoteCopyEnabled());
            for (LogSegment segment : toDelete) {
                // Log whether the decision was based on record timestamps or file mtime, and
                // whether this is local-only retention (remote copy enabled) or full retention.
                if (segment.largestRecordTimestamp().isPresent()) {
                    if (remoteLogEnabledAndRemoteCopyEnabled()) {
                        logger.info("Deleting segment {} due to local log retention time {}ms breach based on the largest " +
                            "record timestamp in the segment", segment, localRetentionMs);
                    } else {
                        logger.info("Deleting segment {} due to log retention time {}ms breach based on the largest " +
                            "record timestamp in the segment", segment, localRetentionMs);
                    }
                } else {
                    if (remoteLogEnabledAndRemoteCopyEnabled()) {
                        logger.info("Deleting segment {} due to local log retention time {}ms breach based on the " +
                            "last modified time of the segment", segment, localRetentionMs);
                    } else {
                        logger.info("Deleting segment {} due to log retention time {}ms breach based on the " +
                            "last modified time of the segment", segment, localRetentionMs);
                    }
                }
            }
        });
    }
    /**
     * Deletes oldest segments until the log fits within the (local) retention size.
     *
     * @return the number of segments deleted; 0 when size retention is disabled or not exceeded
     */
    private int deleteRetentionSizeBreachedSegments() throws IOException {
        long retentionSize = UnifiedLog.localRetentionSize(config(), remoteLogEnabledAndRemoteCopyEnabled());
        long logSize = size();
        if (retentionSize < 0 || logSize < retentionSize) return 0;
        // Running excess over the retention budget; decremented as segments are marked for deletion.
        final AtomicLong diff = new AtomicLong(logSize - retentionSize);
        DeletionCondition shouldDelete = (segment, nextSegmentOpt) -> {
            int segmentSize = segment.size();
            boolean delete = diff.get() - segmentSize >= 0;
            logger.debug("{} retentionSize breached: {}, log size before delete segment={}, after delete segment={}",
                segment, delete, diff.get(), diff.get() - segmentSize);
            if (delete) {
                diff.addAndGet(-segmentSize);
            }
            return delete;
        };
        return deleteOldSegments(shouldDelete, toDelete -> {
            long size = size();
            for (LogSegment segment : toDelete) {
                size -= segment.size();
                if (remoteLogEnabledAndRemoteCopyEnabled()) {
                    logger.info("Deleting segment {} due to local log retention size {} breach. Local log size after deletion will be {}.",
                        segment, UnifiedLog.localRetentionSize(config(), true), size);
                } else {
                    logger.info("Deleting segment {} due to log retention size {} breach. Log size after deletion will be {}.",
                        segment, config().retentionSize, size);
                }
            }
        });
    }
    /**
     * Deletes segments that lie entirely below the (local) log start offset — i.e. whose successor
     * segment starts at or before it.
     *
     * @return the number of segments deleted
     */
    private int deleteLogStartOffsetBreachedSegments() throws IOException {
        DeletionCondition shouldDelete = (segment, nextSegmentOpt) -> {
            boolean isRemoteLogEnabled = remoteLogEnabled();
            long localLSO = localLogStartOffset();
            long logStartOffsetValue = isRemoteLogEnabled ? localLSO : logStartOffset();
            // The active (last) segment has no successor and is therefore never deleted here.
            boolean delete = nextSegmentOpt
                .map(nextSegment -> nextSegment.baseOffset() <= logStartOffsetValue)
                .orElse(false);
            // NOTE(review): the field `logStartOffset` is referenced here while the accessor
            // logStartOffset() is used above — presumably equivalent; confirm.
            logger.debug("{} logStartOffset breached: {}, nextSegmentOpt={}, {}",
                segment, delete, nextSegmentOpt, isRemoteLogEnabled ? "localLogStartOffset=" + localLSO : "logStartOffset=" + logStartOffset);
            return delete;
        };
        return deleteOldSegments(shouldDelete, toDelete -> {
            if (remoteLogEnabledAndRemoteCopyEnabled()) {
                logger.info("Deleting segments due to local log start offset {} breach: {}",
                    localLogStartOffset(), toDelete.stream().map(LogSegment::toString).collect(Collectors.joining(",")));
            } else {
                logger.info("Deleting segments due to log start offset {} breach: {}",
                    logStartOffset, toDelete.stream().map(LogSegment::toString).collect(Collectors.joining(",")));
            }
        });
    }
    /** @return whether the underlying local log is flagged as a future log. */
    public boolean isFuture() {
        return localLog.isFuture();
    }
    /**
     * The size of the log in bytes
     *
     * @return summed byte size of all local segments
     */
    public long size() {
        return LogSegments.sizeInBytes(logSegments());
    }
/**
* The log size in bytes for all segments that are only in local log but not yet in remote log.
*/
public long onlyLocalLogSegmentsSize() {
return LogSegments.sizeInBytes(logSegments().stream().filter(s -> s.baseOffset() >= highestOffsetInRemoteStorage()).collect(Collectors.toList()));
}
    /**
     * The number of segments that are only in local log but not yet in remote log.
     *
     * @return count of segments whose base offset is at or beyond the highest offset in remote storage
     */
    public long onlyLocalLogSegmentsCount() {
        return logSegments().stream().filter(s -> s.baseOffset() >= highestOffsetInRemoteStorage()).count();
    }
    /**
     * The offset of the next message that will be appended to the log
     *
     * @return the local log's end offset
     */
    public long logEndOffset() {
        return localLog.logEndOffset();
    }
    /**
     * The offset metadata of the next message that will be appended to the log
     *
     * @return the local log's end-offset metadata
     */
    public LogOffsetMetadata logEndOffsetMetadata() {
        return localLog.logEndOffsetMetadata();
    }
/**
* Roll the log over to a new empty log segment if necessary.
* The segment will be rolled if one of the following conditions met:
* 1. The logSegment is full
* 2. The maxTime has elapsed since the timestamp of first message in the segment (or since the
* create time if the first message does not have a timestamp)
* 3. The index is full
*
* @param messagesSize The messages set size in bytes.
* @param appendInfo log append information
*
* @return The currently active segment after (perhaps) rolling to a new segment
*/
private LogSegment maybeRoll(int messagesSize, LogAppendInfo appendInfo) throws IOException {
synchronized (lock) {
LogSegment segment = localLog.segments().activeSegment();
long now = time().milliseconds();
long maxTimestampInMessages = appendInfo.maxTimestamp();
long maxOffsetInMessages = appendInfo.lastOffset();
if (segment.shouldRoll(new RollParams(config().maxSegmentMs(), config().segmentSize(), appendInfo.maxTimestamp(), appendInfo.lastOffset(), messagesSize, now))) {
logger.debug("Rolling new log segment (log_size = {}/{}}, " +
"offset_index_size = {}/{}, " +
"time_index_size = {}/{}, " +
"inactive_time_ms = {}/{}).",
segment.size(), config().segmentSize(),
segment.offsetIndex().entries(), segment.offsetIndex().maxEntries(),
segment.timeIndex().entries(), segment.timeIndex().maxEntries(),
segment.timeWaitedForRoll(now, maxTimestampInMessages), config().segmentMs - segment.rollJitterMs());
/*
maxOffsetInMessages - Integer.MAX_VALUE is a heuristic value for the first offset in the set of messages.
Since the offset in messages will not differ by more than Integer.MAX_VALUE, this is guaranteed <= the real
first offset in the set. Determining the true first offset in the set requires decompression, which the follower
is trying to avoid during log append. Prior behavior assigned new baseOffset = logEndOffset from old segment.
This was problematic in the case that two consecutive messages differed in offset by
Integer.MAX_VALUE.toLong + 2 or more. In this case, the prior behavior would roll a new log segment whose
base offset was too low to contain the next message. This edge case is possible when a replica is recovering a
highly compacted topic from scratch.
Note that this is only required for pre-V2 message formats because these do not store the first message offset
in the header.
*/
long rollOffset = appendInfo.firstOffset() == UnifiedLog.UNKNOWN_OFFSET
? maxOffsetInMessages - Integer.MAX_VALUE
: appendInfo.firstOffset();
return roll(Optional.of(rollOffset));
} else {
return segment;
}
}
}
    /**
     * Roll the local log over to a new active segment starting with the localLog.logEndOffset.
     * This will trim the index to the exact size of the number of entries it currently contains.
     *
     * @return The newly rolled segment
     * @throws IOException if rolling the segment fails
     */
    public LogSegment roll() throws IOException {
        return roll(Optional.empty());
    }
    /**
     * Roll the local log over to a new active segment starting with the expectedNextOffset (when provided),
     * or localLog.logEndOffset otherwise. This will trim the index to the exact size of the number of entries
     * it currently contains.
     *
     * @param expectedNextOffset base offset for the new segment, if known
     * @return The newly rolled segment
     */
    public LogSegment roll(Optional<Long> expectedNextOffset) throws IOException {
        synchronized (lock) {
            long nextOffset = expectedNextOffset.orElse(0L);
            LogSegment newSegment = localLog.roll(nextOffset);
            // Take a snapshot of the producer state to facilitate recovery. It is useful to have the snapshot
            // offset align with the new segment offset since this ensures we can recover the segment by beginning
            // with the corresponding snapshot file and scanning the segment data. Because the segment base offset
            // may actually be ahead of the current producer state end offset (which corresponds to the log end offset),
            // we manually override the state offset here prior to taking the snapshot.
            producerStateManager.updateMapEndOffset(newSegment.baseOffset());
            // We avoid potentially-costly fsync call, since we acquire UnifiedLog#lock here
            // which could block subsequent produces in the meantime.
            // flush is done in the scheduler thread along with segment flushing below
            Optional<File> maybeSnapshot = producerStateManager.takeSnapshot(false);
            updateHighWatermarkWithLogEndOffset();
            // Schedule an asynchronous flush of the old segment
            scheduler().scheduleOnce("flush-log", () -> {
                maybeSnapshot.ifPresent(f -> flushProducerStateSnapshot(f.toPath()));
                flushUptoOffsetExclusive(newSegment.baseOffset());
            });
            return newSegment;
        }
    }
    /**
     * Flush all local log segments
     *
     * @param forceFlushActiveSegment should be true during a clean shutdown, and false otherwise. The reason is that
     * we have to pass logEndOffset + 1 to the `localLog.flush(offset: Long): Unit` function to flush empty
     * active segments, which is important to make sure we persist the active segment file during shutdown, particularly
     * when it's empty.
     */
    public void flush(boolean forceFlushActiveSegment) {
        // Flushing up to logEndOffset inclusively covers the (possibly empty) active segment.
        flush(logEndOffset(), forceFlushActiveSegment);
    }
    /**
     * Flush local log segments for all offsets up to offset-1
     *
     * @param offset The offset to flush up to (non-inclusive); the new recovery point
     */
    public void flushUptoOffsetExclusive(long offset) {
        flush(offset, false);
    }
    /**
     * Flush local log segments for all offsets up to offset-1 if includingOffset=false; up to offset
     * if includingOffset=true. The recovery point is set to offset.
     *
     * @param offset The offset to flush up to; the new recovery point
     * @param includingOffset Whether the flush includes the provided offset.
     */
    private void flush(long offset, boolean includingOffset) {
        long flushOffset = includingOffset ? offset + 1 : offset;
        String includingOffsetStr = includingOffset ? "inclusive" : "exclusive";
        // Any IOException is routed through the log-dir failure channel.
        maybeHandleIOException(
            () -> "Error while flushing log for " + topicPartition() + " in dir " + dir().getParent() + " with offset " + offset +
                " (" + includingOffsetStr + ") and recovery point " + offset,
            () -> {
                // Skip when everything up to flushOffset has already been flushed.
                if (flushOffset > localLog.recoveryPoint()) {
                    logger.debug("Flushing log up to offset {} ({}) with recovery point {}, last flushed: {}, current time: {}, unflushed: {}",
                        offset, includingOffsetStr, offset, lastFlushTime(), time().milliseconds(), localLog.unflushedMessages());
                    localLog.flush(flushOffset);
                    synchronized (lock) {
                        localLog.markFlushed(offset);
                    }
                }
                return null;
            });
    }
    /**
     * Completely delete the local log directory and all contents from the file system with no delay
     */
    public void delete() {
        maybeHandleIOException(
            () -> "Error while deleting log for " + topicPartition() + " in dir " + dir().getParent(),
            () -> {
                synchronized (lock) {
                    localLog.checkIfMemoryMappedBufferClosed();
                    // Stop the periodic producer-expiration task before tearing down state.
                    producerExpireCheck.cancel(true);
                    leaderEpochCache.clear();
                    List<LogSegment> deletedSegments = localLog.deleteAllSegments();
                    deleteProducerSnapshots(deletedSegments, false);
                    localLog.deleteEmptyDir();
                }
                return null;
            });
    }
    // visible for testing
    /** Takes a producer state snapshot at the current map end offset, under the log lock. */
    public void takeProducerSnapshot() throws IOException {
        synchronized (lock) {
            localLog.checkIfMemoryMappedBufferClosed();
            producerStateManager.takeSnapshot();
        }
    }
    // visible for testing
    /** @return the offset of the newest producer state snapshot, if any exists. */
    public OptionalLong latestProducerSnapshotOffset() {
        synchronized (lock) {
            return producerStateManager.latestSnapshotOffset();
        }
    }
    // visible for testing
    /** @return the offset of the oldest producer state snapshot, if any exists. */
    public OptionalLong oldestProducerSnapshotOffset() {
        synchronized (lock) {
            return producerStateManager.oldestSnapshotOffset();
        }
    }
    // visible for testing
    /** @return the producer state manager's current map end offset. */
    public long latestProducerStateEndOffset() {
        synchronized (lock) {
            return producerStateManager.mapEndOffset();
        }
    }
// visible for testing
public void flushProducerStateSnapshot(Path snapshot) {
maybeHandleIOException(
() -> "Error while deleting producer state snapshot " + snapshot + " for " + topicPartition() + " in dir " + dir().getParent(),
() -> {
Utils.flushFileIfExists(snapshot);
return null;
});
}
    /**
     * Truncate this log so that it ends with the greatest offset < targetOffset.
     *
     * @param targetOffset The offset to truncate to, an upper bound on all offsets in the log after truncation is complete.
     * @return True if targetOffset < logEndOffset
     */
    public boolean truncateTo(long targetOffset) {
        return maybeHandleIOException(
            () -> "Error while truncating log to offset " + targetOffset + " for " + topicPartition() + " in dir " + dir().getParent(),
            () -> {
                if (targetOffset < 0) {
                    throw new IllegalArgumentException("Cannot truncate partition " + topicPartition() + " to a negative offset (" + targetOffset + ").");
                }
                long hwm = highWatermark();
                // Truncating below the HW is legal (e.g. unclean leader election) but noteworthy.
                if (targetOffset < hwm) {
                    logger.warn("Truncating {}{} to offset {} below high watermark {}", isFuture() ? "future " : "", topicPartition(), targetOffset, hwm);
                }
                if (targetOffset >= localLog.logEndOffset()) {
                    logger.info("Truncating to {} has no effect as the largest offset in the log is {}", targetOffset, localLog.logEndOffset() - 1);
                    // Always truncate epoch cache since we may have a conflicting epoch entry at the
                    // end of the log from the leader. This could happen if this broker was a leader
                    // and inserted the first start offset entry, but then failed to append any entries
                    // before another leader was elected.
                    synchronized (lock) {
                        leaderEpochCache.truncateFromEndAsyncFlush(logEndOffset());
                    }
                    return false;
                } else {
                    logger.info("Truncating to offset {}", targetOffset);
                    synchronized (lock) {
                        localLog.checkIfMemoryMappedBufferClosed();
                        // If the target precedes the first segment, the whole log must be reset.
                        if (localLog.segments().firstSegmentBaseOffset().getAsLong() > targetOffset) {
                            truncateFullyAndStartAt(targetOffset, Optional.empty());
                        } else {
                            Collection<LogSegment> deletedSegments = localLog.truncateTo(targetOffset);
                            deleteProducerSnapshots(deletedSegments, true);
                            leaderEpochCache.truncateFromEndAsyncFlush(targetOffset);
                            logStartOffset = Math.min(targetOffset, logStartOffset);
                            rebuildProducerState(targetOffset, producerStateManager);
                            // Pull the high watermark back if it now exceeds the shortened log.
                            if (highWatermark() >= localLog.logEndOffset())
                                updateHighWatermark(localLog.logEndOffsetMetadata());
                        }
                        return true;
                    }
                }
            });
    }
    /**
     * Delete all data in the log and start at the new offset
     *
     * @param newOffset The new offset to start the log with
     * @param logStartOffsetOpt The log start offset to set for the log. If None, the new offset will be used.
     */
    public void truncateFullyAndStartAt(long newOffset, Optional<Long> logStartOffsetOpt) {
        maybeHandleIOException(
            () -> "Error while truncating the entire log for " + topicPartition() + " in dir " + dir().getParent(),
            () -> {
                logger.debug("Truncate and start at offset {}, logStartOffset: {}", newOffset, logStartOffsetOpt.orElse(newOffset));
                synchronized (lock) {
                    // Reset local segments, epoch cache and producer state to the new base offset.
                    localLog.truncateFullyAndStartAt(newOffset);
                    leaderEpochCache.clearAndFlush();
                    producerStateManager.truncateFullyAndStartAt(newOffset);
                    logStartOffset = logStartOffsetOpt.orElse(newOffset);
                    if (remoteLogEnabled()) localLogStartOffset = newOffset;
                    rebuildProducerState(newOffset, producerStateManager);
                    return updateHighWatermark(localLog.logEndOffsetMetadata());
                }
            });
    }
    /**
     * The time this log is last known to have been fully flushed to disk
     *
     * @return the local log's last flush timestamp
     */
    public long lastFlushTime() {
        return localLog.lastFlushTime();
    }
    /**
     * The active segment that is currently taking appends
     *
     * @return the last (active) segment of the local log
     */
    public LogSegment activeSegment() {
        return localLog.segments().activeSegment();
    }
    /**
     * All the log segments in this log ordered from oldest to newest
     *
     * @return an immutable snapshot taken under the log lock
     */
    public List<LogSegment> logSegments() {
        synchronized (lock) {
            return List.copyOf(localLog.segments().values());
        }
    }
    /**
     * Get all segments beginning with the segment that includes "from" and ending with the segment
     * that includes up to "to-1" or the end of the log (if to > logEndOffset).
     *
     * @return an immutable snapshot of the matching segments, taken under the log lock
     */
    public List<LogSegment> logSegments(long from, long to) {
        synchronized (lock) {
            return List.copyOf(localLog.segments().values(from, to));
        }
    }
    /** @return an immutable snapshot of the non-active segments starting at {@code from}. */
    public List<LogSegment> nonActiveLogSegmentsFrom(long from) {
        synchronized (lock) {
            return List.copyOf(localLog.segments().nonActiveLogSegmentsFrom(from));
        }
    }
@Override
public String toString() {
StringBuilder logString = new StringBuilder();
logString.append("Log(dir=");
logString.append(dir());
topicId.ifPresent(id -> {
logString.append(", topicId=");
logString.append(id);
});
logString.append(", topic=");
logString.append(topicPartition().topic());
logString.append(", partition=");
logString.append(topicPartition().partition());
logString.append(", highWatermark=");
logString.append(highWatermark());
logString.append(", lastStableOffset=");
logString.append(lastStableOffset());
logString.append(", logStartOffset=");
logString.append(logStartOffset());
logString.append(", logEndOffset=");
logString.append(logEndOffset());
logString.append(")");
return logString.toString();
}
    /**
     * Atomically (under the log lock) swaps {@code oldSegments} for {@code newSegments} and
     * asynchronously removes the replaced segments' producer snapshots.
     */
    public void replaceSegments(List<LogSegment> newSegments, List<LogSegment> oldSegments) throws IOException {
        synchronized (lock) {
            localLog.checkIfMemoryMappedBufferClosed();
            List<LogSegment> deletedSegments = LocalLog.replaceSegments(localLog.segments(), newSegments, oldSegments, dir(), topicPartition(),
                config(), scheduler(), logDirFailureChannel(), logIdent, false);
            deleteProducerSnapshots(deletedSegments, true);
        }
    }
    /**
     * This function does not acquire Log.lock. The caller has to make sure log segments don't get deleted during
     * this call, and also protects against calling this function on the same segment in parallel.
     *
     * <p>Currently, it is used by LogCleaner threads on log compact non-active segments only with LogCleanerManager's lock
     * to ensure no other LogCleaner threads and retention thread can work on the same segment.
     *
     * @param segments the segments to inspect
     * @return the first-batch timestamp of each given segment, in iteration order
     */
    public Collection<Long> getFirstBatchTimestampForSegments(Collection<LogSegment> segments) {
        return segments.stream().map(LogSegment::getFirstBatchTimestamp).toList();
    }
/**
* Remove deleted log metrics
*/
public void removeLogMetrics() {
metricNames.forEach(metricsGroup::removeMetric);
metricNames.clear();
}
    // Runs `fun`, converting any IOException into a KafkaStorageException and signalling
    // the log-dir failure channel for this log's parent directory (see LocalLog).
    private <T> T maybeHandleIOException(Supplier<String> msg, StorageAction<T, IOException> fun) throws KafkaStorageException {
        return LocalLog.maybeHandleIOException(logDirFailureChannel(), parentDir(), msg, fun);
    }
    /**
     * Splits a segment whose relative offsets overflowed into multiple replacement segments,
     * then asynchronously deletes the producer snapshots of the segments removed by the split.
     *
     * @return the newly created segments
     */
    public List<LogSegment> splitOverflowedSegment(LogSegment segment) throws IOException {
        synchronized (lock) {
            LocalLog.SplitSegmentResult result = LocalLog.splitOverflowedSegment(segment, localLog.segments(), dir(), topicPartition(), config(), scheduler(), logDirFailureChannel(), logIdent);
            deleteProducerSnapshots(result.deletedSegments(), true);
            return result.newSegments();
        }
    }
    // Removes the producer snapshots belonging to the given segments, delegating to the
    // static helper with this log's collaborators.
    private void deleteProducerSnapshots(Collection<LogSegment> segments, boolean asyncDelete) throws IOException {
        UnifiedLog.deleteProducerSnapshots(segments, producerStateManager, asyncDelete, scheduler(), config(), logDirFailureChannel(), parentDir(), topicPartition());
    }
private static <T> Optional<T> findFirst(Iterable<T> iterable, Predicate<T> predicate) {
for (T item : iterable) {
if (predicate.test(item)) {
return Optional.of(item);
}
}
return Optional.empty();
}
    /**
     * Rebuilds producer state until the provided lastOffset. This function may be called from the
     * recovery code path, and thus must be free of all side effects, i.e. it must not update any
     * log-specific state.
     *
     * @param producerStateManager The {@link ProducerStateManager} instance to be rebuilt.
     * @param segments The segments of the log whose producer state is being rebuilt
     * @param logStartOffset The log start offset
     * @param lastOffset The last offset upto which the producer state needs to be rebuilt
     * @param time The time instance used for checking the clock
     * @param reloadFromCleanShutdown True if the producer state is being built after a clean shutdown, false otherwise.
     * @param logPrefix The logging prefix
     */
    public static void rebuildProducerState(ProducerStateManager producerStateManager,
                                            LogSegments segments,
                                            long logStartOffset,
                                            long lastOffset,
                                            Time time,
                                            boolean reloadFromCleanShutdown,
                                            String logPrefix) throws IOException {
        // Snapshot candidates: the base offsets of the last two segments plus the rebuild
        // target offset itself.
        List<Long> offsetsToSnapshot = new ArrayList<>();
        segments.lastSegment().ifPresent(lastSegment -> {
            long lastSegmentBaseOffset = lastSegment.baseOffset();
            segments.lowerSegment(lastSegmentBaseOffset).ifPresent(s -> offsetsToSnapshot.add(s.baseOffset()));
            offsetsToSnapshot.add(lastSegmentBaseOffset);
        });
        offsetsToSnapshot.add(lastOffset);
        LOG.info("{}Loading producer state till offset {}", logPrefix, lastOffset);
        // We want to avoid unnecessary scanning of the log to build the producer state when the broker is being
        // upgraded. The basic idea is to use the absence of producer snapshot files to detect the upgrade case,
        // but we have to be careful not to assume too much in the presence of broker failures. The most common
        // upgrade case in which we expect to find no snapshots is the following:
        //
        // * The broker has been upgraded, and we had a clean shutdown.
        //
        // If we hit this case, we skip producer state loading and write a new snapshot at the log end
        // offset (see below). The next time the log is reloaded, we will load producer state using this snapshot
        // (or later snapshots). Otherwise, if there is no snapshot file, then we have to rebuild producer state
        // from the first segment.
        if (producerStateManager.latestSnapshotOffset().isEmpty() && reloadFromCleanShutdown) {
            // To avoid an expensive scan through all the segments, we take empty snapshots from the start of the
            // last two segments and the last offset. This should avoid the full scan in the case that the log needs
            // truncation.
            for (long offset : offsetsToSnapshot) {
                producerStateManager.updateMapEndOffset(offset);
                producerStateManager.takeSnapshot();
            }
        } else {
            LOG.info("{}Reloading from producer snapshot and rebuilding producer state from offset {}", logPrefix, lastOffset);
            boolean isEmptyBeforeTruncation = producerStateManager.isEmpty() && producerStateManager.mapEndOffset() >= lastOffset;
            long producerStateLoadStart = time.milliseconds();
            producerStateManager.truncateAndReload(logStartOffset, lastOffset, time.milliseconds());
            long segmentRecoveryStart = time.milliseconds();
            // Only do the potentially expensive reloading if the last snapshot offset is lower than the log end
            // offset (which would be the case on first startup) and there were active producers prior to truncation
            // (which could be the case if truncating after initial loading). If there weren't, then truncating
            // shouldn't change that fact (although it could cause a producerId to expire earlier than expected),
            // and we can skip the loading. This is an optimization for users which are not yet using
            // idempotent/transactional features yet.
            if (lastOffset > producerStateManager.mapEndOffset() && !isEmptyBeforeTruncation) {
                Optional<LogSegment> segmentOfLastOffset = segments.floorSegment(lastOffset);
                for (LogSegment segment : segments.values(producerStateManager.mapEndOffset(), lastOffset)) {
                    long startOffset = Utils.max(segment.baseOffset(), producerStateManager.mapEndOffset(), logStartOffset);
                    producerStateManager.updateMapEndOffset(startOffset);
                    if (offsetsToSnapshot.contains(segment.baseOffset())) {
                        producerStateManager.takeSnapshot();
                    }
                    // Bound the read at lastOffset when it falls inside this segment.
                    int maxPosition = segment.size();
                    if (segmentOfLastOffset.isPresent() && segmentOfLastOffset.get() == segment) {
                        FileRecords.LogOffsetPosition lop = segment.translateOffset(lastOffset);
                        maxPosition = lop != null ? lop.position : segment.size();
                    }
                    FetchDataInfo fetchDataInfo = segment.read(startOffset, Integer.MAX_VALUE, maxPosition);
                    if (fetchDataInfo != null) {
                        loadProducersFromRecords(producerStateManager, fetchDataInfo.records);
                    }
                }
            }
            producerStateManager.updateMapEndOffset(lastOffset);
            producerStateManager.takeSnapshot();
            LOG.info("{}Producer state recovery took {}ms for snapshot load and {}ms for segment recovery from offset {}",
                logPrefix, segmentRecoveryStart - producerStateLoadStart, time.milliseconds() - segmentRecoveryStart, lastOffset);
        }
    }
    /**
     * Removes the producer snapshot files associated with the given segments and deletes them,
     * either inline or — when {@code asyncDelete} is true — on the scheduler after
     * {@code config.fileDeleteDelayMs}.
     */
    public static void deleteProducerSnapshots(Collection<LogSegment> segments,
                                               ProducerStateManager producerStateManager,
                                               boolean asyncDelete,
                                               Scheduler scheduler,
                                               LogConfig config,
                                               LogDirFailureChannel logDirFailureChannel,
                                               String parentDir,
                                               TopicPartition topicPartition) throws IOException {
        List<SnapshotFile> snapshotsToDelete = new ArrayList<>();
        for (LogSegment segment : segments) {
            Optional<SnapshotFile> snapshotFile = producerStateManager.removeAndMarkSnapshotForDeletion(segment.baseOffset());
            snapshotFile.ifPresent(snapshotsToDelete::add);
        }
        Runnable deleteProducerSnapshots = () -> deleteProducerSnapshots(snapshotsToDelete, logDirFailureChannel, parentDir, topicPartition);
        if (asyncDelete) {
            scheduler.scheduleOnce("delete-producer-snapshot", deleteProducerSnapshots, config.fileDeleteDelayMs);
        } else {
            deleteProducerSnapshots.run();
        }
    }
    // Physically deletes the given snapshot files, routing any IOException through the
    // log-dir failure channel for `parentDir`.
    private static void deleteProducerSnapshots(List<SnapshotFile> snapshotsToDelete, LogDirFailureChannel logDirFailureChannel, String parentDir, TopicPartition topicPartition) {
        LocalLog.maybeHandleIOException(
            logDirFailureChannel,
            parentDir,
            () -> "Error while deleting producer state snapshots for " + topicPartition + " in dir " + parentDir,
            () -> {
                for (SnapshotFile snapshotFile : snapshotsToDelete) {
                    snapshotFile.deleteIfExists();
                }
                return null;
            });
    }
    // Replays all batches carrying a producerId into per-producer append state, then applies
    // the accumulated producer entries and completed transactions to the state manager.
    private static void loadProducersFromRecords(ProducerStateManager producerStateManager, Records records) {
        Map<Long, ProducerAppendInfo> loadedProducers = new HashMap<>();
        final List<CompletedTxn> completedTxns = new ArrayList<>();
        records.batches().forEach(batch -> {
            if (batch.hasProducerId()) {
                Optional<CompletedTxn> maybeCompletedTxn = updateProducers(
                    producerStateManager,
                    batch,
                    loadedProducers,
                    Optional.empty(),
                    AppendOrigin.REPLICATION,
                    (short) 0);
                maybeCompletedTxn.ifPresent(completedTxns::add);
            }
        });
        loadedProducers.values().forEach(producerStateManager::update);
        completedTxns.forEach(producerStateManager::completeTxn);
    }
    /**
     * Applies a record batch to the per-producer append state in {@code producers}, creating a
     * {@link ProducerAppendInfo} for the batch's producerId on first sight, and clears the
     * verification state entry for transactional batches when appropriate.
     *
     * @return the transaction completed by this batch, if any
     */
    public static Optional<CompletedTxn> updateProducers(ProducerStateManager producerStateManager,
                                                         RecordBatch batch,
                                                         Map<Long, ProducerAppendInfo> producers,
                                                         Optional<LogOffsetMetadata> firstOffsetMetadata,
                                                         AppendOrigin origin,
                                                         short transactionVersion) {
        long producerId = batch.producerId();
        ProducerAppendInfo appendInfo = producers.computeIfAbsent(producerId, __ -> producerStateManager.prepareUpdate(producerId, origin));
        Optional<CompletedTxn> completedTxn = appendInfo.append(batch, firstOffsetMetadata, transactionVersion);
        // Whether we wrote a control marker or a data batch, we may be able to remove VerificationGuard since either the transaction is complete or we have a first offset.
        if (batch.isTransactional()) {
            VerificationStateEntry entry = producerStateManager.verificationStateEntry(producerId);
            // The only case we should not remove the verification guard is if the marker was a control marker, we are using TV2 and the epochs match.
            // This is safe because we always bump epoch upon upgrading to TV2.
            boolean isV2NextTransactionStarted = entry != null && entry.supportsEpochBump() && batch.isControlBatch() && batch.producerEpoch() == entry.epoch();
            if (!isV2NextTransactionStarted)
                producerStateManager.clearVerificationStateEntry(producerId);
        }
        return completedTxn;
    }
public static boolean isRemoteLogEnabled(boolean remoteStorageSystemEnable, LogConfig config, String topic) {
// Remote log is enabled only for non-compact and non-internal topics
return remoteStorageSystemEnable &&
!(config.compact || Topic.isInternal(topic)
|| TopicBasedRemoteLogMetadataManagerConfig.REMOTE_LOG_METADATA_TOPIC_NAME.equals(topic)
|| Topic.CLUSTER_METADATA_TOPIC_NAME.equals(topic)) &&
config.remoteStorageEnable();
}
// Visible for benchmarking
public static LogValidator.MetricsRecorder newValidatorMetricsRecorder(BrokerTopicMetrics allTopicsStats) {
return new LogValidator.MetricsRecorder() {
public void recordInvalidMagic() {
allTopicsStats.invalidMagicNumberRecordsPerSec().mark();
}
public void recordInvalidOffset() {
allTopicsStats.invalidOffsetOrSequenceRecordsPerSec().mark();
}
public void recordInvalidSequence() {
allTopicsStats.invalidOffsetOrSequenceRecordsPerSec().mark();
}
public void recordInvalidChecksums() {
allTopicsStats.invalidMessageCrcRecordsPerSec().mark();
}
public void recordNoKeyCompactedTopic() {
allTopicsStats.noKeyCompactedTopicRecordsPerSec().mark();
}
};
}
/**
* Create a new LeaderEpochFileCache instance and load the epoch entries from the backing checkpoint file or
* the provided currentCache (if not empty).
*
* @param dir The directory in which the log will reside
* @param topicPartition The topic partition
* @param logDirFailureChannel The LogDirFailureChannel to asynchronously handle log dir failure
* @param currentCache The current LeaderEpochFileCache instance (if any)
* @param scheduler The scheduler for executing asynchronous tasks
* @return The new LeaderEpochFileCache instance
*/
public static LeaderEpochFileCache createLeaderEpochCache(File dir,
TopicPartition topicPartition,
LogDirFailureChannel logDirFailureChannel,
Optional<LeaderEpochFileCache> currentCache,
Scheduler scheduler) throws IOException {
File leaderEpochFile = LeaderEpochCheckpointFile.newFile(dir);
LeaderEpochCheckpointFile checkpointFile = new LeaderEpochCheckpointFile(leaderEpochFile, logDirFailureChannel);
return currentCache.map(cache -> cache.withCheckpoint(checkpointFile))
.orElse(new LeaderEpochFileCache(topicPartition, checkpointFile, scheduler));
}
    // Creates a new empty segment in `dir` at `baseOffset`; used during log compaction.
    public static LogSegment createNewCleanedSegment(File dir, LogConfig logConfig, long baseOffset) throws IOException {
        return LocalLog.createNewCleanedSegment(dir, logConfig, baseOffset);
    }
    // When remote storage with copy is enabled, only the local retention setting bounds
    // local segments; otherwise the regular retention setting applies.
    public static long localRetentionMs(LogConfig config, boolean remoteLogEnabledAndRemoteCopyEnabled) {
        return remoteLogEnabledAndRemoteCopyEnabled ? config.localRetentionMs() : config.retentionMs;
    }
    // Size analogue of localRetentionMs: local retention bytes vs. regular retention bytes.
    public static long localRetentionSize(LogConfig config, boolean remoteLogEnabledAndRemoteCopyEnabled) {
        return remoteLogEnabledAndRemoteCopyEnabled ? config.localRetentionBytes() : config.retentionSize;
    }
    // Directory name used while a partition's log is being deleted.
    public static String logDeleteDirName(TopicPartition topicPartition) {
        return LocalLog.logDeleteDirName(topicPartition);
    }
    // Directory name used for a future (being-moved) replica of the partition.
    public static String logFutureDirName(TopicPartition topicPartition) {
        return LocalLog.logFutureDirName(topicPartition);
    }
    // Directory name used for a stray (orphaned) replica of the partition.
    public static String logStrayDirName(TopicPartition topicPartition) {
        return LocalLog.logStrayDirName(topicPartition);
    }
    // Regular directory name for the partition's log.
    public static String logDirName(TopicPartition topicPartition) {
        return LocalLog.logDirName(topicPartition);
    }
    // File handle for the transaction index of the segment at `offset`, with the given suffix.
    public static File transactionIndexFile(File dir, long offset, String suffix) {
        return LogFileUtils.transactionIndexFile(dir, offset, suffix);
    }
    // Parses the base offset encoded in a segment/index file name.
    public static long offsetFromFile(File file) {
        return LogFileUtils.offsetFromFile(file);
    }
    // Total size in bytes of the given segments.
    public static long sizeInBytes(Collection<LogSegment> segments) {
        return LogSegments.sizeInBytes(segments);
    }
    // Parses the topic and partition from a log directory name.
    public static TopicPartition parseTopicPartitionName(File dir) throws IOException {
        return LocalLog.parseTopicPartitionName(dir);
    }
}
|
DeletionCondition
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/BasicTypeRegistry.java
|
{
"start": 1802,
"end": 14893
}
|
/**
 * A registry of {@link BasicType} instances, keyed by registration name, owned by a
 * {@link TypeConfiguration}. Types may be registered eagerly as {@link BasicType}s, or
 * lazily as {@link BasicTypeReference}s which are materialized on first lookup. The
 * registry is "primed" once at bootstrap; {@link #register} is only legal afterwards,
 * and {@link #addPrimeEntry} only before.
 */
class ____ implements Serializable {
	private final TypeConfiguration typeConfiguration;
	// Flipped to true once priming completes; see isPrimed()/primed().
	private boolean primed;
	// Fully-built types, keyed by registration name.
	private final Map<String, BasicType<?>> typesByName = new ConcurrentHashMap<>();
	// Lazy type references; resolved into typesByName on first lookup.
	private final Map<String, BasicTypeReference<?>> typeReferencesByName = new ConcurrentHashMap<>();
	public BasicTypeRegistry(TypeConfiguration typeConfiguration){
		this.typeConfiguration = typeConfiguration;
	}
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// Access
	private JavaTypeRegistry getJavaTypeRegistry() {
		return typeConfiguration.getJavaTypeRegistry();
	}
	private JdbcTypeRegistry getJdbcTypeRegistry() {
		return typeConfiguration.getJdbcTypeRegistry();
	}
	// Look up a registered type by name, materializing a lazy type reference if necessary.
	public @Nullable BasicType<?> getRegisteredType(String key) {
		var basicType = typesByName.get( key );
		if ( basicType == null ) {
			basicType = resolveTypeReference( key );
		}
		return basicType;
	}
	// Materialize the BasicTypeReference registered under `name`, if any, caching the result.
	private @Nullable BasicType<?> resolveTypeReference(String name) {
		final var typeReference = typeReferencesByName.get( name );
		if ( typeReference == null ) {
			return null;
		}
		else if ( !name.equals( typeReference.getName() ) ) {
			// `name` is an alias: the type may already have been built under its primary name
			final var basicType = typesByName.get( typeReference.getName() );
			if ( basicType != null ) {
				return basicType;
			}
		}
		return createBasicType( name, typeReference );
	}
	// Build the BasicType described by the reference and cache it under both its primary
	// name and the requested alias.
	private <T> BasicType<T> createBasicType(String name, BasicTypeReference<T> typeReference) {
		final var javaType = getJavaTypeRegistry().resolveDescriptor( typeReference.getJavaType() );
		final var jdbcType = getJdbcTypeRegistry().getDescriptor( typeReference.getSqlTypeCode() );
		final var createdType = createBasicType( typeReference, javaType, jdbcType );
		typesByName.put( typeReference.getName(), createdType );
		typesByName.put( name, createdType );
		return createdType;
	}
	// Instantiate the concrete BasicType for a reference, honoring its converter and
	// forced-immutability settings.
	private static <T> BasicType<T> createBasicType(
			BasicTypeReference<T> typeReference, JavaType<T> javaType, JdbcType jdbcType) {
		final String name = typeReference.getName();
		if ( typeReference.getConverter() == null ) {
			return typeReference.isForceImmutable()
					? new ImmutableNamedBasicTypeImpl<>( javaType, jdbcType, name )
					: new NamedBasicTypeImpl<>( javaType, jdbcType, name );
		}
		else {
			final var converter = typeReference.getConverter();
			assert javaType == converter.getDomainJavaType();
			return typeReference.isForceImmutable()
					? new CustomMutabilityConvertedBasicTypeImpl<>( name, jdbcType, converter,
							ImmutableMutabilityPlan.instance() )
					: new ConvertedBasicTypeImpl<>( name, jdbcType, converter );
		}
	}
	public @Nullable BasicType<?> getRegisteredType(java.lang.reflect.Type javaType) {
		return getRegisteredType( javaType.getTypeName() );
	}
	public <J> @Nullable BasicType<J> getRegisteredType(Class<J> javaType) {
		//noinspection unchecked
		return (BasicType<J>) getRegisteredType( javaType.getTypeName() );
	}
	// Look up the registered array type for the given element type, e.g. "java.lang.String[]".
	public @Nullable BasicType<?> getRegisteredArrayType(java.lang.reflect.Type javaElementType) {
		return getRegisteredType( javaElementType.getTypeName() + "[]" );
	}
	public <J> @Nullable BasicType<J> resolve(BasicTypeReference<J> basicTypeReference) {
		//noinspection unchecked
		return (BasicType<J>) getRegisteredType( basicTypeReference.getName() );
	}
	public <J> BasicType<J> resolve(Class<J> javaType, int sqlTypeCode) {
		return resolve( getJavaTypeRegistry().resolveDescriptor( javaType ), sqlTypeCode );
	}
	public BasicType<?> resolve(java.lang.reflect.Type javaType, int sqlTypeCode) {
		return resolve( getJavaTypeRegistry().getDescriptor( javaType ), sqlTypeCode );
	}
	public <J> BasicType<J> resolve(JavaType<J> javaType, int sqlTypeCode) {
		return resolve( javaType, getJdbcTypeRegistry().getDescriptor( sqlTypeCode ) );
	}
	/**
	 * Find an existing {@link BasicType} registration for the given {@link JavaType}
	 * descriptor and {@link JdbcType} descriptor combo or create (and register) one.
	 */
	public <J> BasicType<J> resolve(JavaType<J> javaType, JdbcType jdbcType) {
		return resolve( javaType, jdbcType, () -> resolvedType( javaType, jdbcType ) );
	}
	// Default creator for resolve(): plural java type + array JDBC type pairs are routed
	// through the plural-type resolution path; everything else is a plain BasicTypeImpl.
	private <J> BasicType<J> resolvedType(JavaType<J> javaType, JdbcType jdbcType) {
		if ( javaType instanceof BasicPluralJavaType<?> pluralJavaType
				&& jdbcType instanceof ArrayJdbcType arrayType ) {
			//noinspection unchecked
			return (BasicType<J>) resolvedType( arrayType, pluralJavaType );
		}
		else {
			return new BasicTypeImpl<>( javaType, jdbcType );
		}
	}
	// Resolve a plural (array) type from its element type, registering the result when it
	// is a BasicPluralType. The anonymous ColumnTypeInformation/indicator instances pin
	// the array's default SQL type code during resolution.
	private <E> BasicType<?> resolvedType(ArrayJdbcType arrayType, BasicPluralJavaType<E> castPluralJavaType) {
		final var elementType = resolve( castPluralJavaType.getElementJavaType(), arrayType.getElementJdbcType() );
		final var indicators = typeConfiguration.getCurrentBaseSqlTypeIndicators();
		final var resolvedType = castPluralJavaType.resolveType(
				typeConfiguration,
				indicators.getDialect(),
				elementType,
				new ColumnTypeInformation() {
					@Override
					public Boolean getNullable() {
						return null;
					}
					@Override
					public int getTypeCode() {
						return arrayType.getDefaultSqlTypeCode();
					}
					@Override
					public String getTypeName() {
						return null;
					}
					@Override
					public int getColumnSize() {
						return 0;
					}
					@Override
					public int getDecimalDigits() {
						return 0;
					}
				},
				new DelegatingJdbcTypeIndicators( indicators ) {
					@Override
					public Integer getExplicitJdbcTypeCode() {
						return arrayType.getDefaultSqlTypeCode();
					}
					@Override
					public int getPreferredSqlTypeCodeForArray() {
						return arrayType.getDefaultSqlTypeCode();
					}
					@Override
					public int getPreferredSqlTypeCodeForArray(int elementSqlTypeCode) {
						return arrayType.getDefaultSqlTypeCode();
					}
				}
		);
		if ( resolvedType instanceof BasicPluralType<?,?> ) {
			register( resolvedType );
		}
		else if ( resolvedType == null ) {
			if ( isNestedArray( elementType ) ) {
				// No support for nested arrays, except for byte[][]
				throw new MappingException( "Nested arrays (with the exception of byte[][]) are not supported" );
			}
		}
		return resolvedType;
	}
	private static boolean isNestedArray(BasicType<?> elementType) {
		final var elementJavaTypeClass = elementType.getJavaTypeDescriptor().getJavaTypeClass();
		return elementJavaTypeClass != null
				&& elementJavaTypeClass.isArray()
				&& elementJavaTypeClass != byte[].class;
	}
	public <J> BasicType<J> resolve(JavaType<J> javaType, JdbcType jdbcType, String baseTypeName) {
		return resolve( javaType, jdbcType, () -> new NamedBasicTypeImpl<>( javaType, jdbcType, baseTypeName ) );
	}
	/**
	 * Find an existing BasicType registration for the given JavaType and
	 * JdbcType combo or create (and register) one.
	 */
	public <J> BasicType<J> resolve(JavaType<J> javaType, JdbcType jdbcType, Supplier<BasicType<J>> creator) {
		return createIfUnregistered( javaType, jdbcType, creator );
	}
	private <J> BasicType<J> createIfUnregistered(
			JavaType<J> javaType,
			JdbcType jdbcType,
			Supplier<BasicType<J>> creator) {
		// Before simply creating the type, we try to find if there is a registered type for this java type,
		// and if so, if the jdbc type descriptor matches. Unless it does, we at least reuse the name
		final var registeredType = getRegisteredType( javaType.getJavaTypeClass() );
		if ( registeredTypeMatches( javaType, jdbcType, registeredType ) ) {
			return castNonNull( registeredType );
		}
		else {
			final var createdType = creator.get();
			register( javaType, jdbcType, createdType );
			return createdType;
		}
	}
	// Identity comparison on both descriptors: only an exact match may be reused.
	private static <J> boolean registeredTypeMatches(JavaType<J> javaType, JdbcType jdbcType, BasicType<J> registeredType) {
		return registeredType != null
				&& registeredType.getJdbcType() == jdbcType
				&& registeredType.getMappedJavaType() == javaType;
	}
	private <J> void register(JavaType<J> javaType, JdbcType jdbcType, BasicType<J> createdType) {
		if ( createdType != null ) {
			// if we are still building mappings, register this adhoc
			// type via a unique code. (This is to support Envers.)
			try {
				getBootstrapContext().registerAdHocBasicType( createdType );
			}
			catch (Exception ignore) {
				// best effort: outside of the bootstrap phase there is no context to register with
			}
		}
	}
	private BootstrapContext getBootstrapContext() {
		return typeConfiguration.getMetadataBuildingContext().getBootstrapContext();
	}
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// Mutations
	public void register(BasicType<?> type) {
		register( type, type.getRegistrationKeys() );
	}
	public void register(BasicType<?> type, String key) {
		register( type, new String[]{ key } );
	}
	// Register a type under explicit keys; only legal after priming is complete.
	public void register(BasicType<?> type, String... keys) {
		if ( ! isPrimed() ) {
			throw new IllegalStateException( "BasicTypeRegistry not yet primed. Calls to `#register` not valid until after primed" );
		}
		if ( type == null ) {
			throw new HibernateException( "Type to register cannot be null" );
		}
		// explicit registration keys
		if ( isEmpty( keys ) ) {
			CORE_LOGGER.typeDefinedNoRegistrationKeys( type );
		}
		else {
			applyRegistrationKeys( type, keys );
		}
	}
	// Wrap the UserType in a CustomType and register it under the given keys.
	public <T> CustomType<T> register(UserType<T> type, String... keys) {
		final var customType = new CustomType<>( type, keys, typeConfiguration );
		register( customType );
		return customType;
	}
	public void unregister(String... keys) {
		for ( String key : keys ) {
			typesByName.remove( key );
		}
	}
	// Register additional alias keys pointing at an already-registered type reference.
	@Internal
	public void addTypeReferenceRegistrationKey(String typeReferenceKey, String... additionalTypeReferenceKeys) {
		final var basicTypeReference = typeReferencesByName.get( typeReferenceKey );
		if ( basicTypeReference == null ) {
			throw new IllegalArgumentException( "Couldn't find type reference with name: " + typeReferenceKey );
		}
		for ( String additionalTypeReferenceKey : additionalTypeReferenceKeys ) {
			typeReferencesByName.put( additionalTypeReferenceKey, basicTypeReference );
		}
	}
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// priming
	public boolean isPrimed() {
		return primed;
	}
	public void primed() {
		this.primed = true;
	}
	// Seed the registry with an eager type during bootstrap; only legal before priming completes.
	public void addPrimeEntry(BasicType<?> type, String legacyTypeClassName, String[] registrationKeys) {
		if ( primed ) {
			throw new IllegalStateException( "BasicTypeRegistry already primed" );
		}
		if ( type == null ) {
			throw new HibernateException( "Type to register cannot be null" );
		}
		// Legacy name registration
		if ( isNotEmpty( legacyTypeClassName ) ) {
			typesByName.put( legacyTypeClassName, type );
		}
		// explicit registration keys
		if ( registrationKeys == null || registrationKeys.length == 0 ) {
			CORE_LOGGER.typeDefinedNoRegistrationKeys( type );
		}
		else {
			applyRegistrationKeys( type, registrationKeys );
		}
	}
	// Seed the registry with a lazy type reference during bootstrap; only legal before priming completes.
	public void addPrimeEntry(BasicTypeReference<?> type, String legacyTypeClassName, String[] registrationKeys) {
		if ( primed ) {
			throw new IllegalStateException( "BasicTypeRegistry already primed" );
		}
		if ( type == null ) {
			throw new HibernateException( "Type to register cannot be null" );
		}
		// Legacy name registration
		if ( isNotEmpty( legacyTypeClassName ) ) {
			typeReferencesByName.put( legacyTypeClassName, type );
		}
		// explicit registration keys
		if ( registrationKeys == null || registrationKeys.length == 0 ) {
			CORE_LOGGER.typeDefinedNoRegistrationKeys( type );
		}
		else {
			applyRegistrationKeys( type, registrationKeys );
		}
	}
	private void applyRegistrationKeys(BasicType<?> type, String[] keys) {
		for ( String key : keys ) {
			if ( key != null ) {
				// Use String.intern here as there's a high probability of duplicates combined with long-term usage:
				// just running our testsuite would generate 210,000 instances for the String "java.lang.Class" alone.
				// Incidentally, this might also help with map lookup efficiency.
				key = key.intern();
				// Incredibly verbose logging disabled
				// LOG.tracef( "Adding type registration %s -> %s", key, type );
				final Type old = typesByName.put( key, type );
				// if ( old != null && old != type ) {
				// LOG.tracef(
				// "Type registration key [%s] overrode previous entry : `%s`",
				// key,
				// old
				// );
				// }
			}
		}
	}
	private void applyRegistrationKeys(BasicTypeReference<?> type, String[] keys) {
		for ( String key : keys ) {
			if ( key != null ) {
				// Use String.intern here as there's a high probability of duplicates combined with long-term usage:
				// just running our testsuite would generate 210,000 instances for the String "java.lang.Class" alone.
				// Incidentally, this might also help with map lookup efficiency.
				key = key.intern();
				// Incredibly verbose logging disabled
				// LOG.tracef( "Adding type registration %s -> %s", key, type );
				final BasicTypeReference<?> old = typeReferencesByName.put( key, type );
				// if ( old != null && old != type ) {
				// LOG.tracef(
				// "Type registration key [%s] overrode previous entry : `%s`",
				// key,
				// old
				// );
				// }
			}
		}
	}
}
|
BasicTypeRegistry
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/events/StreamsOnTasksAssignedCallbackNeededEvent.java
|
{
"start": 968,
"end": 1602
}
|
/**
 * Background event signalling that the Streams {@code onTasksAssigned} rebalance callback
 * must be invoked by the application thread for the given task assignment.
 */
class ____ extends CompletableBackgroundEvent<Void> {
    private final StreamsRebalanceData.Assignment assignment;
    // Long.MAX_VALUE deadline — presumably "no deadline" so the user callback may take
    // arbitrarily long; confirm against CompletableBackgroundEvent's constructor contract.
    public StreamsOnTasksAssignedCallbackNeededEvent(StreamsRebalanceData.Assignment assignment) {
        super(Type.STREAMS_ON_TASKS_ASSIGNED_CALLBACK_NEEDED, Long.MAX_VALUE);
        this.assignment = Objects.requireNonNull(assignment);
    }
    public StreamsRebalanceData.Assignment assignment() {
        return assignment;
    }
    @Override
    protected String toStringBase() {
        return super.toStringBase() +
            ", assignment=" + assignment;
    }
}
|
StreamsOnTasksAssignedCallbackNeededEvent
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JGroupsEndpointBuilderFactory.java
|
{
"start": 13997,
"end": 14299
}
|
/**
 * Advanced builder covering both consumer and producer sides of the JGroups endpoint.
 * NOTE: generated code — do not edit by hand.
 */
interface ____
        extends
            AdvancedJGroupsEndpointConsumerBuilder,
            AdvancedJGroupsEndpointProducerBuilder {
    // Switches back to the basic (non-advanced) builder view.
    default JGroupsEndpointBuilder basic() {
        return (JGroupsEndpointBuilder) this;
    }
}
public
|
AdvancedJGroupsEndpointBuilder
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/reflect/TypeTokenTest.java
|
{
"start": 65151,
"end": 65907
}
|
class ____<Y extends Sub2<Y>> extends BaseWithTypeVar<List<Y>> {}
}
ParameterizedType subtype =
(ParameterizedType)
new TypeToken<BaseWithTypeVar<List<?>>>() {}.getSubtype(Outer.Sub.class).getType();
assertEquals(Outer.Sub.class, subtype.getRawType());
assertThat(subtype.getActualTypeArguments()[0]).isInstanceOf(WildcardType.class);
ParameterizedType owner = (ParameterizedType) subtype.getOwnerType();
assertEquals(Outer.class, owner.getRawType());
// This returns a strange ? extends Sub2<Y> type, which isn't ideal.
TypeToken<?> unused = new TypeToken<BaseWithTypeVar<List<?>>>() {}.getSubtype(Outer.Sub2.class);
}
public void testGetSubtype_subtypeSameAsDeclaringType() throws Exception {
|
Sub2
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/ArrayType.java
|
{
"start": 1608,
"end": 4368
}
|
/**
 * Logical type of an array of elements that all share the same element type. Conversion
 * is supported from/to Java arrays whose component type is itself convertible, plus the
 * classes named in {@code INPUT_OUTPUT_CONVERSION} (and, for input only, {@link List}).
 */
class ____ extends LogicalType {
    private static final long serialVersionUID = 1L;

    public static final String FORMAT = "ARRAY<%s>";

    private static final Set<String> INPUT_OUTPUT_CONVERSION =
            conversionSet(List.class.getName(), ArrayData.class.getName());

    private final LogicalType elementType;

    public ArrayType(boolean isNullable, LogicalType elementType) {
        super(isNullable, LogicalTypeRoot.ARRAY);
        this.elementType =
                Preconditions.checkNotNull(elementType, "Element type must not be null.");
    }

    public ArrayType(LogicalType elementType) {
        this(true, elementType);
    }

    public LogicalType getElementType() {
        return elementType;
    }

    @Override
    public LogicalType copy(boolean isNullable) {
        // Deep-copy the element type so the copy is fully independent of this instance.
        return new ArrayType(isNullable, elementType.copy());
    }

    @Override
    public String asSummaryString() {
        return withNullability(FORMAT, elementType.asSummaryString());
    }

    @Override
    public String asSerializableString() {
        return withNullability(FORMAT, elementType.asSerializableString());
    }

    @Override
    public boolean supportsInputConversion(Class<?> clazz) {
        // List subclasses and the registered conversion classes are always accepted;
        // array classes are accepted when the element type accepts their component type.
        return List.class.isAssignableFrom(clazz)
                || INPUT_OUTPUT_CONVERSION.contains(clazz.getName())
                || (clazz.isArray()
                        && elementType.supportsInputConversion(clazz.getComponentType()));
    }

    @Override
    public boolean supportsOutputConversion(Class<?> clazz) {
        // Same rule as input conversion, except plain List subclasses are not supported.
        return INPUT_OUTPUT_CONVERSION.contains(clazz.getName())
                || (clazz.isArray()
                        && elementType.supportsOutputConversion(clazz.getComponentType()));
    }

    @Override
    public Class<?> getDefaultConversion() {
        return Array.newInstance(elementType.getDefaultConversion(), 0).getClass();
    }

    @Override
    public List<LogicalType> getChildren() {
        return Collections.singletonList(elementType);
    }

    @Override
    public <R> R accept(LogicalTypeVisitor<R> visitor) {
        return visitor.visit(this);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass() || !super.equals(o)) {
            return false;
        }
        return elementType.equals(((ArrayType) o).elementType);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), elementType);
    }
}
|
ArrayType
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/nullPrecedence/SupportingNativelyDialectTest.java
|
{
"start": 577,
"end": 652
}
|
// Runs the shared null-precedence test suite against a dialect that natively supports
// NULLS FIRST/LAST ordering; all test cases live in the parent class.
class ____ extends AbstractNullPrecedenceTest {
}
|
SupportingNativelyDialectTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/jmx/export/assembler/MethodExclusionMBeanInfoAssembler.java
|
{
"start": 1144,
"end": 2215
}
|
interface ____ be exposed to
* JMX. JavaBean getters and setters will automatically be exposed as JMX attributes.
*
* <p>You can supply an array of method names via the {@code ignoredMethods}
* property. If you have multiple beans and you wish each bean to use a different
* set of method names, then you can map bean keys (that is the name used to pass
* the bean to the {@code MBeanExporter}) to a list of method names using the
* {@code ignoredMethodMappings} property.
*
* <p>If you specify values for both {@code ignoredMethodMappings} and
* {@code ignoredMethods}, Spring will attempt to find method names in the
* mappings first. If no method names for the bean are found, it will use the
* method names defined by {@code ignoredMethods}.
*
* @author Rob Harrop
* @author Seth Ladd
* @since 1.2.5
* @see #setIgnoredMethods
* @see #setIgnoredMethodMappings
* @see InterfaceBasedMBeanInfoAssembler
* @see SimpleReflectiveMBeanInfoAssembler
* @see MethodNameBasedMBeanInfoAssembler
* @see org.springframework.jmx.export.MBeanExporter
*/
public
|
will
|
java
|
apache__logging-log4j2
|
log4j-api-test/src/main/java/org/apache/logging/log4j/test/junit/InitializesThreadContext.java
|
{
"start": 1318,
"end": 1634
}
|
class ____ initializes the {@link ThreadContext} class;
*/
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
@Documented
@Inherited
@ExtendWith(ThreadContextInitializer.class)
@ResourceLock(value = Log4jStaticResources.THREAD_CONTEXT, mode = ResourceAccessMode.READ_WRITE)
public @
|
that
|
java
|
google__guice
|
extensions/struts2/example/src/com/google/inject/struts2/example/Count.java
|
{
"start": 736,
"end": 1258
}
|
class ____ {
final Counter counter;
final Service service;
String message;
@Inject
public Count(Counter counter, Service service) {
this.counter = counter;
this.service = service;
}
public String execute() {
return SUCCESS;
}
public int getCount() {
return counter.increment();
}
public String getStatus() {
return service.getStatus();
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
}
|
Count
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/DefaultProducerCacheTest.java
|
{
"start": 2354,
"end": 9568
}
|
class ____ extends ContextTestSupport {
private final AtomicInteger producerCounter = new AtomicInteger();
private final AtomicInteger stopCounter = new AtomicInteger();
private final AtomicInteger shutdownCounter = new AtomicInteger();
private MyComponent component;
@Test
public void testCacheProducerAcquireAndRelease() {
DefaultProducerCache cache = new DefaultProducerCache(this, context, 0);
cache.start();
assertEquals(0, cache.size(), "Size should be 0");
// test that we cache at most 1000 producers to avoid it eating to much
// memory
for (int i = 0; i < 1003; i++) {
Endpoint e = context.getEndpoint("direct:queue:" + i);
AsyncProducer p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
}
await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> {
// the eviction is async so force cleanup
cache.cleanUp();
assertEquals(1000, cache.size(), "Size should be 1000");
});
cache.stop();
assertEquals(0, cache.size(), "Size should be 0");
}
@Test
public void testCacheStopExpired() {
DefaultProducerCache cache = new DefaultProducerCache(this, context, 5);
cache.start();
assertEquals(0, cache.size(), "Size should be 0");
for (int i = 0; i < 8; i++) {
Endpoint e = newEndpoint(true, i);
e.setCamelContext(context);
AsyncProducer p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
}
await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> {
// the eviction is async so force cleanup
cache.cleanUp();
assertEquals(5, cache.size(), "Size should be 5");
});
await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> assertEquals(3, stopCounter.get()));
cache.stop();
// should have stopped all 8
assertEquals(8, stopCounter.get());
}
@Test
public void testExtendedStatistics() {
DefaultProducerCache cache = new DefaultProducerCache(this, context, 5);
cache.setExtendedStatistics(true);
cache.start();
assertEquals(0, cache.size(), "Size should be 0");
// use 1 = 2 times
// use 2 = 3 times
// use 3..4 = 1 times
// use 5 = 0 times
Endpoint e = newEndpoint(true, 1);
AsyncProducer p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
e = newEndpoint(true, 1);
p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
e = newEndpoint(true, 2);
p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
e = newEndpoint(true, 2);
p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
e = newEndpoint(true, 2);
p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
e = newEndpoint(true, 3);
p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
e = newEndpoint(true, 4);
p = cache.acquireProducer(e);
cache.releaseProducer(e, p);
assertEquals(4, cache.size(), "Size should be 4");
EndpointUtilizationStatistics stats = cache.getEndpointUtilizationStatistics();
assertEquals(4, stats.size());
Map<String, Long> recent = stats.getStatistics();
assertEquals(2, recent.get("my://1").longValue());
assertEquals(3, recent.get("my://2").longValue());
assertEquals(1, recent.get("my://3").longValue());
assertEquals(1, recent.get("my://4").longValue());
assertNull(recent.get("my://5"));
cache.stop();
}
@Test
public void testCacheEvictWhileInUse() {
producerCounter.set(0);
MyProducerCache cache = new MyProducerCache(this, context, 2);
cache.start();
assertEquals(0, cache.size(), "Size should be 0");
Endpoint e = newEndpoint(false, 1);
e.setCamelContext(context);
AsyncProducer p1 = cache.acquireProducer(e);
assertEquals(0, cache.size(), "Size should be 0");
AsyncProducer p2 = cache.acquireProducer(e);
assertEquals(0, cache.size(), "Size should be 0");
cache.releaseProducer(e, p2);
cache.releaseProducer(e, p1);
assertEquals(2, cache.size(), "Size should be 2");
// nothing has stopped yet
assertEquals(0, stopCounter.get());
p1 = cache.acquireProducer(e);
p2 = cache.acquireProducer(e);
AsyncProducer p3 = cache.acquireProducer(e);
assertEquals(0, cache.size(), "Size should be 0");
// nothing has stopped yet even we have 3 producers and a cache limit of 2
assertEquals(0, stopCounter.get());
// force evict p2 while its in use (eg simulate someone else grabbing it while evicting race condition)
cache.forceEvict(p2);
// and should still not be stopped
assertEquals(0, stopCounter.get());
// now release the others back
cache.releaseProducer(e, p3);
cache.releaseProducer(e, p2);
// which should trigger the eviction run to stop one of the producers as we have 3 and the cache size is 2
assertEquals(1, stopCounter.get());
cache.stop();
// should have stopped all 3 when the cache is stopped
await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> assertEquals(3, stopCounter.get()));
}
@Test
public void testAcquireProducerConcurrency() throws InterruptedException, ExecutionException {
DefaultProducerCache cache = new DefaultProducerCache(this, context, 0);
cache.start();
List<Endpoint> endpoints = new ArrayList<>();
for (int i = 0; i < 3; i++) {
Endpoint e = context.getEndpoint("direct:queue:" + i);
AsyncProducer p = cache.acquireProducer(e);
endpoints.add(e);
}
assertEquals(3, cache.size());
ExecutorService ex = Executors.newFixedThreadPool(16);
List<Callable<Boolean>> callables = new ArrayList<>();
for (int i = 0; i < 500; i++) {
int index = i % 3;
callables.add(() -> isEqualTask(cache, endpoints, index));
}
for (int i = 1; i <= 100; i++) {
log.info("Iteration: {}", i);
List<Future<Boolean>> results = ex.invokeAll(callables);
for (Future<Boolean> future : results) {
assertEquals(true, future.get());
}
}
}
private boolean isEqualTask(DefaultProducerCache cache, List<Endpoint> endpoints, int index) {
Producer producer = cache.acquireProducer(endpoints.get(index));
boolean isEqual
= producer.getEndpoint().getEndpointUri().equalsIgnoreCase(endpoints.get(index).getEndpointUri());
if (!isEqual) {
log.info("Endpoint uri to acquire: {}, returned producer (uri): {}", endpoints.get(index).getEndpointUri(),
producer.getEndpoint().getEndpointUri());
}
return isEqual;
}
private static
|
DefaultProducerCacheTest
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/ScenariosForSpringSecurityExpressionTests.java
|
{
"start": 1854,
"end": 5811
}
|
class ____ extends AbstractExpressionTests {
@Test
void testScenario01_Roles() {
SpelExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext ctx = new StandardEvaluationContext();
Expression expr = parser.parseRaw("hasAnyRole('MANAGER','TELLER')");
ctx.setRootObject(new Person("Ben"));
Boolean value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isFalse();
ctx.setRootObject(new Manager("Luke"));
value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isTrue();
}
@Test
void testScenario02_ComparingNames() {
SpelExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext ctx = new StandardEvaluationContext();
ctx.addPropertyAccessor(new SecurityPrincipalAccessor());
// Multiple options for supporting this expression: "p.name == principal.name"
// (1) If the right person is the root context object then "name==principal.name" is good enough
Expression expr = parser.parseRaw("name == principal.name");
ctx.setRootObject(new Person("Andy"));
Boolean value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isTrue();
ctx.setRootObject(new Person("Christian"));
value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isFalse();
// (2) Or register an accessor that can understand 'p' and return the right person
expr = parser.parseRaw("p.name == principal.name");
PersonAccessor pAccessor = new PersonAccessor();
ctx.addPropertyAccessor(pAccessor);
ctx.setRootObject(null);
pAccessor.setPerson(new Person("Andy"));
value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isTrue();
pAccessor.setPerson(new Person("Christian"));
value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isFalse();
}
@Test
void testScenario03_Arithmetic() {
SpelExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext ctx = new StandardEvaluationContext();
// Might be better with a as a variable although it would work as a property too...
// Variable references using a '#'
Expression expr = parser.parseRaw("(hasRole('SUPERVISOR') or (#a < 1.042)) and hasIpAddress('10.10.0.0/16')");
Boolean value = null;
ctx.setVariable("a",1.0d); // referenced as #a in the expression
ctx.setRootObject(new Supervisor("Ben")); // so non-qualified references 'hasRole()' 'hasIpAddress()' are invoked against it
value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isTrue();
ctx.setRootObject(new Manager("Luke"));
ctx.setVariable("a",1.043d);
value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isFalse();
}
// Here i'm going to change which hasRole() executes and make it one of my own Java methods
@Test
void testScenario04_ControllingWhichMethodsRun() {
SpelExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext ctx = new StandardEvaluationContext();
ctx.setRootObject(new Supervisor("Ben")); // so non-qualified references 'hasRole()' 'hasIpAddress()' are invoked against it;
ctx.addMethodResolver(new MyMethodResolver()); // NEEDS TO OVERRIDE THE REFLECTION ONE - SHOW REORDERING MECHANISM
// Might be better with a as a variable although it would work as a property too...
// Variable references using a '#'
// SpelExpression expr = parser.parseExpression("(hasRole('SUPERVISOR') or (#a < 1.042)) and hasIpAddress('10.10.0.0/16')");
Expression expr = parser.parseRaw("(hasRole(3) or (#a < 1.042)) and hasIpAddress('10.10.0.0/16')");
Boolean value = null;
ctx.setVariable("a",1.0d); // referenced as #a in the expression
value = expr.getValue(ctx,Boolean.class);
assertThat((boolean) value).isTrue();
// ctx.setRootObject(new Manager("Luke"));
// ctx.setVariable("a",1.043d);
// value = (Boolean)expr.getValue(ctx,Boolean.class);
// assertFalse(value);
}
static
|
ScenariosForSpringSecurityExpressionTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java
|
{
"start": 1198,
"end": 2178
}
|
class ____ extends BaseTasksResponse implements Writeable {
private final boolean killed;
public Response(StreamInput in) throws IOException {
super(in);
killed = in.readBoolean();
}
public Response(boolean killed) {
super(null, null);
this.killed = killed;
}
public boolean isKilled() {
return killed;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(killed);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Response response = (Response) o;
return killed == response.killed;
}
@Override
public int hashCode() {
return Objects.hash(killed);
}
}
}
|
Response
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/sql/FunctionITCase.java
|
{
"start": 94316,
"end": 94937
}
|
class ____ extends ScalarFunction {
public Long eval(Long a, Long b) {
long localVariable;
if (a == null) {
// block 1
localVariable = 0;
} else if (a == 0) {
// block 2
localVariable = -1;
} else if (b < 1) {
// block 3
localVariable = -1L * a;
} else {
// block 4
localVariable = a;
}
return localVariable * Optional.ofNullable(b).orElse(0L);
}
}
private
|
MultiLocalVariableBlocksClass
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/LoggerFqcnPatternConverter.java
|
{
"start": 1231,
"end": 2070
}
|
class ____ extends LogEventPatternConverter {
/**
* Singleton.
*/
private static final LoggerFqcnPatternConverter INSTANCE = new LoggerFqcnPatternConverter();
/**
* Private constructor.
*/
private LoggerFqcnPatternConverter() {
super("LoggerFqcn", "loggerFqcn");
}
/**
* Obtains an instance of LoggerFqcnPatternConverter.
*
* @param options options, currently ignored, may be null.
* @return instance of LoggerFqcnPatternConverter.
*/
public static LoggerFqcnPatternConverter newInstance(final String[] options) {
return INSTANCE;
}
/**
* {@inheritDoc}
*/
@Override
public void format(final LogEvent event, final StringBuilder toAppendTo) {
toAppendTo.append(event.getLoggerFqcn());
}
}
|
LoggerFqcnPatternConverter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/BucketDiagnostics.java
|
{
"start": 820,
"end": 4798
}
|
class ____ {
private static final int MIN_BUCKETS = 10;
private final long bucketSpanMs;
private final long latencyMs;
private final int maxSize;
private final long[] buckets;
private long movingBucketCount = 0;
private long latestBucketStartMs = -1;
private int latestBucketIndex;
private long earliestBucketStartMs = -1;
private int earliestBucketIndex;
private long latestFlushedBucketStartMs = -1;
private final BucketFlushListener bucketFlushListener;
BucketDiagnostics(Job job, DataCounts dataCounts, BucketFlushListener bucketFlushListener) {
bucketSpanMs = job.getAnalysisConfig().getBucketSpan().millis();
latencyMs = job.getAnalysisConfig().getLatency() == null ? 0 : job.getAnalysisConfig().getLatency().millis();
maxSize = Math.max((int) (Intervals.alignToCeil(latencyMs, bucketSpanMs) / bucketSpanMs), MIN_BUCKETS);
buckets = new long[maxSize];
this.bucketFlushListener = bucketFlushListener;
Date latestRecordTimestamp = dataCounts.getLatestRecordTimeStamp();
if (latestRecordTimestamp != null) {
addRecord(latestRecordTimestamp.getTime());
}
}
void addRecord(long recordTimestampMs) {
long bucketStartMs = Intervals.alignToFloor(recordTimestampMs, bucketSpanMs);
// Initialize earliest/latest times
if (latestBucketStartMs < 0) {
latestBucketStartMs = bucketStartMs;
earliestBucketStartMs = bucketStartMs;
}
advanceTime(bucketStartMs);
addToBucket(bucketStartMs);
}
private void advanceTime(long bucketStartMs) {
while (bucketStartMs > latestBucketStartMs) {
int flushBucketIndex = (latestBucketIndex + 1) % maxSize;
if (flushBucketIndex == earliestBucketIndex) {
flush(flushBucketIndex);
movingBucketCount -= buckets[flushBucketIndex];
earliestBucketStartMs += bucketSpanMs;
earliestBucketIndex = (earliestBucketIndex + 1) % maxSize;
}
buckets[flushBucketIndex] = 0L;
latestBucketStartMs += bucketSpanMs;
latestBucketIndex = flushBucketIndex;
}
}
private void addToBucket(long bucketStartMs) {
int offsetToLatest = (int) ((bucketStartMs - latestBucketStartMs) / bucketSpanMs);
int bucketIndex = (latestBucketIndex + offsetToLatest) % maxSize;
if (bucketIndex < 0) {
bucketIndex = maxSize + bucketIndex;
}
++buckets[bucketIndex];
++movingBucketCount;
if (bucketStartMs < earliestBucketStartMs) {
earliestBucketStartMs = bucketStartMs;
earliestBucketIndex = bucketIndex;
}
}
private void flush(int bucketIndex) {
long bucketStartMs = getTimestampMs(bucketIndex);
if (bucketStartMs > latestFlushedBucketStartMs) {
bucketFlushListener.onBucketFlush(bucketStartMs, buckets[bucketIndex]);
latestFlushedBucketStartMs = bucketStartMs;
}
}
private long getTimestampMs(int bucketIndex) {
int offsetToLatest = latestBucketIndex - bucketIndex;
if (offsetToLatest < 0) {
offsetToLatest = maxSize + offsetToLatest;
}
return latestBucketStartMs - offsetToLatest * bucketSpanMs;
}
void flush() {
if (latestBucketStartMs < 0) {
return;
}
int bucketIndex = earliestBucketIndex;
while (bucketIndex != latestBucketIndex) {
flush(bucketIndex);
bucketIndex = (bucketIndex + 1) % maxSize;
}
}
double averageBucketCount() {
return (double) movingBucketCount / size();
}
private int size() {
if (latestBucketStartMs < 0) {
return 0;
}
return (int) ((latestBucketStartMs - earliestBucketStartMs) / bucketSpanMs) + 1;
}
|
BucketDiagnostics
|
java
|
elastic__elasticsearch
|
x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlSpecLoader.java
|
{
"start": 553,
"end": 5358
}
|
class ____ {
public static List<EqlSpec> load(String path, Set<String> uniqueTestNames) throws Exception {
try (InputStream is = EqlSpecLoader.class.getResourceAsStream(path)) {
if (is == null) {
throw new IllegalAccessException("Cannot find classpath resource " + path);
}
return readFromStream(is, uniqueTestNames);
}
}
public static List<EqlSpec> load(String... paths) throws Exception {
Set<String> uniqueTestNames = new HashSet<>();
List<EqlSpec> specs = new ArrayList<>();
for (String path : paths) {
specs.addAll(load(path, uniqueTestNames));
}
return specs;
}
private static void validateAndAddSpec(List<EqlSpec> specs, EqlSpec spec, Set<String> uniqueTestNames) {
if (Strings.isNullOrEmpty(spec.name())) {
throw new IllegalArgumentException("Read a test without a name value");
}
if (Strings.isNullOrEmpty(spec.query())) {
throw new IllegalArgumentException("Read a test without a query value");
}
if (spec.expectedEventIds() == null) {
throw new IllegalArgumentException("Read a test without a expected_event_ids value");
}
if (uniqueTestNames.contains(spec.name())) { // TODO: scope it per file?
throw new IllegalArgumentException("Found a test with the same name as another test: " + spec.name());
} else {
uniqueTestNames.add(spec.name());
}
specs.add(spec);
}
private static String getTrimmedString(TomlTable table, String key) {
String s = table.getString(key);
if (s != null) {
return s.trim();
}
return null;
}
private static Integer getInteger(TomlTable table, String key) {
Long s = table.getLong(key);
if (s != null) {
return s.intValue();
}
return null;
}
private static Boolean getBoolean(TomlTable table, String key) {
return table.getBoolean(key);
}
private static List<EqlSpec> readFromStream(InputStream is, Set<String> uniqueTestNames) throws Exception {
List<EqlSpec> testSpecs = new ArrayList<>();
EqlSpec spec;
Toml toml = JToml.parse(is);
List<TomlTable> queries = toml.getArrayTable("queries");
for (TomlTable table : queries) {
spec = new EqlSpec();
spec.query(getTrimmedString(table, "query"));
spec.name(getTrimmedString(table, "name"));
spec.note(getTrimmedString(table, "note"));
spec.description(getTrimmedString(table, "description"));
spec.size(getInteger(table, "size"));
spec.allowPartialSearchResults(getBoolean(table, "allow_partial_search_results"));
spec.allowPartialSequenceResults(getBoolean(table, "allow_partial_sequence_results"));
spec.expectShardFailures(getBoolean(table, "expect_shard_failures"));
List<?> arr = table.getList("tags");
if (arr != null) {
String[] tags = new String[arr.size()];
int i = 0;
for (Object obj : arr) {
tags[i++] = (String) obj;
}
spec.tags(tags);
}
arr = table.getList("expected_event_ids");
if (arr != null) {
List<long[]> expectedEventIdsList = new ArrayList<>();
if (arr.size() == 0) {
expectedEventIdsList.add(new long[] {});
} else if (arr.stream().allMatch(x -> x instanceof Long)) {
long[] expectedEventIds = asLongArray(arr);
expectedEventIdsList.add(expectedEventIds);
} else if (arr.stream().allMatch(x -> x instanceof List)) {
for (Object o : arr) {
expectedEventIdsList.add(asLongArray((List) o));
}
} else {
throw new IllegalArgumentException("Invalid expected_event_ids");
}
spec.expectedEventIds(expectedEventIdsList);
}
arr = table.getList("join_keys");
spec.joinKeys(arr != null ? arr.toArray(new String[0]) : new String[0]);
spec.maxSamplesPerKey(getInteger(table, "max_samples_per_key"));
validateAndAddSpec(testSpecs, spec, uniqueTestNames);
}
return testSpecs;
}
private static long[] asLongArray(List<?> arr) {
long[] expectedEventIds = new long[arr.size()];
int i = 0;
for (Object obj : arr) {
expectedEventIds[i++] = (Long) obj;
}
return expectedEventIds;
}
}
|
EqlSpecLoader
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/junit/jupiter/SpringExtension.java
|
{
"start": 21864,
"end": 23451
}
|
class ____ the same as the supplied
* test class.
* @since 7.0
* @see SpringExtensionConfig#useTestClassScopedExtensionContext()
* @see ExtensionContextScope
*/
private static ExtensionContext findProperlyScopedExtensionContext(Class<?> testClass, ExtensionContext context) {
if (useTestClassScopedExtensionContextCache.get(testClass)) {
while (context.getRequiredTestClass() != testClass) {
context = context.getParent().get();
}
}
return context;
}
/**
* Determine if the supplied test class, or one of its enclosing classes, is annotated
* with {@code @SpringExtensionConfig(useTestClassScopedExtensionContext = true)}.
* @since 7.0
* @see SpringExtensionConfig#useTestClassScopedExtensionContext()
* @see #useTestClassScopedExtensionContextCache
*/
private static boolean useTestClassScopedExtensionContext(Class<?> testClass) {
MergedAnnotation<SpringExtensionConfig> mergedAnnotation =
MergedAnnotations.search(SearchStrategy.TYPE_HIERARCHY)
.withEnclosingClasses(ClassUtils::isInnerClass)
.from(testClass)
.get(SpringExtensionConfig.class);
if (mergedAnnotation.isPresent()) {
if (mergedAnnotation.getSource() instanceof Class<?> source && ClassUtils.isInnerClass(source)) {
throw new IllegalStateException("""
Test class [%s] must not be annotated with @SpringExtensionConfig. \
@SpringExtensionConfig is only supported on top-level classes.\
""".formatted(source.getName()));
}
return mergedAnnotation.getBoolean("useTestClassScopedExtensionContext");
}
return false;
}
}
|
is
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-soap/src/test/java/org/apache/camel/component/cxf/wsdl/OrderEndpoint.java
|
{
"start": 988,
"end": 1241
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(OrderEndpoint.class);
@WebMethod
public String doOrder(Order order) {
LOG.info("Processing order");
return "Order processed " + order;
}
}
|
OrderEndpoint
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/commons/util/ReflectionUtilsWithGenericTypeHierarchiesTests.java
|
{
"start": 5463,
"end": 5626
}
|
class ____
implements InterfaceWithGenericObjectParameter, InterfaceWithGenericNumberParameter {
}
public static
|
ClassImplementingGenericAndMoreSpecificInterface
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/standard/PropertiesConversionSpelTests.java
|
{
"start": 3205,
"end": 3261
}
|
class ____ extends HashMap<String, Object> {
}
}
|
CustomMap
|
java
|
apache__camel
|
components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java
|
{
"start": 3357,
"end": 47236
}
|
class ____ extends DefaultEndpoint
implements AsyncEndpoint, HeaderFilterStrategyAware, MultipleConsumersSupport, EndpointServiceLocation {
private static final Logger LOG = LoggerFactory.getLogger(JmsEndpoint.class);
private String serviceUrl;
private String serviceProtocol;
private Map<String, String> serviceMetadata;
private final AtomicInteger runningMessageListeners = new AtomicInteger();
private boolean pubSubDomain;
private JmsBinding binding;
@UriPath(defaultValue = "queue", enums = "queue,topic,temp-queue,temp-topic",
description = "The kind of destination to use")
private String destinationType;
@UriPath(description = "Name of the queue or topic to use as destination")
@Metadata(required = true)
private String destinationName;
@UriParam(label = "advanced",
description = "To use a custom HeaderFilterStrategy to filter header to and from Camel message.")
private HeaderFilterStrategy headerFilterStrategy;
@UriParam
private JmsConfiguration configuration;
public JmsEndpoint() {
}
public JmsEndpoint(String uri, JmsComponent component, String destinationName, boolean pubSubDomain,
JmsConfiguration configuration) {
super(UnsafeUriCharactersEncoder.encode(uri), component);
this.configuration = configuration;
this.destinationName = destinationName;
this.pubSubDomain = pubSubDomain;
if (pubSubDomain) {
this.destinationType = "topic";
} else {
this.destinationType = "queue";
}
}
public JmsEndpoint(String endpointUri, JmsBinding binding, JmsConfiguration configuration, String destinationName,
boolean pubSubDomain) {
super(UnsafeUriCharactersEncoder.encode(endpointUri), null);
this.binding = binding;
this.configuration = configuration;
this.destinationName = destinationName;
this.pubSubDomain = pubSubDomain;
if (pubSubDomain) {
this.destinationType = "topic";
} else {
this.destinationType = "queue";
}
}
public JmsEndpoint(String endpointUri, String destinationName, boolean pubSubDomain) {
this(UnsafeUriCharactersEncoder.encode(endpointUri), null, new JmsConfiguration(), destinationName, pubSubDomain);
this.binding = new JmsBinding(this);
if (pubSubDomain) {
this.destinationType = "topic";
} else {
this.destinationType = "queue";
}
}
/**
* Creates a pub-sub endpoint with the given destination
*/
public JmsEndpoint(String endpointUri, String destinationName) {
this(UnsafeUriCharactersEncoder.encode(endpointUri), destinationName, true);
}
@Override
protected void doStart() throws Exception {
if (getComponent().isServiceLocationEnabled()) {
// we need to use reflection to find the URL to the brokers, so do this once on startup
BeanIntrospection bi = PluginHelper.getBeanIntrospection(getCamelContext());
ConnectionFactory cf = getConnectionFactory();
// unwrap if cf is from a synthetic ClientProxy bean
if (cf != null && cf.getClass().getName().endsWith("ClientProxy")) {
ConnectionFactory actual = UnwrapHelper.unwrapClientProxy(cf);
if (actual != null) {
cf = actual;
}
}
serviceUrl = JmsServiceLocationHelper.getBrokerURLFromConnectionFactory(bi, cf);
serviceProtocol = getComponent().getDefaultName();
serviceMetadata = new HashMap<>();
String user = JmsServiceLocationHelper.getUsernameFromConnectionFactory(bi, cf);
if (user != null) {
serviceMetadata.put("username", user);
if (getConfiguration().getClientId() != null) {
serviceMetadata.put("clientId", getConfiguration().getClientId());
}
}
}
}
@Override
public String getServiceUrl() {
return serviceUrl;
}
@Override
public String getServiceProtocol() {
return serviceProtocol;
}
@Override
public Map<String, String> getServiceMetadata() {
if (serviceMetadata != null && !serviceMetadata.isEmpty()) {
return serviceMetadata;
}
return null;
}
@Override
public Producer createProducer() throws Exception {
Producer answer = new JmsProducer(this);
if (getConfiguration().isSynchronous()) {
return new SynchronousDelegateProducer(answer);
} else {
return answer;
}
}
@Override
public JmsConsumer createConsumer(Processor processor) throws Exception {
AbstractMessageListenerContainer listenerContainer = createMessageListenerContainer();
return createConsumer(processor, listenerContainer);
}
public AbstractMessageListenerContainer createMessageListenerContainer() {
return configuration.createMessageListenerContainer(this);
}
public AbstractMessageListenerContainer createReplyToMessageListenerContainer() {
// only choose as the reply manager will configure the listener
return configuration.chooseMessageListenerContainerImplementation(this, configuration.getReplyToConsumerType());
}
public void configureListenerContainer(AbstractMessageListenerContainer listenerContainer, JmsConsumer consumer) {
if (destinationName != null) {
String target = destinationName;
if (getConfiguration().getArtemisConsumerPriority() != 0) {
target += "?consumer-priority=" + getConfiguration().getArtemisConsumerPriority();
}
listenerContainer.setDestinationName(target);
LOG.debug("Using destinationName: {} on listenerContainer: {}", destinationName, listenerContainer);
} else {
DestinationResolver resolver = getDestinationResolver();
if (resolver != null) {
listenerContainer.setDestinationResolver(resolver);
} else {
throw new IllegalArgumentException(
"Neither destination, destinationName or destinationResolver are specified on this endpoint!");
}
LOG.debug("Using destinationResolver: {} on listenerContainer: {}", resolver, listenerContainer);
}
listenerContainer.setPubSubDomain(pubSubDomain);
// include destination name as part of thread and transaction name
String consumerName = getThreadName();
if (configuration.getTaskExecutor() != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Using custom TaskExecutor: {} on listener container: {}", configuration.getTaskExecutor(),
listenerContainer);
}
setContainerTaskExecutor(listenerContainer, configuration.getTaskExecutor());
// we are using a shared thread pool that this listener container is using.
// store a reference to the consumer, but we should not shutdown the thread pool when the consumer stops
// as the lifecycle of the shared thread pool is handled elsewhere
if (configuration.getTaskExecutor() instanceof ExecutorService executorService) {
consumer.setListenerContainerExecutorService(executorService, false);
}
} else if (!(listenerContainer instanceof DefaultJmsMessageListenerContainer)
|| configuration.getDefaultTaskExecutorType() == null) {
// preserve backwards compatibility if an explicit Default TaskExecutor Type was not set;
// otherwise, defer the creation of the TaskExecutor
// use a cached pool as DefaultMessageListenerContainer will throttle pool sizing
ExecutorService executor
= getCamelContext().getExecutorServiceManager().newCachedThreadPool(consumer, consumerName);
setContainerTaskExecutor(listenerContainer, executor);
// we created a new private thread pool that this listener container is using, now store a reference on the consumer
// so when the consumer is stopped we can shutdown the thread pool also, to ensure all resources is shutdown
consumer.setListenerContainerExecutorService(executor, true);
} else {
// do nothing, as we're working with a DefaultJmsMessageListenerContainer with an explicit DefaultTaskExecutorType,
// so DefaultJmsMessageListenerContainer#createDefaultTaskExecutor will handle the creation
LOG.debug("Deferring creation of TaskExecutor for listener container: {} as per policy: {}",
listenerContainer, getDefaultTaskExecutorType());
}
// set a default transaction name if none provided
if (configuration.getTransactionName() == null) {
if (listenerContainer instanceof DefaultMessageListenerContainer defaultMessageListenerContainer) {
defaultMessageListenerContainer.setTransactionName(consumerName);
}
}
// now configure the JMS 2.0 API
if (configuration.getDurableSubscriptionName() != null) {
listenerContainer.setDurableSubscriptionName(configuration.getDurableSubscriptionName());
} else if (configuration.isSubscriptionDurable()) {
listenerContainer.setSubscriptionDurable(true);
}
if (configuration.getSubscriptionName() != null) {
listenerContainer.setSubscriptionName(configuration.getSubscriptionName());
}
listenerContainer.setSubscriptionShared(configuration.isSubscriptionShared());
}
private void setContainerTaskExecutor(AbstractMessageListenerContainer listenerContainer, Executor executor) {
if (listenerContainer instanceof SimpleMessageListenerContainer container) {
container.setTaskExecutor(executor);
} else if (listenerContainer instanceof DefaultMessageListenerContainer defaultMessageListenerContainer) {
defaultMessageListenerContainer.setTaskExecutor(executor);
}
}
/**
* Gets the destination name which was configured from the endpoint uri.
*
* @return the destination name resolved from the endpoint uri
*/
public String getEndpointConfiguredDestinationName() {
String remainder = StringHelper.after(getEndpointKey(), "//");
if (remainder != null && remainder.contains("?")) {
// remove parameters
remainder = StringHelper.before(remainder, "?");
}
return JmsMessageHelper.normalizeDestinationName(remainder);
}
/**
 * Creates a consumer using the given processor and listener container
 *
 * @param  processor         the processor to use to process the messages
 * @param  listenerContainer the listener container
 * @return                   a newly created consumer
 * @throws Exception         if the consumer cannot be created
 */
public JmsConsumer createConsumer(Processor processor, AbstractMessageListenerContainer listenerContainer)
        throws Exception {
    JmsConsumer consumer = new JmsConsumer(this, processor, listenerContainer);
    configureListenerContainer(listenerContainer, consumer);
    configureConsumer(consumer);
    if (isBridgeErrorHandler()) {
        // bridging the error handler is not possible for JMS consumers; fail fast with a clear message
        // (fixed grammar: "is not support on" -> "is not supported on")
        throw new IllegalArgumentException("BridgeErrorHandler is not supported on JMS endpoint");
    }
    // guard against a reply destination equal to the consuming destination, which would make
    // the consumer deliver reply messages back to itself in an endless loop
    String replyTo = consumer.getEndpoint().getReplyTo();
    if (replyTo != null && consumer.getEndpoint().getDestinationName().equals(replyTo)) {
        throw new IllegalArgumentException(
                "Invalid Endpoint configuration: " + consumer.getEndpoint()
                + ". ReplyTo=" + replyTo
                + " cannot be the same as the destination name on the JmsConsumer as that"
                + " would lead to the consumer sending reply messages to itself in an endless loop.");
    }
    return consumer;
}
/**
 * Creates a polling consumer backed by an InOnly template.
 */
@Override
public PollingConsumer createPollingConsumer() throws Exception {
    final JmsPollingConsumer pollingConsumer = new JmsPollingConsumer(this, createInOnlyTemplate());
    configurePollingConsumer(pollingConsumer);
    return pollingConsumer;
}
/**
 * Creates an exchange for the given pattern, pre-populated with the JMS binding
 * so downstream processors can access it via the {@code Exchange.BINDING} property.
 */
@Override
public Exchange createExchange(ExchangePattern pattern) {
    final Exchange answer = super.createExchange(pattern);
    answer.setProperty(Exchange.BINDING, getBinding());
    return answer;
}
/**
 * Creates an exchange wrapping the given JMS message and session as the IN message.
 */
public Exchange createExchange(Message message, Session session) {
    final Exchange answer = createExchange(getExchangePattern());
    answer.setIn(new JmsMessage(answer, message, session, getBinding()));
    return answer;
}
/**
 * Factory method for creating a new template for InOnly message exchanges
 *
 * @return a JMS template for one-way (fire and forget) sends
 */
public JmsOperations createInOnlyTemplate() {
    return configuration.createInOnlyTemplate(this, pubSubDomain, destinationName);
}
/**
 * Factory method for creating a new template for InOut message exchanges
 *
 * @return a JMS template for request/reply sends, using the configured request timeout
 */
public JmsOperations createInOutTemplate() {
    return configuration.createInOutTemplate(this, pubSubDomain, destinationName, configuration.getRequestTimeout());
}
/**
 * Whether multiple consumers may be created against this endpoint.
 */
@Override
public boolean isMultipleConsumersSupported() {
    // JMS allows multiple consumers on both queues and topics
    return true;
}
/**
 * Thread name used for consumer threads of this endpoint, derived from the
 * destination configured on the endpoint uri.
 */
public String getThreadName() {
    return "JmsConsumer[" + getEndpointConfiguredDestinationName() + "]";
}
// Properties
// -------------------------------------------------------------------------
/**
 * Returns the owning component, narrowed to {@link JmsComponent}.
 */
@Override
public JmsComponent getComponent() {
    return (JmsComponent) super.getComponent();
}
/**
 * Returns the header filter strategy, lazily creating the default JMS strategy
 * on first access when none has been configured.
 */
@Override
public HeaderFilterStrategy getHeaderFilterStrategy() {
    if (headerFilterStrategy != null) {
        return headerFilterStrategy;
    }
    headerFilterStrategy = new JmsHeaderFilterStrategy(isIncludeAllJMSXProperties());
    return headerFilterStrategy;
}
/**
 * To use a custom HeaderFilterStrategy to filter header to and from Camel message.
 *
 * @param strategy the strategy to use; replaces any lazily created default
 */
@Override
public void setHeaderFilterStrategy(HeaderFilterStrategy strategy) {
    this.headerFilterStrategy = strategy;
}
/**
 * Returns the JMS binding, lazily creating the default one on first access.
 */
public JmsBinding getBinding() {
    if (binding != null) {
        return binding;
    }
    binding = createBinding();
    return binding;
}
/**
 * Creates the {@link org.apache.camel.component.jms.JmsBinding} to use.
 *
 * @return a new default binding bound to this endpoint
 */
protected JmsBinding createBinding() {
    return new JmsBinding(this);
}
/**
 * Sets the binding used to convert from a Camel message to and from a JMS message
 */
public void setBinding(JmsBinding binding) {
    this.binding = binding;
}
/** @return the kind of destination (e.g. queue or topic) configured on this endpoint. */
public String getDestinationType() {
    return destinationType;
}
/**
 * The kind of destination to use
 */
public void setDestinationType(String destinationType) {
    this.destinationType = destinationType;
}
/** @return the destination name this endpoint sends to / consumes from. */
public String getDestinationName() {
    return destinationName;
}
/**
 * Name of the queue or topic to use as destination
 */
public void setDestinationName(String destinationName) {
    this.destinationName = destinationName;
}
/** @return the JMS configuration backing this endpoint. */
public JmsConfiguration getConfiguration() {
    return configuration;
}
/** Replaces the JMS configuration backing this endpoint. */
public void setConfiguration(JmsConfiguration configuration) {
    this.configuration = configuration;
}
/** Whether this endpoint operates in the pub/sub (topic) domain rather than queues. */
@ManagedAttribute
public boolean isPubSubDomain() {
    return pubSubDomain;
}
/**
 * Shared executor used when listener containers are started/stopped asynchronously;
 * the pool is owned by the component, so it must be set on this endpoint.
 *
 * @throws IllegalStateException if no JmsComponent is configured
 */
protected ExecutorService getAsyncStartStopExecutorService() {
    if (getComponent() == null) {
        throw new IllegalStateException(
                "AsyncStartStopListener requires JmsComponent to be configured on this endpoint: " + this);
    }
    // use shared thread pool from component
    return getComponent().getAsyncStartStopExecutorService();
}
// Book-keeping of active message listeners; stop()/shutdown() defer while any are running.
public void onListenerContainerStarting() {
    runningMessageListeners.incrementAndGet();
}
public void onListenerContainerStopped() {
    runningMessageListeners.decrementAndGet();
}
/**
 * State whether this endpoint is running (eg started)
 */
protected boolean isRunning() {
    return isStarted();
}
/**
 * Stops the endpoint only when no message listeners are currently running;
 * otherwise the stop request is skipped (traced) and left to a later attempt.
 */
@Override
public void stop() {
    // NOTE(review): the get()/stop() sequence is not atomic - a listener could start
    // between the check and super.stop(); confirm lifecycle transitions are serialized
    int running = runningMessageListeners.get();
    if (running <= 0) {
        super.stop();
    } else {
        LOG.trace("There are still {} running message listeners. Cannot stop endpoint {}", running, this);
    }
}
/**
 * Shuts down the endpoint only when no message listeners are currently running;
 * otherwise the shutdown request is skipped (traced), mirroring {@code stop()}.
 */
@Override
public void shutdown() {
    int running = runningMessageListeners.get();
    if (running <= 0) {
        super.shutdown();
    } else {
        LOG.trace("There are still {} running message listeners. Cannot shutdown endpoint {}", running, this);
    }
}
// Delegated properties from the configuration
//-------------------------------------------------------------------------
// The accessors below simply delegate to the nested JmsConfiguration so the options
// are reachable on the endpoint (and JMX-manageable where annotated @ManagedAttribute).
@ManagedAttribute
public int getAcknowledgementMode() {
    return getConfiguration().getAcknowledgementMode();
}
@ManagedAttribute
public String getAcknowledgementModeName() {
    return getConfiguration().getAcknowledgementModeName();
}
@ManagedAttribute
public int getCacheLevel() {
    return getConfiguration().getCacheLevel();
}
@ManagedAttribute
public String getCacheLevelName() {
    return getConfiguration().getCacheLevelName();
}
@ManagedAttribute
public String getReplyToCacheLevelName() {
    return getConfiguration().getReplyToCacheLevelName();
}
@ManagedAttribute
public String getClientId() {
    return getConfiguration().getClientId();
}
@ManagedAttribute
public int getConcurrentConsumers() {
    return getConfiguration().getConcurrentConsumers();
}
@ManagedAttribute
public int getReplyToConcurrentConsumers() {
    return getConfiguration().getReplyToConcurrentConsumers();
}
public ConnectionFactory getConnectionFactory() {
    return getConfiguration().getConnectionFactory();
}
public DestinationResolver getDestinationResolver() {
    return getConfiguration().getDestinationResolver();
}
public TemporaryQueueResolver getTemporaryQueueResolver() {
    return getConfiguration().getTemporaryQueueResolver();
}
@ManagedAttribute
public String getDurableSubscriptionName() {
    return getConfiguration().getDurableSubscriptionName();
}
public ExceptionListener getExceptionListener() {
    return getConfiguration().getExceptionListener();
}
public ErrorHandler getErrorHandler() {
    return getConfiguration().getErrorHandler();
}
public LoggingLevel getErrorHandlerLoggingLevel() {
    return getConfiguration().getErrorHandlerLoggingLevel();
}
@ManagedAttribute
public boolean isErrorHandlerLogStackTrace() {
    return getConfiguration().isErrorHandlerLogStackTrace();
}
@ManagedAttribute
public void setErrorHandlerLogStackTrace(boolean errorHandlerLogStackTrace) {
    getConfiguration().setErrorHandlerLogStackTrace(errorHandlerLogStackTrace);
}
@ManagedAttribute
public int getIdleTaskExecutionLimit() {
    return getConfiguration().getIdleTaskExecutionLimit();
}
@ManagedAttribute
public int getIdleConsumerLimit() {
    return getConfiguration().getIdleConsumerLimit();
}
public JmsOperations getJmsOperations() {
    return getConfiguration().getJmsOperations();
}
public ConnectionFactory getListenerConnectionFactory() {
    return getConfiguration().getListenerConnectionFactory();
}
@ManagedAttribute
public int getMaxConcurrentConsumers() {
    return getConfiguration().getMaxConcurrentConsumers();
}
@ManagedAttribute
public int getReplyToMaxConcurrentConsumers() {
    return getConfiguration().getReplyToMaxConcurrentConsumers();
}
@ManagedAttribute
public int getReplyToOnTimeoutMaxConcurrentConsumers() {
    return getConfiguration().getReplyToOnTimeoutMaxConcurrentConsumers();
}
@ManagedAttribute
public int getMaxMessagesPerTask() {
    return getConfiguration().getMaxMessagesPerTask();
}
@ManagedAttribute
public int getIdleReceivesPerTaskLimit() {
    return getConfiguration().getIdleReceivesPerTaskLimit();
}
public MessageConverter getMessageConverter() {
    return getConfiguration().getMessageConverter();
}
@ManagedAttribute
public int getPriority() {
    return getConfiguration().getPriority();
}
@ManagedAttribute
public long getReceiveTimeout() {
    return getConfiguration().getReceiveTimeout();
}
@ManagedAttribute
public long getRecoveryInterval() {
    return getConfiguration().getRecoveryInterval();
}
@ManagedAttribute
public String getReplyTo() {
    return getConfiguration().getReplyTo();
}
@ManagedAttribute
public String getReplyToOverride() {
    return getConfiguration().getReplyToOverride();
}
@ManagedAttribute
public boolean isReplyToSameDestinationAllowed() {
    return getConfiguration().isReplyToSameDestinationAllowed();
}
@ManagedAttribute
public String getReplyToDestinationSelectorName() {
    return getConfiguration().getReplyToDestinationSelectorName();
}
@ManagedAttribute
public long getRequestTimeout() {
    return getConfiguration().getRequestTimeout();
}
@ManagedAttribute
public long getRequestTimeoutCheckerInterval() {
    return getConfiguration().getRequestTimeoutCheckerInterval();
}
public TaskExecutor getTaskExecutor() {
    return getConfiguration().getTaskExecutor();
}
public ConnectionFactory getTemplateConnectionFactory() {
    return getConfiguration().getTemplateConnectionFactory();
}
@ManagedAttribute
public long getTimeToLive() {
    return getConfiguration().getTimeToLive();
}
public PlatformTransactionManager getTransactionManager() {
    return getConfiguration().getTransactionManager();
}
@ManagedAttribute
public String getTransactionName() {
    return getConfiguration().getTransactionName();
}
@ManagedAttribute
public int getTransactionTimeout() {
    return getConfiguration().getTransactionTimeout();
}
@ManagedAttribute
public boolean isAcceptMessagesWhileStopping() {
    return getConfiguration().isAcceptMessagesWhileStopping();
}
@ManagedAttribute
public boolean isAllowReplyManagerQuickStop() {
    return getConfiguration().isAllowReplyManagerQuickStop();
}
@ManagedAttribute
public boolean isAlwaysCopyMessage() {
    return getConfiguration().isAlwaysCopyMessage();
}
@ManagedAttribute
public boolean isAutoStartup() {
    return getConfiguration().isAutoStartup();
}
@ManagedAttribute
public boolean isDeliveryPersistent() {
    return getConfiguration().isDeliveryPersistent();
}
@ManagedAttribute
public Integer getDeliveryMode() {
    return getConfiguration().getDeliveryMode();
}
@ManagedAttribute
public boolean isDisableReplyTo() {
    return getConfiguration().isDisableReplyTo();
}
@ManagedAttribute
public String getEagerPoisonBody() {
    return getConfiguration().getEagerPoisonBody();
}
@ManagedAttribute
public boolean isEagerLoadingOfProperties() {
    return getConfiguration().isEagerLoadingOfProperties();
}
@ManagedAttribute
public boolean isExplicitQosEnabled() {
    return getConfiguration().isExplicitQosEnabled();
}
@ManagedAttribute
public boolean isExposeListenerSession() {
    return getConfiguration().isExposeListenerSession();
}
@ManagedAttribute
public boolean isMessageIdEnabled() {
    return getConfiguration().isMessageIdEnabled();
}
@ManagedAttribute
public boolean isMessageTimestampEnabled() {
    return getConfiguration().isMessageTimestampEnabled();
}
@ManagedAttribute
public boolean isPreserveMessageQos() {
    return getConfiguration().isPreserveMessageQos();
}
@ManagedAttribute
public boolean isPubSubNoLocal() {
    return getConfiguration().isPubSubNoLocal();
}
@ManagedAttribute
public boolean isReplyToDeliveryPersistent() {
    return getConfiguration().isReplyToDeliveryPersistent();
}
@ManagedAttribute
public boolean isTransacted() {
    return getConfiguration().isTransacted();
}
@ManagedAttribute
public boolean isTransactedInOut() {
    return getConfiguration().isTransactedInOut();
}
@ManagedAttribute
public boolean isLazyCreateTransactionManager() {
    return getConfiguration().isLazyCreateTransactionManager();
}
@ManagedAttribute
public boolean isUseMessageIDAsCorrelationID() {
    return getConfiguration().isUseMessageIDAsCorrelationID();
}
// Delegated setters: forward to the nested JmsConfiguration.
@ManagedAttribute
public void setAcceptMessagesWhileStopping(boolean acceptMessagesWhileStopping) {
    getConfiguration().setAcceptMessagesWhileStopping(acceptMessagesWhileStopping);
}
@ManagedAttribute
public void setAllowReplyManagerQuickStop(boolean allowReplyManagerQuickStop) {
    getConfiguration().setAllowReplyManagerQuickStop(allowReplyManagerQuickStop);
}
@ManagedAttribute
public void setAcknowledgementMode(int consumerAcknowledgementMode) {
    getConfiguration().setAcknowledgementMode(consumerAcknowledgementMode);
}
@ManagedAttribute
public void setAcknowledgementModeName(String consumerAcknowledgementMode) {
    getConfiguration().setAcknowledgementModeName(consumerAcknowledgementMode);
}
@ManagedAttribute
public void setAlwaysCopyMessage(boolean alwaysCopyMessage) {
    getConfiguration().setAlwaysCopyMessage(alwaysCopyMessage);
}
@ManagedAttribute
public void setAutoStartup(boolean autoStartup) {
    getConfiguration().setAutoStartup(autoStartup);
}
@ManagedAttribute
public void setCacheLevel(int cacheLevel) {
    getConfiguration().setCacheLevel(cacheLevel);
}
@ManagedAttribute
public void setCacheLevelName(String cacheName) {
    getConfiguration().setCacheLevelName(cacheName);
}
@ManagedAttribute
public void setReplyToCacheLevelName(String cacheName) {
    getConfiguration().setReplyToCacheLevelName(cacheName);
}
@ManagedAttribute
public void setClientId(String consumerClientId) {
    getConfiguration().setClientId(consumerClientId);
}
@ManagedAttribute
public void setConcurrentConsumers(int concurrentConsumers) {
    getConfiguration().setConcurrentConsumers(concurrentConsumers);
}
@ManagedAttribute
public void setReplyToConcurrentConsumers(int concurrentConsumers) {
    getConfiguration().setReplyToConcurrentConsumers(concurrentConsumers);
}
public void setConnectionFactory(ConnectionFactory connectionFactory) {
    getConfiguration().setConnectionFactory(connectionFactory);
}
@ManagedAttribute
public void setDeliveryPersistent(boolean deliveryPersistent) {
    getConfiguration().setDeliveryPersistent(deliveryPersistent);
}
@ManagedAttribute
public void setDeliveryMode(Integer deliveryMode) {
    getConfiguration().setDeliveryMode(deliveryMode);
}
public void setDestinationResolver(DestinationResolver destinationResolver) {
    getConfiguration().setDestinationResolver(destinationResolver);
}
// NOTE(review): this overload is misleadingly named - it accepts a TemporaryQueueResolver
// and forwards to setTemporaryQueueResolver on the configuration, not to the destination
// resolver. Consider adding a properly named setTemporaryQueueResolver(...) (matching the
// getter above) and deprecating this overload; renaming outright would break callers.
public void setDestinationResolver(TemporaryQueueResolver temporaryQueueResolver) {
    getConfiguration().setTemporaryQueueResolver(temporaryQueueResolver);
}
// Remaining delegated options: all forward to the nested JmsConfiguration
// (some via getConfiguration(), some via the field directly - behavior is identical).
@ManagedAttribute
public void setDisableReplyTo(boolean disableReplyTo) {
    getConfiguration().setDisableReplyTo(disableReplyTo);
}
@ManagedAttribute
public void setDurableSubscriptionName(String durableSubscriptionName) {
    getConfiguration().setDurableSubscriptionName(durableSubscriptionName);
}
@ManagedAttribute
public void setEagerPoisonBody(String eagerPoisonBody) {
    getConfiguration().setEagerPoisonBody(eagerPoisonBody);
}
@ManagedAttribute
public void setEagerLoadingOfProperties(boolean eagerLoadingOfProperties) {
    getConfiguration().setEagerLoadingOfProperties(eagerLoadingOfProperties);
}
public void setExceptionListener(ExceptionListener exceptionListener) {
    getConfiguration().setExceptionListener(exceptionListener);
}
public void setErrorHandler(ErrorHandler errorHandler) {
    getConfiguration().setErrorHandler(errorHandler);
}
@ManagedAttribute
public void setExplicitQosEnabled(boolean explicitQosEnabled) {
    getConfiguration().setExplicitQosEnabled(explicitQosEnabled);
}
@ManagedAttribute
public void setExposeListenerSession(boolean exposeListenerSession) {
    getConfiguration().setExposeListenerSession(exposeListenerSession);
}
@ManagedAttribute
public void setIdleTaskExecutionLimit(int idleTaskExecutionLimit) {
    getConfiguration().setIdleTaskExecutionLimit(idleTaskExecutionLimit);
}
@ManagedAttribute
public void setIdleConsumerLimit(int idleConsumerLimit) {
    getConfiguration().setIdleConsumerLimit(idleConsumerLimit);
}
public void setJmsOperations(JmsOperations jmsOperations) {
    getConfiguration().setJmsOperations(jmsOperations);
}
public void setListenerConnectionFactory(ConnectionFactory listenerConnectionFactory) {
    getConfiguration().setListenerConnectionFactory(listenerConnectionFactory);
}
@ManagedAttribute
public void setMaxConcurrentConsumers(int maxConcurrentConsumers) {
    getConfiguration().setMaxConcurrentConsumers(maxConcurrentConsumers);
}
@ManagedAttribute
public void setReplyToMaxConcurrentConsumers(int maxConcurrentConsumers) {
    getConfiguration().setReplyToMaxConcurrentConsumers(maxConcurrentConsumers);
}
@ManagedAttribute
public void setMaxMessagesPerTask(int maxMessagesPerTask) {
    getConfiguration().setMaxMessagesPerTask(maxMessagesPerTask);
}
@ManagedAttribute
public void setIdleReceivesPerTaskLimit(int idleReceivesPerTaskLimit) {
    getConfiguration().setIdleReceivesPerTaskLimit(idleReceivesPerTaskLimit);
}
public void setMessageConverter(MessageConverter messageConverter) {
    getConfiguration().setMessageConverter(messageConverter);
}
@ManagedAttribute
public void setMessageIdEnabled(boolean messageIdEnabled) {
    getConfiguration().setMessageIdEnabled(messageIdEnabled);
}
@ManagedAttribute
public void setMessageTimestampEnabled(boolean messageTimestampEnabled) {
    getConfiguration().setMessageTimestampEnabled(messageTimestampEnabled);
}
@ManagedAttribute
public void setPreserveMessageQos(boolean preserveMessageQos) {
    getConfiguration().setPreserveMessageQos(preserveMessageQos);
}
@ManagedAttribute
public void setPriority(int priority) {
    getConfiguration().setPriority(priority);
}
@ManagedAttribute
public void setPubSubNoLocal(boolean pubSubNoLocal) {
    getConfiguration().setPubSubNoLocal(pubSubNoLocal);
}
@ManagedAttribute
public void setReceiveTimeout(long receiveTimeout) {
    getConfiguration().setReceiveTimeout(receiveTimeout);
}
@ManagedAttribute
public void setRecoveryInterval(long recoveryInterval) {
    getConfiguration().setRecoveryInterval(recoveryInterval);
}
@ManagedAttribute
public void setReplyTo(String replyToDestination) {
    getConfiguration().setReplyTo(replyToDestination);
}
@ManagedAttribute
public void setReplyToOverride(String replyToDestination) {
    getConfiguration().setReplyToOverride(replyToDestination);
}
@ManagedAttribute
public void setReplyToSameDestinationAllowed(boolean replyToSameDestinationAllowed) {
    getConfiguration().setReplyToSameDestinationAllowed(replyToSameDestinationAllowed);
}
@ManagedAttribute
public void setReplyToDeliveryPersistent(boolean replyToDeliveryPersistent) {
    getConfiguration().setReplyToDeliveryPersistent(replyToDeliveryPersistent);
}
@ManagedAttribute
public void setReplyToDestinationSelectorName(String replyToDestinationSelectorName) {
    getConfiguration().setReplyToDestinationSelectorName(replyToDestinationSelectorName);
}
@ManagedAttribute
public void setRequestTimeout(long requestTimeout) {
    getConfiguration().setRequestTimeout(requestTimeout);
}
public void setTaskExecutor(TaskExecutor taskExecutor) {
    getConfiguration().setTaskExecutor(taskExecutor);
}
public void setTemplateConnectionFactory(ConnectionFactory templateConnectionFactory) {
    getConfiguration().setTemplateConnectionFactory(templateConnectionFactory);
}
@ManagedAttribute
public void setTimeToLive(long timeToLive) {
    getConfiguration().setTimeToLive(timeToLive);
}
@ManagedAttribute
public void setTransacted(boolean transacted) {
    getConfiguration().setTransacted(transacted);
}
@ManagedAttribute
public void setLazyCreateTransactionManager(boolean lazyCreating) {
    getConfiguration().setLazyCreateTransactionManager(lazyCreating);
}
public void setTransactionManager(PlatformTransactionManager transactionManager) {
    getConfiguration().setTransactionManager(transactionManager);
}
@ManagedAttribute
public void setTransactionName(String transactionName) {
    getConfiguration().setTransactionName(transactionName);
}
@ManagedAttribute
public void setTransactionTimeout(int transactionTimeout) {
    getConfiguration().setTransactionTimeout(transactionTimeout);
}
@ManagedAttribute
public void setUseMessageIDAsCorrelationID(boolean useMessageIDAsCorrelationID) {
    getConfiguration().setUseMessageIDAsCorrelationID(useMessageIDAsCorrelationID);
}
public JmsMessageType getJmsMessageType() {
    return getConfiguration().getJmsMessageType();
}
public void setJmsMessageType(JmsMessageType jmsMessageType) {
    getConfiguration().setJmsMessageType(jmsMessageType);
}
public JmsKeyFormatStrategy getJmsKeyFormatStrategy() {
    return getConfiguration().getJmsKeyFormatStrategy();
}
public void setJmsKeyFormatStrategy(JmsKeyFormatStrategy jmsHeaderStrategy) {
    getConfiguration().setJmsKeyFormatStrategy(jmsHeaderStrategy);
}
public MessageCreatedStrategy getMessageCreatedStrategy() {
    return getConfiguration().getMessageCreatedStrategy();
}
public void setMessageCreatedStrategy(MessageCreatedStrategy messageCreatedStrategy) {
    getConfiguration().setMessageCreatedStrategy(messageCreatedStrategy);
}
@ManagedAttribute
public boolean isTransferExchange() {
    return getConfiguration().isTransferExchange();
}
@ManagedAttribute
public void setTransferExchange(boolean transferExchange) {
    getConfiguration().setTransferExchange(transferExchange);
}
@ManagedAttribute
public boolean isAllowSerializedHeaders() {
    return getConfiguration().isAllowSerializedHeaders();
}
@ManagedAttribute
public void setAllowSerializedHeaders(boolean allowSerializedHeaders) {
    getConfiguration().setAllowSerializedHeaders(allowSerializedHeaders);
}
@ManagedAttribute
public boolean isTransferException() {
    return getConfiguration().isTransferException();
}
@ManagedAttribute
public void setTransferException(boolean transferException) {
    getConfiguration().setTransferException(transferException);
}
@ManagedAttribute
public boolean isTestConnectionOnStartup() {
    return configuration.isTestConnectionOnStartup();
}
@ManagedAttribute
public void setTestConnectionOnStartup(boolean testConnectionOnStartup) {
    configuration.setTestConnectionOnStartup(testConnectionOnStartup);
}
@ManagedAttribute
public boolean isForceSendOriginalMessage() {
    return configuration.isForceSendOriginalMessage();
}
@ManagedAttribute
public void setForceSendOriginalMessage(boolean forceSendOriginalMessage) {
    configuration.setForceSendOriginalMessage(forceSendOriginalMessage);
}
@ManagedAttribute
public boolean isDisableTimeToLive() {
    return configuration.isDisableTimeToLive();
}
@ManagedAttribute
public void setDisableTimeToLive(boolean disableTimeToLive) {
    configuration.setDisableTimeToLive(disableTimeToLive);
}
@ManagedAttribute
public void setAsyncConsumer(boolean asyncConsumer) {
    configuration.setAsyncConsumer(asyncConsumer);
}
@ManagedAttribute
public boolean isAsyncConsumer() {
    return configuration.isAsyncConsumer();
}
@ManagedAttribute
public void setAsyncStartListener(boolean asyncStartListener) {
    configuration.setAsyncStartListener(asyncStartListener);
}
@ManagedAttribute
public boolean isAsyncStartListener() {
    return configuration.isAsyncStartListener();
}
@ManagedAttribute
public void setAsyncStopListener(boolean asyncStopListener) {
    configuration.setAsyncStopListener(asyncStopListener);
}
@ManagedAttribute
public boolean isAsyncStopListener() {
    return configuration.isAsyncStopListener();
}
@ManagedAttribute
public boolean isAllowNullBody() {
    return configuration.isAllowNullBody();
}
@ManagedAttribute
public void setAllowNullBody(boolean allowNullBody) {
    configuration.setAllowNullBody(allowNullBody);
}
@ManagedAttribute
public boolean isIncludeSentJMSMessageID() {
    return configuration.isIncludeSentJMSMessageID();
}
@ManagedAttribute
public void setIncludeSentJMSMessageID(boolean includeSentJMSMessageID) {
    configuration.setIncludeSentJMSMessageID(includeSentJMSMessageID);
}
@ManagedAttribute
public boolean isIncludeAllJMSXProperties() {
    return configuration.isIncludeAllJMSXProperties();
}
@ManagedAttribute
public void setIncludeAllJMSXProperties(boolean includeAllJMSXProperties) {
    configuration.setIncludeAllJMSXProperties(includeAllJMSXProperties);
}
@ManagedAttribute
public DefaultTaskExecutorType getDefaultTaskExecutorType() {
    return configuration.getDefaultTaskExecutorType();
}
public void setDefaultTaskExecutorType(DefaultTaskExecutorType type) {
    configuration.setDefaultTaskExecutorType(type);
}
@ManagedAttribute
public String getAllowAdditionalHeaders() {
    return configuration.getAllowAdditionalHeaders();
}
@ManagedAttribute
public void setAllowAdditionalHeaders(String allowAdditionalHeaders) {
    configuration.setAllowAdditionalHeaders(allowAdditionalHeaders);
}
public MessageListenerContainerFactory getMessageListenerContainerFactory() {
    return configuration.getMessageListenerContainerFactory();
}
// also switches the consumer type to Custom so the supplied factory is actually used
public void setMessageListenerContainerFactory(MessageListenerContainerFactory messageListenerContainerFactory) {
    configuration.setMessageListenerContainerFactory(messageListenerContainerFactory);
    configuration.setConsumerType(ConsumerType.Custom);
}
@ManagedAttribute
public boolean isSubscriptionDurable() {
    return getConfiguration().isSubscriptionDurable();
}
@ManagedAttribute
public void setSubscriptionDurable(boolean subscriptionDurable) {
    getConfiguration().setSubscriptionDurable(subscriptionDurable);
}
@ManagedAttribute
public boolean isSubscriptionShared() {
    return getConfiguration().isSubscriptionShared();
}
@ManagedAttribute
public void setSubscriptionShared(boolean subscriptionShared) {
    getConfiguration().setSubscriptionShared(subscriptionShared);
}
@ManagedAttribute
public String getSubscriptionName() {
    return getConfiguration().getSubscriptionName();
}
@ManagedAttribute
public void setSubscriptionName(String subscriptionName) {
    getConfiguration().setSubscriptionName(subscriptionName);
}
@ManagedAttribute
public String getReplyToType() {
    // the enum may be unset; expose its name or null for JMX friendliness
    if (configuration.getReplyToType() != null) {
        return configuration.getReplyToType().name();
    } else {
        return null;
    }
}
@ManagedAttribute
public void setReplyToType(String replyToType) {
    // valueOf throws IllegalArgumentException if the name is not a ReplyToType constant
    ReplyToType type = ReplyToType.valueOf(replyToType);
    configuration.setReplyToType(type);
}
@ManagedAttribute(description = "Number of running message listeners")
public int getRunningMessageListeners() {
    return runningMessageListeners.get();
}
@ManagedAttribute
public String getSelector() {
    return configuration.getSelector();
}
public void setSelector(String selector) {
    configuration.setSelector(selector);
}
@ManagedAttribute
public int getWaitForProvisionCorrelationToBeUpdatedCounter() {
    return configuration.getWaitForProvisionCorrelationToBeUpdatedCounter();
}
@ManagedAttribute
public void setWaitForProvisionCorrelationToBeUpdatedCounter(int counter) {
    configuration.setWaitForProvisionCorrelationToBeUpdatedCounter(counter);
}
@ManagedAttribute
public long getWaitForProvisionCorrelationToBeUpdatedThreadSleepingTime() {
    return configuration.getWaitForProvisionCorrelationToBeUpdatedThreadSleepingTime();
}
@ManagedAttribute
public void setWaitForProvisionCorrelationToBeUpdatedThreadSleepingTime(long sleepingTime) {
    configuration.setWaitForProvisionCorrelationToBeUpdatedThreadSleepingTime(sleepingTime);
}
@ManagedAttribute
public boolean isFormatDateHeadersToIso8601() {
    return configuration.isFormatDateHeadersToIso8601();
}
@ManagedAttribute
public void setFormatDateHeadersToIso8601(boolean formatDateHeadersToIso8601) {
    configuration.setFormatDateHeadersToIso8601(formatDateHeadersToIso8601);
}
@ManagedAttribute
public boolean isArtemisStreamingEnabled() {
    return configuration.isArtemisStreamingEnabled();
}
@ManagedAttribute
public void setArtemisStreamingEnabled(boolean artemisStreamingEnabled) {
    configuration.setArtemisStreamingEnabled(artemisStreamingEnabled);
}
// Implementation methods
//-------------------------------------------------------------------------
/**
 * Builds the endpoint uri: prefers the configured destination name, then the
 * destination resolver's string form, then the superclass default.
 */
@Override
protected String createEndpointUri() {
    final String scheme = "jms";
    if (destinationName != null) {
        return scheme + ":" + destinationName;
    }
    final DestinationResolver destinationResolver = getDestinationResolver();
    return destinationResolver != null ? scheme + ":" + destinationResolver : super.createEndpointUri();
}
}
|
JmsEndpoint
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/checkpointing/StatefulJobWBroadcastStateMigrationITCase.java
|
{
"start": 2541,
"end": 15905
}
|
class ____ extends SnapshotMigrationTestBase
implements MigrationTest {
// number of elements each test source emits
private static final int NUM_SOURCE_ELEMENTS = 4;
// Parameters used when running the verification tests (all supported specs).
@Parameterized.Parameters(name = "Test snapshot: {0}")
public static Collection<SnapshotSpec> createSpecsForTestRuns() {
    return internalParameters(null);
}
// Parameters used when (re-)generating test snapshot data for one specific Flink version.
public static Collection<SnapshotSpec> createSpecsForTestDataGeneration(
        FlinkVersion targetVersion) {
    return internalParameters(targetVersion);
}
/**
 * Computes the snapshot specs (state backend x snapshot type x Flink version range) that
 * the migration test covers.
 *
 * @param targetGeneratingVersion when non-null, every version range is narrowed to exactly
 *                                this version (used when generating new test snapshots);
 *                                when null, the full supported ranges are used
 * @return the snapshot specs, in a fixed deterministic order
 */
private static Collection<SnapshotSpec> internalParameters(
        @Nullable FlinkVersion targetGeneratingVersion) {
    // resolves the inclusive version range [min, max], optionally narrowed to the
    // single target generating version
    BiFunction<FlinkVersion, FlinkVersion, Collection<FlinkVersion>> versionsInRange =
            (minInclVersion, maxInclVersion) -> {
                Collection<FlinkVersion> range =
                        FlinkVersion.rangeOf(minInclVersion, maxInclVersion);
                if (targetGeneratingVersion == null) {
                    return range;
                }
                return range.stream()
                        .filter(version -> version.equals(targetGeneratingVersion))
                        .collect(Collectors.toList());
            };
    Collection<SnapshotSpec> specs = new LinkedList<>();
    // canonical savepoints
    specs.addAll(
            SnapshotSpec.withVersions(
                    StateBackendLoader.HASHMAP_STATE_BACKEND_NAME,
                    SnapshotType.SAVEPOINT_CANONICAL,
                    versionsInRange.apply(
                            FlinkVersion.v1_15,
                            MigrationTest.getMostRecentlyPublishedVersion())));
    specs.addAll(
            SnapshotSpec.withVersions(
                    StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME,
                    SnapshotType.SAVEPOINT_CANONICAL,
                    versionsInRange.apply(
                            FlinkVersion.v1_8,
                            MigrationTest.getMostRecentlyPublishedVersion())));
    // native savepoints
    specs.addAll(
            SnapshotSpec.withVersions(
                    StateBackendLoader.HASHMAP_STATE_BACKEND_NAME,
                    SnapshotType.SAVEPOINT_NATIVE,
                    versionsInRange.apply(
                            FlinkVersion.v1_15,
                            MigrationTest.getMostRecentlyPublishedVersion())));
    specs.addAll(
            SnapshotSpec.withVersions(
                    StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME,
                    SnapshotType.SAVEPOINT_NATIVE,
                    versionsInRange.apply(
                            FlinkVersion.v1_15,
                            MigrationTest.getMostRecentlyPublishedVersion())));
    // checkpoints
    specs.addAll(
            SnapshotSpec.withVersions(
                    StateBackendLoader.HASHMAP_STATE_BACKEND_NAME,
                    SnapshotType.CHECKPOINT,
                    versionsInRange.apply(
                            FlinkVersion.v1_15,
                            MigrationTest.getMostRecentlyPublishedVersion())));
    specs.addAll(
            SnapshotSpec.withVersions(
                    StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME,
                    SnapshotType.CHECKPOINT,
                    versionsInRange.apply(
                            FlinkVersion.v1_15,
                            MigrationTest.getMostRecentlyPublishedVersion())));
    return specs;
}
// the snapshot spec (state backend + snapshot type + Flink version) under test
private final SnapshotSpec snapshotSpec;
public StatefulJobWBroadcastStateMigrationITCase(SnapshotSpec snapshotSpec) throws Exception {
    this.snapshotSpec = snapshotSpec;
}
// Entry point used by the snapshot-generation tooling to (re-)create the test snapshots.
@ParameterizedSnapshotsGenerator("createSpecsForTestDataGeneration")
public void generateSnapshots(SnapshotSpec snapshotSpec) throws Exception {
    testOrCreateSavepoint(ExecutionMode.CREATE_SNAPSHOT, snapshotSpec);
}
// Verifies that the job restores correctly from the parameterized snapshot.
@Test
public void testSavepoint() throws Exception {
    testOrCreateSavepoint(ExecutionMode.VERIFY_SNAPSHOT, snapshotSpec);
}
private void testOrCreateSavepoint(ExecutionMode executionMode, SnapshotSpec snapshotSpec)
throws Exception {
final int parallelism = 4;
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
RestartStrategyUtils.configureNoRestartStrategy(env);
switch (snapshotSpec.getStateBackendType()) {
case StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME:
StateBackendUtils.configureRocksDBStateBackend(env);
if (executionMode == ExecutionMode.CREATE_SNAPSHOT) {
// disable changelog backend for now to ensure determinism in test data
// generation (see FLINK-31766)
env.enableChangelogStateBackend(false);
}
break;
case StateBackendLoader.HASHMAP_STATE_BACKEND_NAME:
StateBackendUtils.configureHashMapStateBackend(env);
break;
default:
throw new UnsupportedOperationException();
}
env.enableCheckpointing(500);
env.setParallelism(parallelism);
env.setMaxParallelism(parallelism);
SourceFunction<Tuple2<Long, Long>> nonParallelSource;
SourceFunction<Tuple2<Long, Long>> nonParallelSourceB;
SourceFunction<Tuple2<Long, Long>> parallelSource;
SourceFunction<Tuple2<Long, Long>> parallelSourceB;
KeyedBroadcastProcessFunction<
Long, Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>>
firstBroadcastFunction;
KeyedBroadcastProcessFunction<
Long, Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>>
secondBroadcastFunction;
final Map<Long, Long> expectedFirstState = new HashMap<>();
expectedFirstState.put(0L, 0L);
expectedFirstState.put(1L, 1L);
expectedFirstState.put(2L, 2L);
expectedFirstState.put(3L, 3L);
final Map<String, Long> expectedSecondState = new HashMap<>();
expectedSecondState.put("0", 0L);
expectedSecondState.put("1", 1L);
expectedSecondState.put("2", 2L);
expectedSecondState.put("3", 3L);
final Map<Long, String> expectedThirdState = new HashMap<>();
expectedThirdState.put(0L, "0");
expectedThirdState.put(1L, "1");
expectedThirdState.put(2L, "2");
expectedThirdState.put(3L, "3");
if (executionMode == ExecutionMode.CREATE_SNAPSHOT) {
nonParallelSource =
new MigrationTestUtils.CheckpointingNonParallelSourceWithListState(
NUM_SOURCE_ELEMENTS);
nonParallelSourceB =
new MigrationTestUtils.CheckpointingNonParallelSourceWithListState(
NUM_SOURCE_ELEMENTS);
parallelSource =
new MigrationTestUtils.CheckpointingParallelSourceWithUnionListState(
NUM_SOURCE_ELEMENTS);
parallelSourceB =
new MigrationTestUtils.CheckpointingParallelSourceWithUnionListState(
NUM_SOURCE_ELEMENTS);
firstBroadcastFunction = new CheckpointingKeyedBroadcastFunction();
secondBroadcastFunction = new CheckpointingKeyedSingleBroadcastFunction();
} else if (executionMode == ExecutionMode.VERIFY_SNAPSHOT) {
nonParallelSource =
new MigrationTestUtils.CheckingNonParallelSourceWithListState(
NUM_SOURCE_ELEMENTS);
nonParallelSourceB =
new MigrationTestUtils.CheckingNonParallelSourceWithListState(
NUM_SOURCE_ELEMENTS);
parallelSource =
new MigrationTestUtils.CheckingParallelSourceWithUnionListState(
NUM_SOURCE_ELEMENTS);
parallelSourceB =
new MigrationTestUtils.CheckingParallelSourceWithUnionListState(
NUM_SOURCE_ELEMENTS);
firstBroadcastFunction =
new CheckingKeyedBroadcastFunction(expectedFirstState, expectedSecondState);
secondBroadcastFunction = new CheckingKeyedSingleBroadcastFunction(expectedThirdState);
} else {
throw new IllegalStateException("Unknown ExecutionMode " + executionMode);
}
KeyedStream<Tuple2<Long, Long>, Long> npStream =
env.addSource(nonParallelSource)
.uid("CheckpointingSource1")
.keyBy(
new KeySelector<Tuple2<Long, Long>, Long>() {
private static final long serialVersionUID =
-4514793867774977152L;
@Override
public Long getKey(Tuple2<Long, Long> value) throws Exception {
return value.f0;
}
});
KeyedStream<Tuple2<Long, Long>, Long> pStream =
env.addSource(parallelSource)
.uid("CheckpointingSource2")
.keyBy(
new KeySelector<Tuple2<Long, Long>, Long>() {
private static final long serialVersionUID =
4940496713319948104L;
@Override
public Long getKey(Tuple2<Long, Long> value) throws Exception {
return value.f0;
}
});
final MapStateDescriptor<Long, Long> firstBroadcastStateDesc =
new MapStateDescriptor<>(
"broadcast-state-1",
BasicTypeInfo.LONG_TYPE_INFO,
BasicTypeInfo.LONG_TYPE_INFO);
final MapStateDescriptor<String, Long> secondBroadcastStateDesc =
new MapStateDescriptor<>(
"broadcast-state-2",
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.LONG_TYPE_INFO);
final MapStateDescriptor<Long, String> thirdBroadcastStateDesc =
new MapStateDescriptor<>(
"broadcast-state-3",
BasicTypeInfo.LONG_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO);
BroadcastStream<Tuple2<Long, Long>> npBroadcastStream =
env.addSource(nonParallelSourceB)
.uid("BrCheckpointingSource1")
.broadcast(firstBroadcastStateDesc, secondBroadcastStateDesc);
BroadcastStream<Tuple2<Long, Long>> pBroadcastStream =
env.addSource(parallelSourceB)
.uid("BrCheckpointingSource2")
.broadcast(thirdBroadcastStateDesc);
npStream.connect(npBroadcastStream)
.process(firstBroadcastFunction)
.uid("BrProcess1")
.addSink(new MigrationTestUtils.AccumulatorCountingSink<>());
pStream.connect(pBroadcastStream)
.process(secondBroadcastFunction)
.uid("BrProcess2")
.addSink(new MigrationTestUtils.AccumulatorCountingSink<>());
if (executionMode == ExecutionMode.CREATE_SNAPSHOT) {
executeAndSnapshot(
env,
"src/test/resources/" + getSnapshotPath(snapshotSpec),
snapshotSpec.getSnapshotType(),
new Tuple2<>(
MigrationTestUtils.AccumulatorCountingSink.NUM_ELEMENTS_ACCUMULATOR,
2 * NUM_SOURCE_ELEMENTS));
} else {
restoreAndExecute(
env,
getResourceFilename(getSnapshotPath(snapshotSpec)),
new Tuple2<>(
MigrationTestUtils.CheckingNonParallelSourceWithListState
.SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR,
2), // we have 2 sources
new Tuple2<>(
MigrationTestUtils.CheckingParallelSourceWithUnionListState
.SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR,
2 * parallelism), // we have 2 sources
new Tuple2<>(
MigrationTestUtils.AccumulatorCountingSink.NUM_ELEMENTS_ACCUMULATOR,
NUM_SOURCE_ELEMENTS * 2));
}
}
private String getSnapshotPath(SnapshotSpec snapshotSpec) {
return "new-stateful-broadcast-udf-migration-itcase-" + snapshotSpec;
}
/**
* A simple {@link KeyedBroadcastProcessFunction} that puts everything on the broadcast side in
* the state.
*/
private static
|
StatefulJobWBroadcastStateMigrationITCase
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/launch/SystemPropertyUtils.java
|
{
"start": 984,
"end": 5228
}
|
class ____ {
private static final String PLACEHOLDER_PREFIX = "${";
private static final String PLACEHOLDER_SUFFIX = "}";
private static final String VALUE_SEPARATOR = ":";
private static final String SIMPLE_PREFIX = PLACEHOLDER_PREFIX.substring(1);
private SystemPropertyUtils() {
}
static String resolvePlaceholders(Properties properties, String text) {
return (text != null) ? parseStringValue(properties, text, text, new HashSet<>()) : null;
}
private static String parseStringValue(Properties properties, String value, String current,
Set<String> visitedPlaceholders) {
StringBuilder result = new StringBuilder(current);
int startIndex = current.indexOf(PLACEHOLDER_PREFIX);
while (startIndex != -1) {
int endIndex = findPlaceholderEndIndex(result, startIndex);
if (endIndex == -1) {
startIndex = -1;
continue;
}
String placeholder = result.substring(startIndex + PLACEHOLDER_PREFIX.length(), endIndex);
String originalPlaceholder = placeholder;
if (!visitedPlaceholders.add(originalPlaceholder)) {
throw new IllegalArgumentException(
"Circular placeholder reference '" + originalPlaceholder + "' in property definitions");
}
placeholder = parseStringValue(properties, value, placeholder, visitedPlaceholders);
String propertyValue = resolvePlaceholder(properties, value, placeholder);
if (propertyValue == null) {
int separatorIndex = placeholder.indexOf(VALUE_SEPARATOR);
if (separatorIndex != -1) {
String actualPlaceholder = placeholder.substring(0, separatorIndex);
String defaultValue = placeholder.substring(separatorIndex + VALUE_SEPARATOR.length());
propertyValue = resolvePlaceholder(properties, value, actualPlaceholder);
propertyValue = (propertyValue != null) ? propertyValue : defaultValue;
}
}
if (propertyValue != null) {
propertyValue = parseStringValue(properties, value, propertyValue, visitedPlaceholders);
result.replace(startIndex, endIndex + PLACEHOLDER_SUFFIX.length(), propertyValue);
startIndex = result.indexOf(PLACEHOLDER_PREFIX, startIndex + propertyValue.length());
}
else {
startIndex = result.indexOf(PLACEHOLDER_PREFIX, endIndex + PLACEHOLDER_SUFFIX.length());
}
visitedPlaceholders.remove(originalPlaceholder);
}
return result.toString();
}
private static String resolvePlaceholder(Properties properties, String text, String placeholderName) {
String propertyValue = getProperty(placeholderName, null, text);
if (propertyValue != null) {
return propertyValue;
}
return (properties != null) ? properties.getProperty(placeholderName) : null;
}
static String getProperty(String key) {
return getProperty(key, null, "");
}
private static String getProperty(String key, String defaultValue, String text) {
try {
String value = System.getProperty(key);
value = (value != null) ? value : System.getenv(key);
value = (value != null) ? value : System.getenv(key.replace('.', '_'));
value = (value != null) ? value : System.getenv(key.toUpperCase(Locale.ENGLISH).replace('.', '_'));
return (value != null) ? value : defaultValue;
}
catch (Throwable ex) {
System.err.println("Could not resolve key '" + key + "' in '" + text
+ "' as system property or in environment: " + ex);
return defaultValue;
}
}
private static int findPlaceholderEndIndex(CharSequence buf, int startIndex) {
int index = startIndex + PLACEHOLDER_PREFIX.length();
int withinNestedPlaceholder = 0;
while (index < buf.length()) {
if (substringMatch(buf, index, PLACEHOLDER_SUFFIX)) {
if (withinNestedPlaceholder > 0) {
withinNestedPlaceholder--;
index = index + PLACEHOLDER_SUFFIX.length();
}
else {
return index;
}
}
else if (substringMatch(buf, index, SIMPLE_PREFIX)) {
withinNestedPlaceholder++;
index = index + SIMPLE_PREFIX.length();
}
else {
index++;
}
}
return -1;
}
private static boolean substringMatch(CharSequence str, int index, CharSequence substring) {
for (int j = 0; j < substring.length(); j++) {
int i = index + j;
if (i >= str.length() || str.charAt(i) != substring.charAt(j)) {
return false;
}
}
return true;
}
}
|
SystemPropertyUtils
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ensemble/WeightedModeTests.java
|
{
"start": 750,
"end": 5186
}
|
class ____ extends WeightedAggregatorTests<WeightedMode> {
@Override
WeightedMode createTestInstance(int numberOfWeights) {
double[] weights = Stream.generate(ESTestCase::randomDouble).limit(numberOfWeights).mapToDouble(Double::valueOf).toArray();
return new WeightedMode(weights, randomIntBetween(2, 10));
}
@Override
protected WeightedMode doParseInstance(XContentParser parser) throws IOException {
return lenient ? WeightedMode.fromXContentLenient(parser) : WeightedMode.fromXContentStrict(parser);
}
@Override
protected WeightedMode createTestInstance() {
return randomBoolean() ? new WeightedMode(randomIntBetween(2, 10)) : createTestInstance(randomIntBetween(1, 100));
}
@Override
protected WeightedMode mutateInstance(WeightedMode instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Writeable.Reader<WeightedMode> instanceReader() {
return WeightedMode::new;
}
public void testAggregate() {
double[] ones = new double[] { 1.0, 1.0, 1.0, 1.0, 1.0 };
double[][] values = new double[][] {
new double[] { 1.0 },
new double[] { 2.0 },
new double[] { 2.0 },
new double[] { 3.0 },
new double[] { 5.0 } };
WeightedMode weightedMode = new WeightedMode(ones, 6);
assertThat(weightedMode.aggregate(weightedMode.processValues(values)), equalTo(2.0));
double[] variedWeights = new double[] { 1.0, -1.0, .5, 1.0, 5.0 };
weightedMode = new WeightedMode(variedWeights, 6);
assertThat(weightedMode.aggregate(weightedMode.processValues(values)), equalTo(5.0));
weightedMode = new WeightedMode(6);
assertThat(weightedMode.aggregate(weightedMode.processValues(values)), equalTo(2.0));
values = new double[][] {
new double[] { 1.0 },
new double[] { 1.0 },
new double[] { 1.0 },
new double[] { 1.0 },
new double[] { 2.0 } };
weightedMode = new WeightedMode(6);
double[] processedValues = weightedMode.processValues(values);
assertThat(processedValues.length, equalTo(6));
assertThat(processedValues[0], equalTo(0.0));
assertThat(processedValues[1], closeTo(0.95257412, 0.00001));
assertThat(processedValues[2], closeTo((1.0 - 0.95257412), 0.00001));
assertThat(processedValues[3], equalTo(0.0));
assertThat(processedValues[4], equalTo(0.0));
assertThat(processedValues[5], equalTo(0.0));
assertThat(weightedMode.aggregate(processedValues), equalTo(1.0));
}
public void testAggregateMultiValueArrays() {
double[] ones = new double[] { 1.0, 1.0, 1.0, 1.0, 1.0 };
double[][] values = new double[][] {
new double[] { 1.0, 0.0, 1.0 },
new double[] { 2.0, 0.0, 0.0 },
new double[] { 2.0, 3.0, 1.0 },
new double[] { 3.0, 3.0, 1.0 },
new double[] { 1.0, 1.0, 5.0 } };
WeightedMode weightedMode = new WeightedMode(ones, 3);
double[] processedValues = weightedMode.processValues(values);
assertThat(processedValues.length, equalTo(3));
assertThat(processedValues[0], closeTo(0.665240955, 0.00001));
assertThat(processedValues[1], closeTo(0.090030573, 0.00001));
assertThat(processedValues[2], closeTo(0.244728471, 0.00001));
assertThat(weightedMode.aggregate(weightedMode.processValues(values)), equalTo(0.0));
double[] variedWeights = new double[] { 1.0, -1.0, .5, 1.0, 5.0 };
weightedMode = new WeightedMode(variedWeights, 3);
processedValues = weightedMode.processValues(values);
assertThat(processedValues.length, equalTo(3));
assertThat(processedValues[0], closeTo(0.0, 0.00001));
assertThat(processedValues[1], closeTo(0.0, 0.00001));
assertThat(processedValues[2], closeTo(0.9999999, 0.00001));
assertThat(weightedMode.aggregate(weightedMode.processValues(values)), equalTo(2.0));
}
public void testCompatibleWith() {
WeightedMode weightedMode = createTestInstance();
assertThat(weightedMode.compatibleWith(TargetType.CLASSIFICATION), is(true));
assertThat(weightedMode.compatibleWith(TargetType.REGRESSION), is(false));
}
}
|
WeightedModeTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CheckedExceptionNotThrownTest.java
|
{
"start": 1542,
"end": 1843
}
|
class ____ {
/**
* Frobnicate
*
* @throws Exception foo
*/
void test() throws Exception {}
}
""")
.addOutputLines(
"Test.java",
"""
public final
|
Test
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/ModelAndViewMethodReturnValueHandler.java
|
{
"start": 1932,
"end": 4769
}
|
class ____ implements HandlerMethodReturnValueHandler {
private String @Nullable [] redirectPatterns;
/**
* Configure one more simple patterns (as described in {@link PatternMatchUtils#simpleMatch})
* to use in order to recognize custom redirect prefixes in addition to "redirect:".
* <p>Note that simply configuring this property will not make a custom redirect prefix work.
* There must be a custom {@link View} that recognizes the prefix as well.
* @since 4.1
*/
public void setRedirectPatterns(String @Nullable ... redirectPatterns) {
this.redirectPatterns = redirectPatterns;
}
/**
* Return the configured redirect patterns, if any.
* @since 4.1
*/
public String @Nullable [] getRedirectPatterns() {
return this.redirectPatterns;
}
@Override
public boolean supportsReturnType(MethodParameter returnType) {
Class<?> type = returnType.getParameterType();
if (Collection.class.isAssignableFrom(type)) {
type = returnType.nested().getNestedParameterType();
}
return (ModelAndView.class.isAssignableFrom(type) || FragmentsRendering.class.isAssignableFrom(type));
}
@SuppressWarnings("unchecked")
@Override
public void handleReturnValue(@Nullable Object returnValue, MethodParameter returnType,
ModelAndViewContainer mavContainer, NativeWebRequest webRequest) throws Exception {
if (returnValue == null) {
mavContainer.setRequestHandled(true);
return;
}
if (returnValue instanceof Collection<?> mavs) {
returnValue = FragmentsRendering.fragments((Collection<ModelAndView>) mavs).build();
}
if (returnValue instanceof FragmentsRendering rendering) {
mavContainer.setView(rendering);
return;
}
ModelAndView mav = (ModelAndView) returnValue;
if (mav.isReference()) {
String viewName = mav.getViewName();
mavContainer.setViewName(viewName);
if (viewName != null && isRedirectViewName(viewName)) {
mavContainer.setRedirectModelScenario(true);
}
}
else {
View view = mav.getView();
mavContainer.setView(view);
if (view instanceof SmartView smartView && smartView.isRedirectView()) {
mavContainer.setRedirectModelScenario(true);
}
}
mavContainer.setStatus(mav.getStatus());
mavContainer.addAllAttributes(mav.getModel());
}
/**
* Whether the given view name is a redirect view reference.
* The default implementation checks the configured redirect patterns and
* also if the view name starts with the "redirect:" prefix.
* @param viewName the view name to check, never {@code null}
* @return "true" if the given view name is recognized as a redirect view
* reference; "false" otherwise.
*/
protected boolean isRedirectViewName(String viewName) {
return (PatternMatchUtils.simpleMatch(this.redirectPatterns, viewName) || viewName.startsWith("redirect:"));
}
}
|
ModelAndViewMethodReturnValueHandler
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JcrEndpointBuilderFactory.java
|
{
"start": 27465,
"end": 36135
}
|
interface ____
extends
JcrEndpointConsumerBuilder,
JcrEndpointProducerBuilder {
default AdvancedJcrEndpointBuilder advanced() {
return (AdvancedJcrEndpointBuilder) this;
}
/**
* When isDeep is true, events whose associated parent node is at
* absPath or within its subgraph are received.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param deep the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder deep(boolean deep) {
doSetProperty("deep", deep);
return this;
}
/**
* When isDeep is true, events whose associated parent node is at
* absPath or within its subgraph are received.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param deep the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder deep(String deep) {
doSetProperty("deep", deep);
return this;
}
/**
* eventTypes (a combination of one or more event types encoded as a bit
* mask value such as javax.jcr.observation.Event.NODE_ADDED,
* javax.jcr.observation.Event.NODE_REMOVED, etc.).
*
* The option is a: <code>int</code> type.
*
* Group: common
*
* @param eventTypes the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder eventTypes(int eventTypes) {
doSetProperty("eventTypes", eventTypes);
return this;
}
/**
* eventTypes (a combination of one or more event types encoded as a bit
* mask value such as javax.jcr.observation.Event.NODE_ADDED,
* javax.jcr.observation.Event.NODE_REMOVED, etc.).
*
* The option will be converted to a <code>int</code> type.
*
* Group: common
*
* @param eventTypes the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder eventTypes(String eventTypes) {
doSetProperty("eventTypes", eventTypes);
return this;
}
/**
* When a comma separated nodeTypeName list string is set, only events
* whose associated parent node has one of the node types (or a subtype
* of one of the node types) in this list will be received.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param nodeTypeNames the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder nodeTypeNames(String nodeTypeNames) {
doSetProperty("nodeTypeNames", nodeTypeNames);
return this;
}
/**
* If noLocal is true, then events generated by the session through
* which the listener was registered are ignored. Otherwise, they are
* not ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param noLocal the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder noLocal(boolean noLocal) {
doSetProperty("noLocal", noLocal);
return this;
}
/**
* If noLocal is true, then events generated by the session through
* which the listener was registered are ignored. Otherwise, they are
* not ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param noLocal the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder noLocal(String noLocal) {
doSetProperty("noLocal", noLocal);
return this;
}
/**
* Password for login.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param password the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* Interval in milliseconds to wait before each session live checking
* The default value is 60000 ms.
*
* The option is a: <code>long</code> type.
*
* Default: 60000
* Group: common
*
* @param sessionLiveCheckInterval the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder sessionLiveCheckInterval(long sessionLiveCheckInterval) {
doSetProperty("sessionLiveCheckInterval", sessionLiveCheckInterval);
return this;
}
/**
* Interval in milliseconds to wait before each session live checking
* The default value is 60000 ms.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 60000
* Group: common
*
* @param sessionLiveCheckInterval the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder sessionLiveCheckInterval(String sessionLiveCheckInterval) {
doSetProperty("sessionLiveCheckInterval", sessionLiveCheckInterval);
return this;
}
/**
* Interval in milliseconds to wait before the first session live
* checking. The default value is 3000 ms.
*
* The option is a: <code>long</code> type.
*
* Default: 3000
* Group: common
*
* @param sessionLiveCheckIntervalOnStart the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder sessionLiveCheckIntervalOnStart(long sessionLiveCheckIntervalOnStart) {
doSetProperty("sessionLiveCheckIntervalOnStart", sessionLiveCheckIntervalOnStart);
return this;
}
/**
* Interval in milliseconds to wait before the first session live
* checking. The default value is 3000 ms.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 3000
* Group: common
*
* @param sessionLiveCheckIntervalOnStart the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder sessionLiveCheckIntervalOnStart(String sessionLiveCheckIntervalOnStart) {
doSetProperty("sessionLiveCheckIntervalOnStart", sessionLiveCheckIntervalOnStart);
return this;
}
/**
* Username for login.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param username the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder username(String username) {
doSetProperty("username", username);
return this;
}
/**
* When a comma separated uuid list string is set, only events whose
* associated parent node has one of the identifiers in the comma
* separated uuid list will be received.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param uuids the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder uuids(String uuids) {
doSetProperty("uuids", uuids);
return this;
}
/**
* The workspace to access. If it's not specified then the default one
* will be used.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param workspaceName the value to set
* @return the dsl builder
*/
default JcrEndpointBuilder workspaceName(String workspaceName) {
doSetProperty("workspaceName", workspaceName);
return this;
}
}
/**
* Advanced builder for endpoint for the JCR component.
*/
public
|
JcrEndpointBuilder
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
|
{
"start": 40449,
"end": 41180
}
|
class ____ {
public static void main(String[] args) {
switch (args.length) {
case 0 -> System.out.println(0);
default -> {
// hello
// world
}
}
}
}
""")
.setArgs("-XepOpt:StatementSwitchToExpressionSwitch:EnableDirectConversion=true")
.doTest(TEXT_MATCH);
}
/**********************************
*
* Return switch test cases
*
**********************************/
@Test
public void switchByEnum_returnSwitch_error() {
refactoringHelper
.addInputLines(
"Test.java",
"""
|
Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/components/EntitySuperclassComponentWithCollectionTest.java
|
{
"start": 6920,
"end": 7214
}
|
class ____ extends AbstractEntity {
@Embedded
private Information information;
public Information getInformation() {
return information;
}
public void setInformation(Information information) {
this.information = information;
}
}
@Entity(name = "Leader")
public static
|
Person
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/docker/compose/MariaDbJdbcDockerComposeConnectionDetailsFactory.java
|
{
"start": 1266,
"end": 1793
}
|
class ____
extends DockerComposeConnectionDetailsFactory<JdbcConnectionDetails> {
protected MariaDbJdbcDockerComposeConnectionDetailsFactory() {
super("mariadb");
}
@Override
protected JdbcConnectionDetails getDockerComposeConnectionDetails(DockerComposeConnectionSource source) {
return new MariaDbJdbcDockerComposeConnectionDetails(source.getRunningService());
}
/**
* {@link JdbcConnectionDetails} backed by a {@code mariadb} {@link RunningService}.
*/
static
|
MariaDbJdbcDockerComposeConnectionDetailsFactory
|
java
|
square__javapoet
|
src/test/java/com/squareup/javapoet/TypeSpecTest.java
|
{
"start": 16831,
"end": 17452
}
|
enum ____ {\n"
+ " CORN {\n"
+ " @Override\n"
+ " public void fold() {\n"
+ " }\n"
+ " };\n"
+ "\n"
+ " public abstract void fold();\n"
+ "}\n");
}
@Test public void noEnumConstants() throws Exception {
TypeSpec roshambo = TypeSpec.enumBuilder("Roshambo")
.addField(String.class, "NO_ENUM", Modifier.STATIC)
.build();
assertThat(toString(roshambo)).isEqualTo(""
+ "package com.squareup.tacos;\n"
+ "\n"
+ "import java.lang.String;\n"
+ "\n"
+ "
|
Tortilla
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java
|
{
"start": 68960,
"end": 69749
}
|
class ____ {
private final Object actual = new AtomicMarkableReference<>(0, false);
@Test
void createAssert() {
// WHEN
AtomicMarkableReferenceAssert<Integer> result = atomicMarkableReference(Integer.class).createAssert(actual);
// THEN
result.hasReference(0);
}
@Test
void createAssert_with_ValueProvider() {
// GIVEN
ValueProvider<?> valueProvider = mockThatDelegatesTo(type -> actual);
// WHEN
AtomicMarkableReferenceAssert<Integer> result = atomicMarkableReference(Integer.class).createAssert(valueProvider);
// THEN
result.hasReference(0);
verify(valueProvider).apply(parameterizedType(AtomicMarkableReference.class, Integer.class));
}
}
@Nested
|
AtomicMarkableReference_Typed_Factory
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/intercept/InterceptFromRouteTestSupport.java
|
{
"start": 1040,
"end": 1931
}
|
class ____ extends ContextTestSupport {
protected MockEndpoint a;
protected MockEndpoint b;
@Test
public void testSendMatchingMessage() throws Exception {
prepareMatchingTest();
template.sendBodyAndHeader("direct:start", "<matched/>", "foo", "bar");
assertMockEndpointsSatisfied();
}
@Test
public void testSendNonMatchingMessage() throws Exception {
prepareNonMatchingTest();
template.sendBodyAndHeader("direct:start", "<notMatched/>", "foo", "notMatchedHeaderValue");
assertMockEndpointsSatisfied();
}
@BeforeEach
public void setUpMocks() throws Exception {
super.setUp();
a = getMockEndpoint("mock:a");
b = getMockEndpoint("mock:b");
}
protected abstract void prepareMatchingTest();
protected abstract void prepareNonMatchingTest();
}
|
InterceptFromRouteTestSupport
|
java
|
quarkusio__quarkus
|
extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/query/QueryReader.java
|
{
"start": 98,
"end": 188
}
|
interface ____<T> {
T readValue(Iterator<Map.Entry<String, String>> params);
}
|
QueryReader
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/main/java/org/springframework/boot/test/system/OutputCaptureRule.java
|
{
"start": 1577,
"end": 2923
}
|
class ____ implements TestRule, CapturedOutput {
private final OutputCapture delegate = new OutputCapture();
private final List<Matcher<? super String>> matchers = new ArrayList<>();
@Override
public Statement apply(Statement base, Description description) {
return new Statement() {
@Override
public void evaluate() throws Throwable {
OutputCaptureRule.this.delegate.push();
try {
base.evaluate();
}
finally {
try {
if (!OutputCaptureRule.this.matchers.isEmpty()) {
String output = OutputCaptureRule.this.delegate.toString();
MatcherAssert.assertThat(output, allOf(OutputCaptureRule.this.matchers));
}
}
finally {
OutputCaptureRule.this.delegate.pop();
}
}
}
};
}
@Override
public String getAll() {
return this.delegate.getAll();
}
@Override
public String getOut() {
return this.delegate.getOut();
}
@Override
public String getErr() {
return this.delegate.getErr();
}
@Override
public String toString() {
return this.delegate.toString();
}
/**
* Verify that the output is matched by the supplied {@code matcher}. Verification is
* performed after the test method has executed.
* @param matcher the matcher
*/
public void expect(Matcher<? super String> matcher) {
this.matchers.add(matcher);
}
}
|
OutputCaptureRule
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/AbstractDependsOnBeanFactoryPostProcessorTests.java
|
{
"start": 4678,
"end": 4904
}
|
class ____ extends AbstractDependsOnBeanFactoryPostProcessor {
protected FooDependsOnBarNamePostProcessor() {
super(Foo.class, FooFactoryBean.class, "bar", "barFactoryBean");
}
}
static
|
FooDependsOnBarNamePostProcessor
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/cluster/api/sync/Executions.java
|
{
"start": 385,
"end": 1487
}
|
interface ____<T> extends Iterable<T> {
/**
*
* @return map between {@link RedisClusterNode} and the {@link CompletionStage}
*/
Map<RedisClusterNode, T> asMap();
/**
*
* @return collection of nodes on which the command was executed.
*/
Collection<RedisClusterNode> nodes();
/**
*
* @param redisClusterNode the node
* @return the completion stage for this node
*/
T get(RedisClusterNode redisClusterNode);
/**
*
* @return iterator over the {@link CompletionStage}s
*/
@Override
default Iterator<T> iterator() {
return asMap().values().iterator();
}
/**
*
* @return a {@code Spliterator} over the elements in this collection
*/
@Override
default Spliterator<T> spliterator() {
return Spliterators.spliterator(iterator(), nodes().size(), 0);
}
/**
* @return a sequential {@code Stream} over the elements in this collection
*/
default Stream<T> stream() {
return StreamSupport.stream(spliterator(), false);
}
}
|
Executions
|
java
|
quarkusio__quarkus
|
extensions/liquibase/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigFixture.java
|
{
"start": 765,
"end": 8057
}
|
class ____ {
@Inject
Config config;
public void assertAllConfigurationSettings(LiquibaseConfig configuration, String dataSourceName) {
assertEquals(configuration.migrateAtStart, migrateAtStart(dataSourceName));
assertEquals(configuration.cleanAtStart, cleanAtStart(dataSourceName));
assertEquals(configuration.validateOnMigrate, validateOnMigrate(dataSourceName));
assertEquals(configuration.changeLog, changeLog(dataSourceName));
assertEquals(configuration.defaultCatalogName.orElse(null), defaultCatalogName(dataSourceName));
assertEquals(configuration.defaultSchemaName.orElse(null), defaultSchemaName(dataSourceName));
assertEquals(configuration.liquibaseCatalogName.orElse(null), liquibaseCatalogName(dataSourceName));
assertEquals(configuration.liquibaseSchemaName.orElse(null), liquibaseSchemaName(dataSourceName));
assertEquals(configuration.liquibaseTablespaceName.orElse(null), liquibaseTablespaceName(dataSourceName));
assertEquals(configuration.databaseChangeLogTableName, databaseChangeLogTableName(dataSourceName));
assertEquals(configuration.databaseChangeLogLockTableName, databaseChangeLogLockTableName(dataSourceName));
assertEquals(labels(configuration), labels(dataSourceName));
assertEquals(contexts(configuration), contexts(dataSourceName));
assertEquals(changeLogParameters(configuration), changeLogParameters(dataSourceName));
}
public void assertDefaultConfigurationSettings(LiquibaseConfig configuration) {
assertEquals(configuration.changeLog, LiquibaseDataSourceBuildTimeConfig.DEFAULT_CHANGE_LOG);
assertEquals(configuration.databaseChangeLogTableName,
GlobalConfiguration.DATABASECHANGELOG_TABLE_NAME.getCurrentValue());
assertEquals(configuration.databaseChangeLogLockTableName,
GlobalConfiguration.DATABASECHANGELOGLOCK_TABLE_NAME.getCurrentValue());
assertEquals(configuration.liquibaseTablespaceName.orElse(null),
GlobalConfiguration.LIQUIBASE_TABLESPACE_NAME.getCurrentValue());
assertEquals(configuration.liquibaseCatalogName.orElse(null),
GlobalConfiguration.LIQUIBASE_CATALOG_NAME.getCurrentValue());
assertEquals(configuration.liquibaseSchemaName.orElse(null),
GlobalConfiguration.LIQUIBASE_SCHEMA_NAME.getCurrentValue());
}
public Map<String, String> changeLogParameters(LiquibaseConfig configuration) {
if (configuration.changeLogParameters == null) {
return Collections.emptyMap();
}
return configuration.changeLogParameters;
}
public Map<String, String> changeLogParameters(String datasourceName) {
String propertyName = fillin("quarkus.liquibase.%s.change-log-parameters", datasourceName);
Map<String, String> map = new HashMap<>();
StreamSupport.stream(config.getPropertyNames().spliterator(), false).filter(p -> p.startsWith(propertyName))
.forEach(p -> map.put(unquote(p.substring(propertyName.length() + 1)), config.getValue(p, String.class)));
return map;
}
private String unquote(String s) {
if (s.startsWith("\"") && s.endsWith("\"")) {
return s.substring(1, s.length() - 1);
} else {
return s;
}
}
public String contexts(LiquibaseConfig configuration) {
if (configuration.contexts == null) {
return null;
}
return String.join(",", configuration.contexts);
}
public String contexts(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.contexts", datasourceName);
}
public String labels(LiquibaseConfig configuration) {
if (configuration.labels == null) {
return null;
}
return String.join(",", configuration.labels);
}
public String labels(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.labels", datasourceName);
}
public String changeLog(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.change-log", datasourceName);
}
public String defaultCatalogName(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.default-catalog-name", datasourceName);
}
public String defaultSchemaName(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.default-schema-name", datasourceName);
}
public String username(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.username", datasourceName);
}
public String password(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.password", datasourceName);
}
public String liquibaseCatalogName(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.liquibase-catalog-name", datasourceName);
}
public String liquibaseSchemaName(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.liquibase-schema-name", datasourceName);
}
public String liquibaseTablespaceName(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.liquibase-tablespace-name", datasourceName);
}
public String databaseChangeLogTableName(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.database-change-log-table-name", datasourceName);
}
public String databaseChangeLogLockTableName(String datasourceName) {
return getStringValue("quarkus.liquibase.%s.database-change-log-lock-table-name", datasourceName);
}
public boolean migrateAtStart(String datasourceName) {
return getBooleanValue("quarkus.liquibase.%s.migrate-at-start", datasourceName);
}
public boolean cleanAtStart(String datasourceName) {
return getBooleanValue("quarkus.liquibase.%s.clean-at-start", datasourceName);
}
public boolean validateOnMigrate(String datasourceName) {
return getBooleanValue("quarkus.liquibase.%s.validate-on-migrate", datasourceName);
}
private String getStringValue(String parameterName, String datasourceName) {
return getValue(parameterName, datasourceName, String.class);
}
private boolean getBooleanValue(String parameterName, String datasourceName) {
return getValue(parameterName, datasourceName, Boolean.class);
}
private <T> T getValue(String parameterName, String datasourceName, Class<T> type) {
return getValue(parameterName, datasourceName, type, this::log);
}
private <T> T getValue(String parameterName, String datasourceName, Class<T> type, Consumer<String> logger) {
String propertyName = fillin(parameterName, datasourceName);
T propertyValue = config.getValue(propertyName, type);
logger.accept("Config property " + propertyName + " = " + propertyValue);
return propertyValue;
}
private void log(String content) {
//activate for debugging
// System.out.println(content);
}
private String fillin(String propertyName, String datasourceName) {
return String.format(propertyName, datasourceName).replace("..", ".");
}
}
|
LiquibaseExtensionConfigFixture
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/array/B.java
|
{
"start": 177,
"end": 306
}
|
class ____ {
private Integer id;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
|
B
|
java
|
apache__flink
|
flink-queryable-state/flink-queryable-state-runtime/src/main/java/org/apache/flink/queryablestate/messages/KvStateInternalRequest.java
|
{
"start": 2720,
"end": 3522
}
|
class ____
implements MessageDeserializer<KvStateInternalRequest> {
@Override
public KvStateInternalRequest deserializeMessage(ByteBuf buf) {
KvStateID kvStateId = new KvStateID(buf.readLong(), buf.readLong());
int length = buf.readInt();
Preconditions.checkArgument(
length >= 0,
"Negative length for key and namespace. "
+ "This indicates a serialization error.");
byte[] serializedKeyAndNamespace = new byte[length];
if (length > 0) {
buf.readBytes(serializedKeyAndNamespace);
}
return new KvStateInternalRequest(kvStateId, serializedKeyAndNamespace);
}
}
}
|
KvStateInternalRequestDeserializer
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java
|
{
"start": 87461,
"end": 89852
}
|
class ____ applied to entity classes,
// so that ByteBuddy can take these transformations into account.
// This is especially important when getters/setters are added to entity classes,
// because we want those methods to be overridden in proxies to trigger proxy initialization.
private TypePool createTransformedClassesTypePool(TransformedClassesBuildItem transformedClassesBuildItem,
Set<String> entityClasses) {
Map<String, byte[]> transformedClasses = new HashMap<>();
for (Set<TransformedClassesBuildItem.TransformedClass> transformedClassSet : transformedClassesBuildItem
.getTransformedClassesByJar().values()) {
for (TransformedClassesBuildItem.TransformedClass transformedClass : transformedClassSet) {
String className = transformedClass.getClassName();
if (entityClasses.contains(className)) {
transformedClasses.put(className, transformedClass.getData());
}
}
}
ClassFileLocator classFileLocator = new ClassFileLocator.Compound(
new ClassFileLocator.Simple(transformedClasses),
QuarkusClassFileLocator.INSTANCE);
// we can reuse the core TypePool but we may not reuse the full enhancer TypePool
// or PublicFieldWithProxyAndLazyLoadingAndInheritanceTest will fail
return new TypePool.Default(new CacheProvider.Simple(), classFileLocator, ReaderMode.FAST,
HibernateEntityEnhancer.CORE_TYPE_POOL);
}
private boolean isModified(String entity, Set<String> changedClasses, IndexView index) {
if (changedClasses.contains(entity)) {
return true;
}
ClassInfo clazz = index.getClassByName(DotName.createSimple(entity));
if (clazz == null) {
//if it is not in the index, then it has not been modified
return false;
}
for (DotName i : clazz.interfaceNames()) {
if (isModified(i.toString(), changedClasses, index)) {
return true;
}
}
DotName superName = clazz.superName();
if (superName != null && !DotName.OBJECT_NAME.equals(superName)) {
return isModified(superName.toString(), changedClasses, index);
}
return false;
}
private static final
|
transformations
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableEnumCheckerTest.java
|
{
"start": 4357,
"end": 4575
}
|
class ____ {}
""")
.addSourceLines(
"Test.java",
"""
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
|
Foo
|
java
|
google__guice
|
core/test/com/google/inject/internal/SpiUtils.java
|
{
"start": 54019,
"end": 54150
}
|
enum ____ {
INSTANCE,
LINKED,
PROVIDER_INSTANCE,
PROVIDER_KEY
}
/** The result of the binding. */
static
|
BindType
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/bugs/injection/ChildWithSameParentFieldInjectionTest.java
|
{
"start": 423,
"end": 800
}
|
class ____ {
@InjectMocks private System system;
@Mock private SomeService someService;
@Test
public void parent_field_is_not_null() {
assertNotNull(((AbstractSystem) system).someService);
}
@Test
public void child_field_is_not_null() {
assertNotNull(system.someService);
}
public static
|
ChildWithSameParentFieldInjectionTest
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/ExtensionLoaderConfig.java
|
{
"start": 234,
"end": 451
}
|
interface ____ {
/**
* Report runtime Config objects used during deployment time.
*/
@WithDefault("warn")
ReportRuntimeConfigAtDeployment reportRuntimeConfigAtDeployment();
|
ExtensionLoaderConfig
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/cache/LruContextCacheTests.java
|
{
"start": 8335,
"end": 14892
}
|
class ____ {
/**
* Mimics a database shared across application contexts.
*/
private static final Set<String> database = new HashSet<>();
private static final List<String> events = new ArrayList<>();
@BeforeEach
@AfterEach
void resetTracking() {
resetEvents();
DatabaseInitializer.counter.set(0);
database.clear();
}
@Test
void maxCacheSizeOne() {
DefaultContextCache contextCache = new DefaultContextCache(1);
// -----------------------------------------------------------------
// Get ApplicationContext for TestCase1.
Class<?> testClass1 = TestCase1.class;
TestContext testContext1 = TestContextTestUtils.buildTestContext(testClass1, contextCache);
testContext1.getApplicationContext();
assertContextCacheStatistics(contextCache, testClass1.getSimpleName(), 1, 1, 0, 1);
assertCacheContents(contextCache, "Config1");
assertThat(database).containsExactly("enigma1");
assertThat(events).containsExactly("START 1");
resetEvents();
// -----------------------------------------------------------------
// Get ApplicationContext for TestCase2.
Class<?> testClass2 = TestCase2.class;
TestContext testContext2 = TestContextTestUtils.buildTestContext(testClass2, contextCache);
testContext2.getApplicationContext();
assertContextCacheStatistics(contextCache, testClass2.getSimpleName(), 1, 1, 0, 2);
assertCacheContents(contextCache, "Config2");
assertThat(database).containsExactly("enigma2");
assertThat(events).containsExactly("CLOSE 1", "START 2");
resetEvents();
// -----------------------------------------------------------------
// Get ApplicationContext for TestCase3.
Class<?> testClass3 = TestCase3.class;
TestContext testContext3 = TestContextTestUtils.buildTestContext(testClass3, contextCache);
testContext3.getApplicationContext();
assertContextCacheStatistics(contextCache, testClass3.getSimpleName(), 1, 1, 0, 3);
assertCacheContents(contextCache, "Config3");
assertThat(database).containsExactly("enigma3");
assertThat(events).containsExactly("CLOSE 2", "START 3");
resetEvents();
// -----------------------------------------------------------------
// Get ApplicationContext for TestCase1 again.
testContext1.getApplicationContext();
assertContextCacheStatistics(contextCache, testClass1.getSimpleName(), 1, 1, 0, 4);
assertCacheContents(contextCache, "Config1");
assertThat(database).containsExactly("enigma4");
assertThat(events).containsExactly("CLOSE 3", "START 4");
resetEvents();
// -----------------------------------------------------------------
testContext1.markApplicationContextDirty(HierarchyMode.EXHAUSTIVE);
assertThat(events).containsExactly("CLOSE 4");
assertThat(database).isEmpty();
assertThat(contextCache.size()).isZero();
}
@Test
void maxCacheSizeTwo() {
DefaultContextCache contextCache = new DefaultContextCache(2);
// -----------------------------------------------------------------
// Get ApplicationContext for TestCase1.
Class<?> testClass1 = TestCase1.class;
TestContext testContext1 = TestContextTestUtils.buildTestContext(testClass1, contextCache);
testContext1.getApplicationContext();
assertContextCacheStatistics(contextCache, testClass1.getSimpleName(), 1, 1, 0, 1);
testContext1.markApplicationContextUnused();
assertContextCacheStatistics(contextCache, testClass1.getSimpleName(), 1, 0, 0, 1);
assertCacheContents(contextCache, "Config1");
assertThat(events).containsExactly("START 1");
assertThat(database).containsExactly("enigma1");
resetEvents();
// -----------------------------------------------------------------
// Get ApplicationContext for TestCase2.
Class<?> testClass2 = TestCase2.class;
TestContext testContext2 = TestContextTestUtils.buildTestContext(testClass2, contextCache);
testContext2.getApplicationContext();
assertContextCacheStatistics(contextCache, testClass2.getSimpleName(), 2, 1, 0, 2);
testContext2.markApplicationContextUnused();
assertContextCacheStatistics(contextCache, testClass2.getSimpleName(), 2, 0, 0, 2);
assertCacheContents(contextCache, "Config1", "Config2");
assertThat(events).containsExactly("START 2");
assertThat(database).containsExactly("enigma1", "enigma2");
resetEvents();
// -----------------------------------------------------------------
// Get ApplicationContext for TestCase3.
Class<?> testClass3 = TestCase3.class;
TestContext testContext3 = TestContextTestUtils.buildTestContext(testClass3, contextCache);
testContext3.getApplicationContext();
assertContextCacheStatistics(contextCache, testClass3.getSimpleName(), 2, 1, 0, 3);
testContext3.markApplicationContextUnused();
assertContextCacheStatistics(contextCache, testClass3.getSimpleName(), 2, 0, 0, 3);
assertCacheContents(contextCache, "Config2", "Config3");
assertThat(events).containsExactly("CLOSE 1", "START 3");
// Closing App #1 removed "enigma1" and "enigma2" from the database.
assertThat(database).containsExactly("enigma3");
resetEvents();
// -----------------------------------------------------------------
// Get ApplicationContext for TestCase1 again.
testContext1.getApplicationContext();
assertContextCacheStatistics(contextCache, testClass1.getSimpleName(), 2, 1, 0, 4);
testContext1.markApplicationContextUnused();
assertContextCacheStatistics(contextCache, testClass1.getSimpleName(), 2, 0, 0, 4);
assertCacheContents(contextCache, "Config3", "Config1");
assertThat(events).containsExactly("CLOSE 2", "START 4");
// Closing App #2 removed "enigma3" from the database.
assertThat(database).containsExactly("enigma4");
resetEvents();
// -----------------------------------------------------------------
testContext3.markApplicationContextDirty(HierarchyMode.EXHAUSTIVE);
assertThat(events).containsExactly("CLOSE 3");
resetEvents();
testContext1.markApplicationContextDirty(HierarchyMode.EXHAUSTIVE);
assertThat(events).containsExactly("CLOSE 4");
assertThat(database).isEmpty();
assertThat(contextCache.size()).isZero();
}
private static void resetEvents() {
events.clear();
}
/**
* Mimics a Spring component that inserts data into the database when the
* application context is started and drops data from a database when the
* application context is closed.
*
* @see org.springframework.jdbc.datasource.init.DataSourceInitializer
*/
static
|
PutWithLoadFunctionIntegrationTests
|
java
|
apache__maven
|
its/core-it-suite/src/test/resources/mng-5578-session-scope/plugin/src/main/java/org/apache/maven/its/mng5530/sessionscope/TestSessionScopeMojo.java
|
{
"start": 1970,
"end": 2302
}
|
class ____ extends AbstractMojo {
@Component
private TestSessionScopedComponent sessionScopedComponent;
public void execute() throws MojoExecutionException, MojoFailureException {
if (sessionScopedComponent.getSession() == null) {
throw new NullPointerException();
}
}
}
|
TestSessionScopeMojo
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/security/permission/HttpSecPolicyGrantingRolesTest.java
|
{
"start": 6027,
"end": 8286
}
|
class ____ implements Handler<RoutingContext> {
private final Supplier<Uni<Void>> callService;
private RouteHandler(Supplier<Uni<Void>> callService) {
this.callService = callService;
}
@Override
public void handle(RoutingContext event) {
// activate context so that we can use CDI beans
Arc.container().requestContext().activate();
// set identity used by security checks performed by standard security interceptors
QuarkusHttpUser user = (QuarkusHttpUser) event.user();
Arc.container().instance(SecurityIdentityAssociation.class).get().setIdentity(user.getSecurityIdentity());
callService.get().subscribe().with(unused -> {
String ret = user.getSecurityIdentity().getPrincipal().getName() +
":" + event.normalizedPath();
event.response().end(ret);
}, throwable -> {
if (throwable instanceof UnauthorizedException) {
event.response().setStatusCode(401);
} else if (throwable instanceof ForbiddenException) {
event.response().setStatusCode(403);
} else {
event.response().setStatusCode(500);
}
event.end();
});
}
}
private void assertSuccess(AuthenticatedUser user, String... paths) {
user.authenticate();
for (var path : paths) {
RestAssured
.given()
.auth()
.basic(user.role(), user.role())
.get(path)
.then()
.statusCode(200)
.body(Matchers.is(user.role() + ":" + path));
}
}
private void assertForbidden(AuthenticatedUser user, String... paths) {
user.authenticate();
for (var path : paths) {
RestAssured
.given()
.auth()
.basic(user.role(), user.role())
.get(path)
.then()
.statusCode(403);
}
}
@ApplicationScoped
public static
|
RouteHandler
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OAuth2AuthorizationCodeGrantTests.java
|
{
"start": 66306,
"end": 67163
}
|
class ____ extends AuthorizationServerConfiguration {
@Bean
JwtEncoder jwtEncoder() {
return jwtEncoder;
}
@Bean
OAuth2TokenGenerator<?> tokenGenerator() {
JwtGenerator jwtGenerator = new JwtGenerator(jwtEncoder());
jwtGenerator.setJwtCustomizer(jwtCustomizer());
OAuth2RefreshTokenGenerator refreshTokenGenerator = new OAuth2RefreshTokenGenerator();
OAuth2TokenGenerator<OAuth2Token> delegatingTokenGenerator = new DelegatingOAuth2TokenGenerator(
jwtGenerator, refreshTokenGenerator);
return spy(new OAuth2TokenGenerator<OAuth2Token>() {
@Override
public OAuth2Token generate(OAuth2TokenContext context) {
return delegatingTokenGenerator.generate(context);
}
});
}
}
@EnableWebSecurity
@Import(OAuth2AuthorizationServerConfiguration.class)
static
|
AuthorizationServerConfigurationWithTokenGenerator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java
|
{
"start": 585,
"end": 3767
}
|
class ____ extends AbstractKqlParserTestCase {
public void testEmptyQueryParsing() {
{
// In Kql, an empty query is a match_all query.
assertThat(parseKqlQuery(""), isA(MatchAllQueryBuilder.class));
}
for (int runs = 0; runs < 100; runs++) {
// Also testing that a query that is composed only of whitespace chars returns a match_all query.
String kqlQuery = randomWhitespaces();
assertThat(parseKqlQuery(kqlQuery), isA(MatchAllQueryBuilder.class));
}
}
public void testMatchAllQuery() {
assertThat(parseKqlQuery("*"), isA(MatchAllQueryBuilder.class));
assertThat(parseKqlQuery(wrapWithRandomWhitespaces("*")), isA(MatchAllQueryBuilder.class));
assertThat(parseKqlQuery("*:*"), isA(MatchAllQueryBuilder.class));
assertThat(parseKqlQuery(String.join(wrapWithRandomWhitespaces(":"), "*", "*")), isA(MatchAllQueryBuilder.class));
}
public void testParenthesizedQuery() throws IOException {
for (String baseQuery : readQueries(SUPPORTED_QUERY_FILE_PATH)) {
// For each supported query, wrap it into parentheses and check query remains the same.
// Adding random whitespaces as well and test they are ignored.
String parenthesizedQuery = "(" + baseQuery + ")";
assertThat(parseKqlQuery(parenthesizedQuery), equalTo(parseKqlQuery(baseQuery)));
}
}
public void testSupportedQueries() throws IOException {
for (String query : readQueries(SUPPORTED_QUERY_FILE_PATH)) {
try {
QueryBuilder parsedQuery = parseKqlQuery(query);
// leading and trailing whitespaces does not change the query builing result:
assertThat(parseKqlQuery(wrapWithRandomWhitespaces(query)), equalTo(parsedQuery));
} catch (Throwable e) {
throw new AssertionError("Unexpected error during query parsing [ " + query + "]", e);
}
}
}
public void testUnsupportedQueries() throws IOException {
for (String query : readQueries(UNSUPPORTED_QUERY_FILE_PATH)) {
assertThrows(
"Was expecting a KqlParsingException exception to be thrown while parsing query [" + query + "]",
KqlParsingException.class,
() -> parseKqlQuery(query)
);
}
}
public void testSyntaxErrorsHandling() {
{
KqlParsingException e = assertThrows(KqlParsingException.class, () -> parseKqlQuery("foo: \"bar"));
assertThat(e.getLineNumber(), equalTo(1));
assertThat(e.getColumnNumber(), equalTo(6));
assertThat(e.getMessage(), equalTo("line 1:6: token recognition error at: '\"bar'"));
}
{
KqlParsingException e = assertThrows(KqlParsingException.class, () -> parseKqlQuery("foo: (bar baz AND qux"));
assertThat(e.getLineNumber(), equalTo(1));
assertThat(e.getColumnNumber(), equalTo(22));
assertThat(e.getMessage(), containsString("line 1:22: missing ')' at '<EOF>'"));
}
}
}
|
KqlParserTests
|
java
|
apache__kafka
|
connect/api/src/main/java/org/apache/kafka/connect/connector/policy/ConnectorClientConfigRequest.java
|
{
"start": 990,
"end": 4213
}
|
class ____ {
private final Map<String, Object> clientProps;
private final ClientType clientType;
private final String connectorName;
private final ConnectorType connectorType;
private final Class<? extends Connector> connectorClass;
public ConnectorClientConfigRequest(
String connectorName,
ConnectorType connectorType,
Class<? extends Connector> connectorClass,
Map<String, Object> clientProps,
ClientType clientType) {
this.clientProps = clientProps;
this.clientType = clientType;
this.connectorName = connectorName;
this.connectorType = connectorType;
this.connectorClass = connectorClass;
}
/**
* Provides configs with prefix "{@code producer.override.}" for {@link ConnectorType#SOURCE source connectors} and
* also {@link ConnectorType#SINK sink connectors} that are configured with a DLQ topic.
* <p>
* Provides configs with prefix "{@code consumer.override.}" for {@link ConnectorType#SINK sink connectors} and also
* {@link ConnectorType#SOURCE source connectors} that are configured with a connector specific offsets topic (see
* <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-618%3A+Exactly-Once+Support+for+Source+Connectors">KIP-618</a>
* for more details).
* <p>
* Provides configs with prefix "{@code admin.override.}" for {@link ConnectorType#SINK sink connectors} configured
* with a DLQ topic and {@link ConnectorType#SOURCE source connectors} that are configured with exactly-once semantics,
* a connector specific offsets topic or topic creation enabled (see
* <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-158%3A+Kafka+Connect+should+allow+source+connectors+to+set+topic-specific+settings+for+new+topics">KIP-158</a>
* for more details).
*
* @return The client override properties specified in the Connector Config with prefix "{@code producer.override.}",
* "{@code consumer.override.}" or "{@code admin.override.}". The returned configs don't include the prefixes.
*/
public Map<String, Object> clientProps() {
return clientProps;
}
/**
* <p>{@link ClientType#PRODUCER} for {@link ConnectorType#SOURCE}
* <p>{@link ClientType#CONSUMER} for {@link ConnectorType#SINK}
* <p>{@link ClientType#PRODUCER} for DLQ in {@link ConnectorType#SINK}
* <p>{@link ClientType#ADMIN} for DLQ Topic Creation in {@link ConnectorType#SINK}
*
* @return enumeration specifying the client type that is being overridden by the worker; never null.
*/
public ClientType clientType() {
return clientType;
}
/**
* Name of the connector specified in the connector config.
*
* @return name of the connector; never null.
*/
public String connectorName() {
return connectorName;
}
/**
* Type of the Connector.
*
* @return enumeration specifying the type of the connector - {@link ConnectorType#SINK} or {@link ConnectorType#SOURCE}.
*/
public ConnectorType connectorType() {
return connectorType;
}
/**
* The
|
ConnectorClientConfigRequest
|
java
|
google__guice
|
core/src/com/google/inject/internal/util/LineNumbers.java
|
{
"start": 5093,
"end": 6668
}
|
class ____ extends ClassVisitor {
private int line = -1;
private String pendingMethod;
private String name;
LineNumberReader() {
super(ASM_API_LEVEL);
}
@Override
public void visit(
int version,
int access,
String name,
String signature,
String superName,
String[] interfaces) {
this.name = name;
}
@Override
public MethodVisitor visitMethod(
int access, String name, String desc, String signature, String[] exceptions) {
if ((access & Opcodes.ACC_PRIVATE) != 0) {
return null;
}
pendingMethod = name + desc;
line = -1;
return new LineNumberMethodVisitor();
}
@Override
public void visitSource(String source, String debug) {
LineNumbers.this.source = source;
}
public void visitLineNumber(int line, Label start) {
if (line < firstLine) {
firstLine = line;
}
this.line = line;
if (pendingMethod != null) {
lines.put(pendingMethod, line);
pendingMethod = null;
}
}
@Override
public FieldVisitor visitField(
int access, String name, String desc, String signature, Object value) {
return null;
}
@Override
public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
return new LineNumberAnnotationVisitor();
}
public AnnotationVisitor visitParameterAnnotation(int parameter, String desc, boolean visible) {
return new LineNumberAnnotationVisitor();
}
|
LineNumberReader
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/server/endpoint/MessageEndpoint.java
|
{
"start": 1349,
"end": 2127
}
|
class ____ {
//end::endpointClassBegin[]
//tag::message[]
String message;
//end::message[]
@PostConstruct
public void init() {
this.message = "default message";
}
@Read
public String message() {
return this.message;
}
//tag::writeArg[]
@Write(consumes = MediaType.APPLICATION_FORM_URLENCODED, produces = MediaType.TEXT_PLAIN)
public String updateMessage(String newMessage) {
this.message = newMessage;
return "Message updated";
}
//end::writeArg[]
//tag::simpleDelete[]
@Delete
public String deleteMessage() {
this.message = null;
return "Message deleted";
}
//end::simpleDelete[]
//tag::endpointClassEnd[]
}
//end::endpointClassEnd[]
|
MessageEndpoint
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/test/condition/RedisConditions.java
|
{
"start": 792,
"end": 3726
}
|
class ____ {
private final Map<String, Integer> commands;
private final Version version;
private RedisConditions(RedisClusterCommands<String, String> commands) {
List<CommandDetail> result = CommandDetailParser.parse(commands.command());
this.commands = result.stream()
.collect(Collectors.toMap(commandDetail -> commandDetail.getName().toUpperCase(), CommandDetail::getArity));
String info = commands.info("server");
try {
ByteArrayInputStream inputStream = new ByteArrayInputStream(info.getBytes());
Properties p = new Properties();
p.load(inputStream);
version = Version.parse(p.getProperty("redis_version"));
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
/**
* Create {@link RedisCommands} given {@link StatefulRedisConnection}.
*
* @param connection must not be {@code null}.
* @return
*/
public static RedisConditions of(StatefulRedisConnection<String, String> connection) {
return new RedisConditions(connection.sync());
}
/**
* Create {@link RedisCommands} given {@link StatefulRedisClusterConnection}.
*
* @param connection must not be {@code null}.
* @return
*/
public static RedisConditions of(StatefulRedisClusterConnection<String, String> connection) {
return new RedisConditions(connection.sync());
}
/**
* Create {@link RedisConditions} given {@link RedisCommands}.
*
* @param commands must not be {@code null}.
* @return
*/
public static RedisConditions of(RedisClusterCommands<String, String> commands) {
return new RedisConditions(commands);
}
/**
* @return the Redis {@link Version}.
*/
public Version getRedisVersion() {
return version;
}
/**
* @param command
* @return {@code true} if the command is present.
*/
public boolean hasCommand(String command) {
return commands.containsKey(command.toUpperCase());
}
/**
* @param command command name.
* @param arity expected arity.
* @return {@code true} if the command is present with the given arity.
*/
public boolean hasCommandArity(String command, int arity) {
if (!hasCommand(command)) {
throw new IllegalStateException("Unknown command: " + command + " in " + commands);
}
return commands.get(command.toUpperCase()) == arity;
}
/**
* @param versionNumber
* @return {@code true} if the version number is met.
*/
public boolean hasVersionGreaterOrEqualsTo(String versionNumber) {
return version.isGreaterThanOrEqualTo(Version.parse(versionNumber));
}
/**
* Value object to represent a Version consisting of major, minor and bugfix part.
*/
public static
|
RedisConditions
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customproviders/AsyncResponseWithExceptionAndFiltersTargetTest.java
|
{
"start": 5676,
"end": 6036
}
|
class ____ extends RuntimeException {
private final boolean handle;
public DummyException(boolean handle) {
super("dummy");
this.handle = handle;
setStackTrace(new StackTraceElement[0]);
}
public boolean isHandle() {
return handle;
}
}
public static
|
DummyException
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java
|
{
"start": 4512,
"end": 5993
}
|
class ____ extends RuntimeException {
static final long serialVersionUID = 1L;
public OutOfOrderException(String text) {
super(text);
}
}
LoggedJob nextJob() throws IOException, OutOfOrderException {
LoggedJob newJob = rawNextJob();
if (newJob != null) {
skewBuffer.add(newJob);
}
LoggedJob result = skewBuffer.poll();
while (result != null && result.getSubmitTime() < returnedLatestSubmitTime) {
LOG.error("The current job was submitted earlier than the previous one");
LOG.error("Its jobID is " + result.getJobID());
LOG.error("Its submit time is " + result.getSubmitTime()
+ ",but the previous one was " + returnedLatestSubmitTime);
if (abortOnUnfixableSkew) {
throw new OutOfOrderException("Job submit time is "
+ result.getSubmitTime() + ",but the previous one was "
+ returnedLatestSubmitTime);
}
result = rawNextJob();
}
if (result != null) {
returnedLatestSubmitTime = result.getSubmitTime();
}
return result;
}
private void fillSkewBuffer() throws IOException {
for (int i = 0; i < skewBufferLength; ++i) {
LoggedJob newJob = rawNextJob();
if (newJob == null) {
return;
}
skewBuffer.add(newJob);
}
}
int neededSkewBufferSize() {
return maxSkewBufferNeeded;
}
@Override
public void close() throws IOException {
reader.close();
}
}
|
OutOfOrderException
|
java
|
grpc__grpc-java
|
xds/src/test/java/io/grpc/xds/internal/security/trust/XdsX509TrustManagerTest.java
|
{
"start": 2911,
"end": 37556
}
|
class ____ {
@Rule
public final MockitoRule mockitoRule = MockitoJUnit.rule();
@Mock
private X509ExtendedTrustManager mockDelegate;
@Mock
private SSLSession mockSession;
private XdsX509TrustManager trustManager;
private final TestParam testParam;
public XdsX509TrustManagerTest(TestParam testParam) {
this.testParam = testParam;
}
@Test
public void nullCertContextTest() throws CertificateException, IOException {
trustManager = new XdsX509TrustManager(null, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, new ArrayList<>());
}
@Test
@SuppressWarnings("deprecation")
public void emptySanListContextTest() throws CertificateException, IOException {
CertificateValidationContext certContext = CertificateValidationContext.getDefaultInstance();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void missingPeerCerts() {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("foo.com").build();
@SuppressWarnings("deprecation")
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
try {
trustManager.verifySubjectAltNameInChain(null, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate(s) missing");
}
}
@Test
@SuppressWarnings("deprecation")
public void emptyArrayPeerCerts() {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("foo.com").build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
try {
trustManager.verifySubjectAltNameInChain(
new X509Certificate[0], certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate(s) missing");
}
}
@Test
@SuppressWarnings("deprecation")
public void noSansInPeerCerts() throws CertificateException, IOException {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("foo.com").build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(CLIENT_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCertsVerifies() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setExact("waterzooi.test.google.be")
.setIgnoreCase(false)
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCertsVerifies_differentCase_expectException()
throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setExact("waterZooi.test.Google.be")
.setIgnoreCase(false)
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCertsVerifies_ignoreCase() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder().setExact("Waterzooi.Test.google.be").setIgnoreCase(true).build();
@SuppressWarnings("deprecation")
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_prefix() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setPrefix("waterzooi.") // test.google.be
.setIgnoreCase(false)
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCertsPrefix_differentCase_expectException()
throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder().setPrefix("waterZooi.").setIgnoreCase(false).build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_prefixIgnoreCase() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setPrefix("WaterZooi.") // test.google.be
.setIgnoreCase(true)
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_suffix() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder().setSuffix(".google.be").setIgnoreCase(false).build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCertsSuffix_differentCase_expectException()
throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder().setSuffix(".gooGle.bE").setIgnoreCase(false).build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_suffixIgnoreCase() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder().setSuffix(".GooGle.BE").setIgnoreCase(true).build();
@SuppressWarnings("deprecation")
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_substring() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder().setContains("zooi.test.google").setIgnoreCase(false).build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCertsSubstring_differentCase_expectException()
throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder().setContains("zooi.Test.gooGle").setIgnoreCase(false).build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_substringIgnoreCase() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder().setContains("zooI.Test.Google").setIgnoreCase(true).build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_safeRegex() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setSafeRegex(
RegexMatcher.newBuilder().setRegex("water[[:alpha:]]{1}ooi\\.test\\.google\\.be"))
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_safeRegex1() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setSafeRegex(
RegexMatcher.newBuilder().setRegex("no-match-string|\\*\\.test\\.youtube\\.com"))
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_safeRegex_ipAddress() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setSafeRegex(
RegexMatcher.newBuilder().setRegex("([[:digit:]]{1,3}\\.){3}[[:digit:]]{1,3}"))
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCerts_safeRegex_noMatch() throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setSafeRegex(
RegexMatcher.newBuilder().setRegex("water[[:alpha:]]{2}ooi\\.test\\.google\\.be"))
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCertsVerifiesMultipleVerifySans()
throws CertificateException, IOException {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("x.foo.com").build();
StringMatcher stringMatcher1 =
StringMatcher.newBuilder().setExact("waterzooi.test.google.be").build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder()
.addMatchSubjectAltNames(stringMatcher)
.addMatchSubjectAltNames(stringMatcher1)
.build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneSanInPeerCertsNotFoundException()
throws CertificateException, IOException {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("x.foo.com").build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void wildcardSanInPeerCertsVerifiesMultipleVerifySans()
throws CertificateException, IOException {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("x.foo.com").build();
StringMatcher stringMatcher1 =
StringMatcher.newBuilder().setSuffix("test.youTube.Com").setIgnoreCase(true).build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder()
.addMatchSubjectAltNames(stringMatcher)
.addMatchSubjectAltNames(stringMatcher1) // should match suffix test.youTube.Com
.build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void wildcardSanInPeerCertsVerifiesMultipleVerifySans1()
throws CertificateException, IOException {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("x.foo.com").build();
StringMatcher stringMatcher1 =
StringMatcher.newBuilder().setContains("est.Google.f").setIgnoreCase(true).build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder()
.addMatchSubjectAltNames(stringMatcher)
.addMatchSubjectAltNames(stringMatcher1) // should contain est.Google.f
.build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void wildcardSanInPeerCertsSubdomainMismatch()
throws CertificateException, IOException {
// 2. Asterisk (*) cannot match across domain name labels.
// For example, *.example.com matches test.example.com but does not match
// sub.test.example.com.
StringMatcher stringMatcher =
StringMatcher.newBuilder().setExact("sub.abc.test.youtube.com").build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void oneIpAddressInPeerCertsVerifies() throws CertificateException, IOException {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("x.foo.com").build();
StringMatcher stringMatcher1 = StringMatcher.newBuilder().setExact("192.168.1.3").build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder()
.addMatchSubjectAltNames(stringMatcher)
.addMatchSubjectAltNames(stringMatcher1)
.build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
}
@Test
@SuppressWarnings("deprecation")
public void oneIpAddressInPeerCertsMismatch() throws CertificateException, IOException {
StringMatcher stringMatcher = StringMatcher.newBuilder().setExact("x.foo.com").build();
StringMatcher stringMatcher1 = StringMatcher.newBuilder().setExact("192.168.2.3").build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder()
.addMatchSubjectAltNames(stringMatcher)
.addMatchSubjectAltNames(stringMatcher1)
.build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
public void checkServerTrustedSslEngine()
throws CertificateException, IOException, CertStoreException {
TestSslEngine sslEngine = buildTrustManagerAndGetSslEngine();
X509Certificate[] serverCerts =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.checkServerTrusted(serverCerts, "ECDHE_ECDSA", sslEngine);
verify(sslEngine, times(1)).getHandshakeSession();
assertThat(sslEngine.getSSLParameters().getEndpointIdentificationAlgorithm()).isEmpty();
}
@Test
public void checkServerTrustedSslEngineSpiffeTrustMap()
throws CertificateException, IOException, CertStoreException {
TestSslEngine sslEngine = buildTrustManagerAndGetSslEngine();
X509Certificate[] serverCerts =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_SPIFFE_PEM_FILE));
List<X509Certificate> caCerts = Arrays.asList(CertificateUtils
.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE)));
trustManager = XdsTrustManagerFactory.createX509TrustManager(
ImmutableMap.of("example.com", caCerts), null, false);
trustManager.checkServerTrusted(serverCerts, "ECDHE_ECDSA", sslEngine);
verify(sslEngine, times(1)).getHandshakeSession();
assertThat(sslEngine.getSSLParameters().getEndpointIdentificationAlgorithm()).isEmpty();
}
@Test
public void checkServerTrustedSslEngineSpiffeTrustMap_missing_spiffe_id()
throws CertificateException, IOException, CertStoreException {
TestSslEngine sslEngine = buildTrustManagerAndGetSslEngine();
X509Certificate[] serverCerts =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
List<X509Certificate> caCerts = Arrays.asList(CertificateUtils
.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE)));
trustManager = XdsTrustManagerFactory.createX509TrustManager(
ImmutableMap.of("example.com", caCerts), null, false);
try {
trustManager.checkServerTrusted(serverCerts, "ECDHE_ECDSA", sslEngine);
fail("exception expected");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat()
.isEqualTo("Failed to extract SPIFFE ID from peer leaf certificate");
}
}
@Test
public void checkServerTrustedSpiffeSslEngineTrustMap_missing_trust_domain()
throws CertificateException, IOException, CertStoreException {
TestSslEngine sslEngine = buildTrustManagerAndGetSslEngine();
X509Certificate[] serverCerts =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_SPIFFE_PEM_FILE));
List<X509Certificate> caCerts = Arrays.asList(CertificateUtils
.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE)));
trustManager = XdsTrustManagerFactory.createX509TrustManager(
ImmutableMap.of("unknown.com", caCerts), null, false);
try {
trustManager.checkServerTrusted(serverCerts, "ECDHE_ECDSA", sslEngine);
fail("exception expected");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Spiffe Trust Map doesn't contain trust"
+ " domain 'example.com' from peer leaf certificate");
}
}
@Test
public void checkClientTrustedSpiffeTrustMap()
throws CertificateException, IOException, CertStoreException {
X509Certificate[] clientCerts =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(CLIENT_SPIFFE_PEM_FILE));
List<X509Certificate> caCerts = Arrays.asList(CertificateUtils
.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE)));
trustManager = XdsTrustManagerFactory.createX509TrustManager(
ImmutableMap.of("foo.bar.com", caCerts), null, false);
trustManager.checkClientTrusted(clientCerts, "RSA");
}
@Test
public void checkServerTrustedSslEngine_untrustedServer_expectException()
throws CertificateException, IOException, CertStoreException {
TestSslEngine sslEngine = buildTrustManagerAndGetSslEngine();
X509Certificate[] badServerCert =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(BAD_SERVER_PEM_FILE));
try {
trustManager.checkServerTrusted(badServerCert, "ECDHE_ECDSA", sslEngine);
fail("exception expected");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat()
.endsWith("unable to find valid certification path to requested target");
}
verify(sslEngine, times(1)).getHandshakeSession();
}
@Test
public void checkServerTrustedSslSocket()
throws CertificateException, IOException, CertStoreException {
TestSslSocket sslSocket = buildTrustManagerAndGetSslSocket();
X509Certificate[] serverCerts =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE));
trustManager.checkServerTrusted(serverCerts, "ECDHE_ECDSA", sslSocket);
verify(sslSocket, times(1)).isConnected();
verify(sslSocket, times(1)).getHandshakeSession();
assertThat(sslSocket.getSSLParameters().getEndpointIdentificationAlgorithm()).isEmpty();
}
@Test
public void checkServerTrustedSslSocketSpiffeTrustMap()
throws CertificateException, IOException, CertStoreException {
TestSslSocket sslSocket = buildTrustManagerAndGetSslSocket();
X509Certificate[] serverCerts =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_SPIFFE_PEM_FILE));
List<X509Certificate> caCerts = Arrays.asList(CertificateUtils
.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE)));
trustManager = XdsTrustManagerFactory.createX509TrustManager(
ImmutableMap.of("example.com", caCerts), null, false);
trustManager.checkServerTrusted(serverCerts, "ECDHE_ECDSA", sslSocket);
verify(sslSocket, times(1)).isConnected();
verify(sslSocket, times(1)).getHandshakeSession();
assertThat(sslSocket.getSSLParameters().getEndpointIdentificationAlgorithm()).isEmpty();
}
@Test
public void checkServerTrustedSslSocket_untrustedServer_expectException()
throws CertificateException, IOException, CertStoreException {
TestSslSocket sslSocket = buildTrustManagerAndGetSslSocket();
X509Certificate[] badServerCert =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(BAD_SERVER_PEM_FILE));
try {
trustManager.checkServerTrusted(badServerCert, "ECDHE_ECDSA", sslSocket);
fail("exception expected");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat()
.endsWith("unable to find valid certification path to requested target");
}
verify(sslSocket, times(1)).isConnected();
verify(sslSocket, times(1)).getHandshakeSession();
}
@Test
@SuppressWarnings("deprecation")
public void unsupportedAltNameType() throws CertificateException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setExact("waterzooi.test.google.be")
.setIgnoreCase(false)
.build();
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder().addMatchSubjectAltNames(stringMatcher).build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate mockCert = mock(X509Certificate.class);
when(mockCert.getSubjectAlternativeNames())
.thenReturn(Collections.<List<?>>singleton(ImmutableList.of(Integer.valueOf(1), "foo")));
X509Certificate[] certs = new X509Certificate[] {mockCert};
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
fail("no exception thrown");
} catch (CertificateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Test
@SuppressWarnings("deprecation")
public void testDnsWildcardPatterns()
throws CertificateException, IOException {
StringMatcher stringMatcher =
StringMatcher.newBuilder()
.setExact(testParam.sanPattern)
.setIgnoreCase(testParam.ignoreCase)
.build();
@SuppressWarnings("deprecation")
CertificateValidationContext certContext =
CertificateValidationContext.newBuilder()
.addMatchSubjectAltNames(stringMatcher)
.build();
trustManager = new XdsX509TrustManager(certContext, mockDelegate, false);
X509Certificate[] certs =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(testParam.certFile));
try {
trustManager.verifySubjectAltNameInChain(certs, certContext.getMatchSubjectAltNamesList());
assertThat(testParam.expected).isTrue();
} catch (CertificateException certException) {
assertThat(testParam.expected).isFalse();
assertThat(certException).hasMessageThat().isEqualTo("Peer certificate SAN check failed");
}
}
@Parameters(name = "{index}: {0}")
public static Collection<Object[]> getParameters() {
return Arrays.asList(new Object[][] {
{new TestParam("*.test.google.fr", SERVER_1_PEM_FILE, false, true)},
{new TestParam("*.test.youtube.com", SERVER_1_PEM_FILE, false, true)},
{new TestParam("waterzooi.test.google.be", SERVER_1_PEM_FILE, false, true)},
{new TestParam("192.168.1.3", SERVER_1_PEM_FILE, false, true)},
{new TestParam("*.TEST.YOUTUBE.com", SERVER_1_PEM_FILE, true, true)},
{new TestParam("w*i.test.google.be", SERVER_1_PEM_FILE, false, true)},
{new TestParam("w*a.test.google.be", SERVER_1_PEM_FILE, false, false)},
{new TestParam("*.test.google.com.au", SERVER_0_PEM_FILE, false, false)},
{new TestParam("*.TEST.YOUTUBE.com", SERVER_1_PEM_FILE, false, false)},
{new TestParam("*waterzooi", SERVER_1_PEM_FILE, false, false)},
{new TestParam("*.lyft.com", BAD_WILDCARD_DNS_PEM_FILE, false, false)},
{new TestParam("ly**ft.com", BAD_WILDCARD_DNS_PEM_FILE, false, false)},
{new TestParam("*yft.c*m", BAD_WILDCARD_DNS_PEM_FILE, false, false)},
{new TestParam("xn--*.lyft.com", BAD_WILDCARD_DNS_PEM_FILE, false, false)},
{new TestParam("", BAD_WILDCARD_DNS_PEM_FILE, false, false)},
});
}
private TestSslEngine buildTrustManagerAndGetSslEngine()
throws CertificateException, IOException, CertStoreException {
SSLParameters sslParams = buildTrustManagerAndGetSslParameters();
TestSslEngine sslEngine = mock(TestSslEngine.class, CALLS_REAL_METHODS);
sslEngine.setSSLParameters(sslParams);
doReturn(mockSession).when(sslEngine).getHandshakeSession();
return sslEngine;
}
private TestSslSocket buildTrustManagerAndGetSslSocket()
throws CertificateException, IOException, CertStoreException {
SSLParameters sslParams = buildTrustManagerAndGetSslParameters();
TestSslSocket sslSocket = mock(TestSslSocket.class, CALLS_REAL_METHODS);
sslSocket.setSSLParameters(sslParams);
doReturn(true).when(sslSocket).isConnected();
doReturn(mockSession).when(sslSocket).getHandshakeSession();
return sslSocket;
}
private SSLParameters buildTrustManagerAndGetSslParameters()
throws CertificateException, IOException, CertStoreException {
X509Certificate[] caCerts =
CertificateUtils.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE));
trustManager = XdsTrustManagerFactory.createX509TrustManager(caCerts,
null, false);
when(mockSession.getProtocol()).thenReturn("TLSv1.2");
when(mockSession.getPeerHost()).thenReturn("peer-host-from-mock");
SSLParameters sslParams = new SSLParameters();
sslParams.setEndpointIdentificationAlgorithm("HTTPS");
return sslParams;
}
private abstract static
|
XdsX509TrustManagerTest
|
java
|
apache__camel
|
core/camel-core-xml/src/main/java/org/apache/camel/core/xml/CamelPropertyPlaceholderLocationDefinition.java
|
{
"start": 1284,
"end": 2744
}
|
class ____ extends IdentifiedType {
@XmlAttribute
@Metadata(defaultValue = "classpath")
public String resolver;
@XmlAttribute(required = true)
public String path;
@XmlAttribute
@Metadata(defaultValue = "false")
public Boolean optional;
public String getResolver() {
return resolver;
}
/**
* The resolver to use to locate the location
*/
public void setResolver(String resolver) {
this.resolver = resolver;
}
public String getPath() {
return path;
}
/**
* Property locations to use.
*/
public void setPath(String path) {
this.path = path;
}
public Boolean getOptional() {
return optional;
}
/**
* If the location is optional.
*/
public void setOptional(Boolean optional) {
this.optional = optional;
}
@Override
public String toString() {
String answer = path;
if (ObjectHelper.isNotEmpty(resolver)) {
answer = resolver + ":" + answer;
}
if (ObjectHelper.isNotEmpty(optional)) {
answer = answer + ";optional=true";
}
return answer;
}
public PropertiesLocation toLocation() {
return new PropertiesLocation(
resolver != null ? resolver : "classpath",
path,
optional != null ? optional : false);
}
}
|
CamelPropertyPlaceholderLocationDefinition
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/internal/AdaptiveCalculator.java
|
{
"start": 912,
"end": 4198
}
|
class ____ {
private static final int INDEX_INCREMENT = 4;
private static final int INDEX_DECREMENT = 1;
private static final int[] SIZE_TABLE;
static {
List<Integer> sizeTable = new ArrayList<Integer>();
for (int i = 16; i < 512; i += 16) {
sizeTable.add(i);
}
// Suppress a warning since i becomes negative when an integer overflow happens
for (int i = 512; i > 0; i <<= 1) {
sizeTable.add(i);
}
SIZE_TABLE = new int[sizeTable.size()];
for (int i = 0; i < SIZE_TABLE.length; i ++) {
SIZE_TABLE[i] = sizeTable.get(i);
}
}
private static int getSizeTableIndex(final int size) {
for (int low = 0, high = SIZE_TABLE.length - 1;;) {
if (high < low) {
return low;
}
if (high == low) {
return high;
}
int mid = low + high >>> 1;
int a = SIZE_TABLE[mid];
int b = SIZE_TABLE[mid + 1];
if (size > b) {
low = mid + 1;
} else if (size < a) {
high = mid - 1;
} else if (size == a) {
return mid;
} else {
return mid + 1;
}
}
}
private final int minIndex;
private final int maxIndex;
private final int minCapacity;
private final int maxCapacity;
private int index;
private int nextSize;
private boolean decreaseNow;
public AdaptiveCalculator(int minimum, int initial, int maximum) {
checkPositive(minimum, "minimum");
if (initial < minimum) {
throw new IllegalArgumentException("initial: " + initial);
}
if (maximum < initial) {
throw new IllegalArgumentException("maximum: " + maximum);
}
int minIndex = getSizeTableIndex(minimum);
if (SIZE_TABLE[minIndex] < minimum) {
this.minIndex = minIndex + 1;
} else {
this.minIndex = minIndex;
}
int maxIndex = getSizeTableIndex(maximum);
if (SIZE_TABLE[maxIndex] > maximum) {
this.maxIndex = maxIndex - 1;
} else {
this.maxIndex = maxIndex;
}
int initialIndex = getSizeTableIndex(initial);
if (SIZE_TABLE[initialIndex] > initial) {
this.index = initialIndex - 1;
} else {
this.index = initialIndex;
}
this.minCapacity = minimum;
this.maxCapacity = maximum;
nextSize = max(SIZE_TABLE[index], minCapacity);
}
public void record(int size) {
if (size <= SIZE_TABLE[max(0, index - INDEX_DECREMENT)]) {
if (decreaseNow) {
index = max(index - INDEX_DECREMENT, minIndex);
nextSize = max(SIZE_TABLE[index], minCapacity);
decreaseNow = false;
} else {
decreaseNow = true;
}
} else if (size >= nextSize) {
index = min(index + INDEX_INCREMENT, maxIndex);
nextSize = min(SIZE_TABLE[index], maxCapacity);
decreaseNow = false;
}
}
public int nextSize() {
return nextSize;
}
}
|
AdaptiveCalculator
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/sql/FunctionITCase.java
|
{
"start": 88496,
"end": 88750
}
|
class ____ extends LookupTableFunction {
public void eval(@DataTypeHint("STRING") StringData s) {
super.eval(s);
}
}
/** This is an empty synchronous table function. */
private static
|
LookupTableWithHintLevel1Function
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxInterval.java
|
{
"start": 2616,
"end": 4214
}
|
class ____ implements Runnable, Subscription,
InnerProducer<Long> {
final CoreSubscriber<? super Long> actual;
final Worker worker;
volatile long requested;
static final AtomicLongFieldUpdater<IntervalRunnable> REQUESTED =
AtomicLongFieldUpdater.newUpdater(IntervalRunnable.class, "requested");
long count;
volatile boolean cancelled;
IntervalRunnable(CoreSubscriber<? super Long> actual, Worker worker) {
this.actual = actual;
this.worker = worker;
}
@Override
public CoreSubscriber<? super Long> actual() {
return actual;
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.CANCELLED) return cancelled;
if (key == Attr.RUN_ON) return worker;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.ASYNC;
return InnerProducer.super.scanUnsafe(key);
}
@Override
public void run() {
if (!cancelled) {
if (requested != 0L) {
actual.onNext(count++);
if (requested != Long.MAX_VALUE) {
REQUESTED.decrementAndGet(this);
}
} else {
cancel();
actual.onError(Exceptions.failWithOverflow("Could not emit tick " + count + " due to lack of requests" +
" (interval doesn't support small downstream requests that replenish slower than the ticks)"));
}
}
}
@Override
public void request(long n) {
if (Operators.validate(n)) {
Operators.addCap(REQUESTED, this, n);
}
}
@Override
public void cancel() {
if (!cancelled) {
cancelled = true;
worker.dispose();
}
}
}
}
|
IntervalRunnable
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/web/servlet/springmvc/MyUserHandler.java
|
{
"start": 883,
"end": 1229
}
|
class ____ {
public ServerResponse getUser(ServerRequest request) {
/**/ return ServerResponse.ok().build();
}
public ServerResponse getUserCustomers(ServerRequest request) {
/**/ return ServerResponse.ok().build();
}
public ServerResponse deleteUser(ServerRequest request) {
/**/ return ServerResponse.ok().build();
}
}
|
MyUserHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/components/MappedSuperclassComponentWithCollectionTest.java
|
{
"start": 7223,
"end": 7319
}
|
class ____ extends Dummy1 {
}
@Entity(name="Leader")
@Table(name="LEADER")
public static
|
Dummy2
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java
|
{
"start": 1498,
"end": 1817
}
|
class ____ {
private static final String BASEDIR =
GenericTestUtils.getTempPath(TestHttpCookieFlag.class.getSimpleName());
private static String keystoresDir;
private static String sslConfDir;
private static SSLFactory clientSslFactory;
private static HttpServer2 server;
public static
|
TestHttpCookieFlag
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/providers/serialisers/JsonMessageBodyWriterUtil.java
|
{
"start": 193,
"end": 691
}
|
class ____ {
public static void setContentTypeIfNecessary(MultivaluedMap<String, Object> httpHeaders) {
Object contentType = httpHeaders.getFirst(HttpHeaders.CONTENT_TYPE);
if (isNotJson(contentType)) {
httpHeaders.putSingle(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON);
}
}
public static boolean isNotJson(Object contentType) {
return (contentType == null) || !contentType.toString().contains("json");
}
}
|
JsonMessageBodyWriterUtil
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsIgnoringNewlines_CharSequence_Test.java
|
{
"start": 809,
"end": 1217
}
|
class ____ extends CharSequenceAssertBaseTest {
@Override
protected CharSequenceAssert invoke_api_method() {
return assertions.containsIgnoringNewlines("Al", "Bob");
}
@Override
protected void verify_internal_effects() {
verify(strings).assertContainsIgnoringNewlines(getInfo(assertions), getActual(assertions), "Al", "Bob");
}
}
|
CharSequenceAssert_containsIgnoringNewlines_CharSequence_Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java
|
{
"start": 832,
"end": 1055
}
|
class ____ preserve the stacktrace as well as the suppressed exceptions of
* the throwable it was created with instead of it's own. The stacktrace has no indication
* of where this exception was created.
*/
public final
|
will
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/MultipleAssertionsError.java
|
{
"start": 899,
"end": 2177
}
|
class ____ extends AssertionError {
@Serial
private static final long serialVersionUID = -5547434453993413952L;
private final List<? extends AssertionError> errors;
public MultipleAssertionsError(Description description, Object objectUnderTest, List<? extends AssertionError> errors) {
super(formatDescription(description) + "%n".formatted() + describesObjectUnderTest(objectUnderTest) + ","
+ createMessage(errors));
this.errors = errors;
}
private static String describesObjectUnderTest(Object objectUnderTest) {
return "For %s".formatted(objectUnderTest);
}
/**
* Returns the causal AssertionErrors in the order that they were thrown.
*
* @return the list of errors
*/
public List<? extends AssertionError> getErrors() {
return errors;
}
private static String formatDescription(Description description) {
return DescriptionFormatter.instance().format(description);
}
private static String createMessage(List<? extends AssertionError> errors) {
List<String> errorsMessage = errors.stream()
.map(AssertionError::getMessage)
.collect(toList());
return aggregateErrorMessages(errorsMessage);
}
}
|
MultipleAssertionsError
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-validator/src/main/java/io/quarkus/it/hibernate/validator/groups/ValidationGroups.java
|
{
"start": 134,
"end": 178
}
|
interface ____ extends Default {
}
|
Post
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/errors/UnstableOffsetCommitException.java
|
{
"start": 943,
"end": 1154
}
|
class ____ extends RetriableException {
private static final long serialVersionUID = 1L;
public UnstableOffsetCommitException(String message) {
super(message);
}
}
|
UnstableOffsetCommitException
|
java
|
spring-projects__spring-boot
|
module/spring-boot-freemarker/src/test/java/org/springframework/boot/freemarker/autoconfigure/FreeMarkerAutoConfigurationReactiveIntegrationTests.java
|
{
"start": 1909,
"end": 5639
}
|
class ____ {
private final ReactiveWebApplicationContextRunner contextRunner = new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(FreeMarkerAutoConfiguration.class));
@BeforeEach
@AfterEach
void clearReactorSchedulers() {
Schedulers.shutdownNow();
}
@Test
void defaultConfiguration() {
this.contextRunner.run((context) -> {
assertThat(context.getBean(FreeMarkerViewResolver.class)).isNotNull();
assertThat(context.getBean(FreeMarkerConfigurer.class)).isNotNull();
assertThat(context.getBean(FreeMarkerConfig.class)).isNotNull();
assertThat(context.getBean(freemarker.template.Configuration.class)).isNotNull();
});
}
@Test
@WithResource(name = "templates/home.ftlh", content = "home")
void defaultViewResolution() {
this.contextRunner.run((context) -> {
MockServerWebExchange exchange = render(context, "home");
String result = exchange.getResponse().getBodyAsString().block(Duration.ofSeconds(30));
assertThat(result).contains("home");
assertThat(exchange.getResponse().getHeaders().getContentType()).isEqualTo(MediaType.TEXT_HTML);
});
}
@Test
@WithResource(name = "templates/prefix/prefixed.ftlh", content = "prefixed")
void customPrefix() {
this.contextRunner.withPropertyValues("spring.freemarker.prefix:prefix/").run((context) -> {
MockServerWebExchange exchange = render(context, "prefixed");
String result = exchange.getResponse().getBodyAsString().block(Duration.ofSeconds(30));
assertThat(result).contains("prefixed");
});
}
@Test
@WithResource(name = "templates/suffixed.freemarker", content = "suffixed")
void customSuffix() {
this.contextRunner.withPropertyValues("spring.freemarker.suffix:.freemarker").run((context) -> {
MockServerWebExchange exchange = render(context, "suffixed");
String result = exchange.getResponse().getBodyAsString().block(Duration.ofSeconds(30));
assertThat(result).contains("suffixed");
});
}
@Test
@WithResource(name = "custom-templates/custom.ftlh", content = "custom")
void customTemplateLoaderPath() {
this.contextRunner.withPropertyValues("spring.freemarker.templateLoaderPath:classpath:/custom-templates/")
.run((context) -> {
MockServerWebExchange exchange = render(context, "custom");
String result = exchange.getResponse().getBodyAsString().block(Duration.ofSeconds(30));
assertThat(result).contains("custom");
});
}
@SuppressWarnings("deprecation")
@Test
void customFreeMarkerSettings() {
this.contextRunner.withPropertyValues("spring.freemarker.settings.boolean_format:yup,nope")
.run((context) -> assertThat(
context.getBean(FreeMarkerConfigurer.class).getConfiguration().getSetting("boolean_format"))
.isEqualTo("yup,nope"));
}
@Test
@WithResource(name = "templates/message.ftlh", content = "Message: ${greeting}")
void renderTemplate() {
this.contextRunner.withPropertyValues().run((context) -> {
FreeMarkerConfigurer freemarker = context.getBean(FreeMarkerConfigurer.class);
StringWriter writer = new StringWriter();
freemarker.getConfiguration().getTemplate("message.ftlh").process(new DataModel(), writer);
assertThat(writer.toString()).contains("Hello World");
});
}
private MockServerWebExchange render(ApplicationContext context, String viewName) {
FreeMarkerViewResolver resolver = context.getBean(FreeMarkerViewResolver.class);
Mono<View> view = resolver.resolveViewName(viewName, Locale.UK);
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/path"));
view.flatMap((v) -> v.render(null, MediaType.TEXT_HTML, exchange)).block(Duration.ofSeconds(30));
return exchange;
}
public static
|
FreeMarkerAutoConfigurationReactiveIntegrationTests
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/TaskCheckpointStatistics.java
|
{
"start": 1295,
"end": 6731
}
|
class ____ implements ResponseBody {
public static final String FIELD_NAME_ID = "id";
public static final String FIELD_NAME_CHECKPOINT_STATUS = "status";
public static final String FIELD_NAME_LATEST_ACK_TIMESTAMP = "latest_ack_timestamp";
public static final String FIELD_NAME_CHECKPOINTED_SIZE = "checkpointed_size";
/**
* The accurate name of this field should be 'checkpointed_data_size', keep it as before to not
* break backwards compatibility for old web UI.
*
* @see <a href="https://issues.apache.org/jira/browse/FLINK-13390">FLINK-13390</a>
*/
public static final String FIELD_NAME_STATE_SIZE = "state_size";
public static final String FIELD_NAME_DURATION = "end_to_end_duration";
public static final String FIELD_NAME_ALIGNMENT_BUFFERED = "alignment_buffered";
public static final String FIELD_NAME_PROCESSED_DATA = "processed_data";
public static final String FIELD_NAME_PERSISTED_DATA = "persisted_data";
public static final String FIELD_NAME_NUM_SUBTASKS = "num_subtasks";
public static final String FIELD_NAME_NUM_ACK_SUBTASKS = "num_acknowledged_subtasks";
@JsonProperty(FIELD_NAME_ID)
private final long checkpointId;
@JsonProperty(FIELD_NAME_CHECKPOINT_STATUS)
private final CheckpointStatsStatus checkpointStatus;
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP)
private final long latestAckTimestamp;
@JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE)
private final long checkpointedSize;
@JsonProperty(FIELD_NAME_STATE_SIZE)
private final long stateSize;
@JsonProperty(FIELD_NAME_DURATION)
private final long duration;
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED)
private final long alignmentBuffered;
@JsonProperty(FIELD_NAME_PROCESSED_DATA)
private final long processedData;
@JsonProperty(FIELD_NAME_PERSISTED_DATA)
private final long persistedData;
@JsonProperty(FIELD_NAME_NUM_SUBTASKS)
private final int numSubtasks;
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS)
private final int numAckSubtasks;
@JsonCreator
public TaskCheckpointStatistics(
@JsonProperty(FIELD_NAME_ID) long checkpointId,
@JsonProperty(FIELD_NAME_CHECKPOINT_STATUS) CheckpointStatsStatus checkpointStatus,
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
@JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) long checkpointedSize,
@JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
@JsonProperty(FIELD_NAME_DURATION) long duration,
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
@JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData,
@JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData,
@JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks) {
this.checkpointId = checkpointId;
this.checkpointStatus = Preconditions.checkNotNull(checkpointStatus);
this.latestAckTimestamp = latestAckTimestamp;
this.checkpointedSize = checkpointedSize;
this.stateSize = stateSize;
this.duration = duration;
this.processedData = processedData;
this.alignmentBuffered = alignmentBuffered;
this.persistedData = persistedData;
this.numSubtasks = numSubtasks;
this.numAckSubtasks = numAckSubtasks;
}
public long getLatestAckTimestamp() {
return latestAckTimestamp;
}
public long getCheckpointedSize() {
return checkpointedSize;
}
public long getStateSize() {
return stateSize;
}
public long getDuration() {
return duration;
}
public int getNumSubtasks() {
return numSubtasks;
}
public int getNumAckSubtasks() {
return numAckSubtasks;
}
public long getCheckpointId() {
return checkpointId;
}
public CheckpointStatsStatus getCheckpointStatus() {
return checkpointStatus;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TaskCheckpointStatistics that = (TaskCheckpointStatistics) o;
return checkpointId == that.checkpointId
&& latestAckTimestamp == that.latestAckTimestamp
&& stateSize == that.stateSize
&& checkpointedSize == that.checkpointedSize
&& duration == that.duration
&& alignmentBuffered == that.alignmentBuffered
&& processedData == that.processedData
&& persistedData == that.persistedData
&& numSubtasks == that.numSubtasks
&& numAckSubtasks == that.numAckSubtasks
&& checkpointStatus == that.checkpointStatus;
}
@Override
public int hashCode() {
return Objects.hash(
checkpointId,
checkpointStatus,
latestAckTimestamp,
stateSize,
checkpointedSize,
duration,
alignmentBuffered,
processedData,
persistedData,
numSubtasks,
numAckSubtasks);
}
}
|
TaskCheckpointStatistics
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt9Evaluator.java
|
{
"start": 4631,
"end": 5897
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory field;
private final int p0;
private final int p1;
private final int p2;
private final int p3;
private final int p4;
private final int p5;
private final int p6;
private final int p7;
private final int p8;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, int p0, int p1,
int p2, int p3, int p4, int p5, int p6, int p7, int p8) {
this.source = source;
this.field = field;
this.p0 = p0;
this.p1 = p1;
this.p2 = p2;
this.p3 = p3;
this.p4 = p4;
this.p5 = p5;
this.p6 = p6;
this.p7 = p7;
this.p8 = p8;
}
@Override
public RoundToInt9Evaluator get(DriverContext context) {
return new RoundToInt9Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, p8, context);
}
@Override
public String toString() {
return "RoundToInt9Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + "]";
}
}
}
|
Factory
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.