language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java | {
"start": 49026,
"end": 49650
} | class ____ {
@InlineMe(
replacement = "new Client(a, b)",
imports = {"com.google.foo.Client"})
@Deprecated
public static Client create(String a, ImmutableList<String> b) {
return new Client(a, b);
}
public Client(String a, ImmutableList<String> b) {}
}
""")
.expectUnchanged()
.addInputLines(
"Caller.java",
"""
package com.google.foo;
import com.google.common.collect.ImmutableList;
public final | Client |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/config/annotation/StompWebSocketEndpointRegistration.java | {
"start": 1019,
"end": 3158
} | interface ____ {
/**
* Enable SockJS fallback options.
*/
SockJsServiceRegistration withSockJS();
/**
* Configure the HandshakeHandler to use.
*/
StompWebSocketEndpointRegistration setHandshakeHandler(HandshakeHandler handshakeHandler);
/**
* Configure the HandshakeInterceptor's to use.
*/
StompWebSocketEndpointRegistration addInterceptors(HandshakeInterceptor... interceptors);
/**
* Set the origins for which cross-origin requests are allowed from a browser.
* Please, refer to {@link CorsConfiguration#setAllowedOrigins(List)} for
* format details and considerations, and keep in mind that the CORS spec
* does not allow use of {@code "*"} with {@code allowCredentials=true}.
* For more flexible origin patterns use {@link #setAllowedOriginPatterns}
* instead.
*
* <p>By default, no origins are allowed. When
* {@link #setAllowedOriginPatterns(String...) allowedOriginPatterns} is also
* set, then that takes precedence over this property.
*
* <p>Note when SockJS is enabled and origins are restricted, transport types
* that do not allow to check request origin (Iframe based transports) are
* disabled. As a consequence, IE 6 to 9 are not supported when origins are
* restricted.
* @since 4.1.2
* @see #setAllowedOriginPatterns(String...)
* @see <a href="https://tools.ietf.org/html/rfc6454">RFC 6454: The Web Origin Concept</a>
* @see <a href="https://github.com/sockjs/sockjs-client#supported-transports-by-browser-html-served-from-http-or-https">SockJS supported transports by browser</a>
*/
StompWebSocketEndpointRegistration setAllowedOrigins(String... origins);
/**
* Alternative to {@link #setAllowedOrigins(String...)} that supports more
* flexible patterns for specifying the origins for which cross-origin
* requests are allowed from a browser. Please, refer to
* {@link CorsConfiguration#setAllowedOriginPatterns(List)} for format
* details and other considerations.
* <p>By default this is not set.
* @since 5.3.2
*/
StompWebSocketEndpointRegistration setAllowedOriginPatterns(String... originPatterns);
}
| StompWebSocketEndpointRegistration |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/ProcessFunction.java | {
"start": 2453,
"end": 4452
} | class ____<I, O> extends AbstractRichFunction {
private static final long serialVersionUID = 1L;
/**
* Process one element from the input stream.
*
* <p>This function can output zero or more elements using the {@link Collector} parameter and
* also update internal state or set timers using the {@link Context} parameter.
*
* @param value The input value.
* @param ctx A {@link Context} that allows querying the timestamp of the element and getting a
* {@link TimerService} for registering timers and querying the time. The context is only
* valid during the invocation of this method, do not store it.
* @param out The collector for returning result values.
* @throws Exception This method may throw exceptions. Throwing an exception will cause the
* operation to fail and may trigger recovery.
*/
public abstract void processElement(I value, Context ctx, Collector<O> out) throws Exception;
/**
* Called when a timer set using {@link TimerService} fires.
*
* @param timestamp The timestamp of the firing timer.
* @param ctx An {@link OnTimerContext} that allows querying the timestamp of the firing timer,
* querying the {@link TimeDomain} of the firing timer and getting a {@link TimerService}
* for registering timers and querying the time. The context is only valid during the
* invocation of this method, do not store it.
* @param out The collector for returning result values.
* @throws Exception This method may throw exceptions. Throwing an exception will cause the
* operation to fail and may trigger recovery.
*/
public void onTimer(long timestamp, OnTimerContext ctx, Collector<O> out) throws Exception {}
/**
* Information available in an invocation of {@link #processElement(Object, Context, Collector)}
* or {@link #onTimer(long, OnTimerContext, Collector)}.
*/
public abstract | ProcessFunction |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/entities/collection/MultipleCollectionEntity.java | {
"start": 717,
"end": 3374
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "ID", length = 10)
private Long id;
@Version
@Column(name = "VERSION", nullable = false)
private Integer version;
@Column(name = "TEXT", length = 50, nullable = false)
private String text;
@OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, orphanRemoval = true)
@JoinColumn(name = "MCE_ID", nullable = false)
@AuditJoinTable(name = "MCE_RE1_AUD", inverseJoinColumns = @JoinColumn(name = "RE1_ID"))
private List<MultipleCollectionRefEntity1> refEntities1 = new ArrayList<MultipleCollectionRefEntity1>();
@OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, orphanRemoval = true)
@JoinColumn(name = "MCE_ID", nullable = false)
@AuditJoinTable(name = "MCE_RE2_AUD", inverseJoinColumns = @JoinColumn(name = "RE2_ID"))
private List<MultipleCollectionRefEntity2> refEntities2 = new ArrayList<MultipleCollectionRefEntity2>();
public Long getId() {
return id;
}
public Integer getVersion() {
return version;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
public List<MultipleCollectionRefEntity1> getRefEntities1() {
return refEntities1;
}
public void addRefEntity1(MultipleCollectionRefEntity1 refEntity1) {
refEntities1.add( refEntity1 );
}
public void removeRefEntity1(MultipleCollectionRefEntity1 refEntity1) {
refEntities1.remove( refEntity1 );
}
public List<MultipleCollectionRefEntity2> getRefEntities2() {
return refEntities2;
}
public void addRefEntity2(MultipleCollectionRefEntity2 refEntity2) {
refEntities2.add( refEntity2 );
}
public void removeRefEntity2(MultipleCollectionRefEntity2 refEntity2) {
refEntities2.remove( refEntity2 );
}
/**
* For test purpose only.
*/
public void setRefEntities1(List<MultipleCollectionRefEntity1> refEntities1) {
this.refEntities1 = refEntities1;
}
/**
* For test purpose only.
*/
public void setRefEntities2(List<MultipleCollectionRefEntity2> refEntities2) {
this.refEntities2 = refEntities2;
}
@Override
public String toString() {
return "MultipleCollectionEntity [id=" + id + ", text=" + text
+ ", refEntities1=" + refEntities1 + ", refEntities2="
+ refEntities2 + "]";
}
@Override
public int hashCode() {
return Objects.hash( id );
}
@Override
public boolean equals(Object obj) {
if ( this == obj )
return true;
if ( obj == null )
return false;
if ( getClass() != obj.getClass() )
return false;
MultipleCollectionEntity other = (MultipleCollectionEntity) obj;
return Objects.equals( id, other.id );
}
}
| MultipleCollectionEntity |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/util/BigIntArray.java | {
"start": 1057,
"end": 4790
} | class ____ extends AbstractBigByteArray implements IntArray {
private static final BigIntArray ESTIMATOR = new BigIntArray(0, BigArrays.NON_RECYCLING_INSTANCE, false);
static final VarHandle VH_PLATFORM_NATIVE_INT = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.nativeOrder());
/** Constructor. */
BigIntArray(long size, BigArrays bigArrays, boolean clearOnResize) {
super(INT_PAGE_SIZE, bigArrays, clearOnResize, size);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
writePages(out, size, pages, Integer.BYTES);
}
@Override
public int get(long index) {
final int pageIndex = pageIdx(index);
final int indexInPage = idxInPage(index);
return (int) VH_PLATFORM_NATIVE_INT.get(pages[pageIndex], indexInPage << 2);
}
@Override
public int getAndSet(long index, int value) {
final int pageIndex = pageIdx(index);
final int indexInPage = idxInPage(index);
final byte[] page = getPageForWriting(pageIndex);
final int ret = (int) VH_PLATFORM_NATIVE_INT.get(page, indexInPage << 2);
VH_PLATFORM_NATIVE_INT.set(page, indexInPage << 2, value);
return ret;
}
@Override
public void set(long index, int value) {
final int pageIndex = pageIdx(index);
final int indexInPage = idxInPage(index);
VH_PLATFORM_NATIVE_INT.set(getPageForWriting(pageIndex), indexInPage << 2, value);
}
@Override
public int increment(long index, int inc) {
final int pageIndex = pageIdx(index);
final int indexInPage = idxInPage(index);
final byte[] page = getPageForWriting(pageIndex);
final int newVal = (int) VH_PLATFORM_NATIVE_INT.get(page, indexInPage << 2) + inc;
VH_PLATFORM_NATIVE_INT.set(page, indexInPage << 2, newVal);
return newVal;
}
@Override
public void fill(long fromIndex, long toIndex, int value) {
if (fromIndex > toIndex) {
throw new IllegalArgumentException();
}
final int fromPage = pageIdx(fromIndex);
final int toPage = pageIdx(toIndex - 1);
if (fromPage == toPage) {
fill(getPageForWriting(fromPage), idxInPage(fromIndex), idxInPage(toIndex - 1) + 1, value);
} else {
fill(getPageForWriting(fromPage), idxInPage(fromIndex), INT_PAGE_SIZE, value);
for (int i = fromPage + 1; i < toPage; ++i) {
fill(getPageForWriting(i), 0, INT_PAGE_SIZE, value);
}
fill(getPageForWriting(toPage), 0, idxInPage(toIndex - 1) + 1, value);
}
}
@Override
public void fillWith(StreamInput in) throws IOException {
readPages(in);
}
public static void fill(byte[] page, int from, int to, int value) {
if (from < to) {
VH_PLATFORM_NATIVE_INT.set(page, from << 2, value);
fillBySelfCopy(page, from << 2, to << 2, Integer.BYTES);
}
}
@Override
protected int numBytesPerElement() {
return Integer.BYTES;
}
/** Estimates the number of bytes that would be consumed by an array of the given size. */
public static long estimateRamBytes(final long size) {
return ESTIMATOR.ramBytesEstimated(size);
}
@Override
public void set(long index, byte[] buf, int offset, int len) {
set(index, buf, offset, len, 2);
}
private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(INT_PAGE_SIZE);
private static int pageIdx(long index) {
return (int) (index >>> PAGE_SHIFT);
}
private static int idxInPage(long index) {
return (int) (index & INT_PAGE_SIZE - 1);
}
}
| BigIntArray |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/matcher/AssertionMatcher.java | {
"start": 1656,
"end": 3228
} | class ____<T> extends BaseMatcher<T> {
private AssertionError firstError;
/**
* {@inheritDoc}
*/
@SuppressWarnings("unchecked")
@Override
public boolean matches(Object argument) {
T actual = (T) argument;
try {
assertion(actual);
return true;
} catch (AssertionError e) {
firstError = e;
return false;
}
}
/**
* Perform the assertions implemented in this method when the {@link AssertionMatcher} is used as an Hamcrest {@link Matcher}.
*
* If the matcher fails, the description will contain the stacktrace of the first failed assertion.
* <p>
* Example with Mockito:
* <pre><code class='java'> verify(customerRepository).save(argThat(new AssertionMatcher<Customer>() {
* @Override
* public void assertion(Customer actual) throws AssertionError {
* assertThat(actual).hasName("John")
* .hasAge(30);
* }
* })
* );</code></pre>
*
* @param actual assertion object
* @throws AssertionError if the assertion object fails assertion
*/
public abstract void assertion(T actual) throws AssertionError;
/**
* {@inheritDoc}
*/
@Override
public void describeTo(Description description) {
if (firstError != null) {
description.appendText("AssertionError with message: ");
description.appendText(firstError.getMessage());
description.appendText("%n%nStacktrace was: ".formatted());
description.appendText(Throwables.getStackTrace(firstError));
}
}
} | AssertionMatcher |
java | spring-projects__spring-security | oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/web/HttpSessionOAuth2AuthorizationRequestRepositoryTests.java | {
"start": 1384,
"end": 10784
} | class ____ {
private HttpSessionOAuth2AuthorizationRequestRepository authorizationRequestRepository = new HttpSessionOAuth2AuthorizationRequestRepository();
@Test
public void loadAuthorizationRequestWhenHttpServletRequestIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.authorizationRequestRepository.loadAuthorizationRequest(null));
}
@Test
public void loadAuthorizationRequestWhenNotSavedThenReturnNull() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter(OAuth2ParameterNames.STATE, "state-1234");
OAuth2AuthorizationRequest authorizationRequest = this.authorizationRequestRepository
.loadAuthorizationRequest(request);
assertThat(authorizationRequest).isNull();
}
@Test
public void loadAuthorizationRequestWhenSavedThenReturnAuthorizationRequest() {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest, request, response);
request.addParameter(OAuth2ParameterNames.STATE, authorizationRequest.getState());
OAuth2AuthorizationRequest loadedAuthorizationRequest = this.authorizationRequestRepository
.loadAuthorizationRequest(request);
assertThat(loadedAuthorizationRequest).isEqualTo(authorizationRequest);
}
@Test
public void loadAuthorizationRequestWhenSavedAndStateParameterNullThenReturnNull() {
MockHttpServletRequest request = new MockHttpServletRequest();
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest, request,
new MockHttpServletResponse());
assertThat(this.authorizationRequestRepository.loadAuthorizationRequest(request)).isNull();
}
@Test
public void saveAuthorizationRequestWhenHttpServletRequestIsNullThenThrowIllegalArgumentException() {
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
assertThatIllegalArgumentException().isThrownBy(() -> this.authorizationRequestRepository
.saveAuthorizationRequest(authorizationRequest, null, new MockHttpServletResponse()));
}
@Test
public void saveAuthorizationRequestWhenHttpServletResponseIsNullThenThrowIllegalArgumentException() {
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
assertThatIllegalArgumentException().isThrownBy(() -> this.authorizationRequestRepository
.saveAuthorizationRequest(authorizationRequest, new MockHttpServletRequest(), null));
}
@Test
public void saveAuthorizationRequestWhenStateNullThenThrowIllegalArgumentException() {
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().state(null).build();
assertThatIllegalArgumentException()
.isThrownBy(() -> this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest,
new MockHttpServletRequest(), new MockHttpServletResponse()));
}
@Test
public void saveAuthorizationRequestWhenNotNullThenSaved() {
MockHttpServletRequest request = new MockHttpServletRequest();
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest, request,
new MockHttpServletResponse());
request.addParameter(OAuth2ParameterNames.STATE, authorizationRequest.getState());
OAuth2AuthorizationRequest loadedAuthorizationRequest = this.authorizationRequestRepository
.loadAuthorizationRequest(request);
assertThat(loadedAuthorizationRequest).isEqualTo(authorizationRequest);
}
@Test
public void saveAuthorizationRequestWhenNoExistingSessionAndDistributedSessionThenSaved() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setSession(new MockDistributedHttpSession());
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest, request,
new MockHttpServletResponse());
request.addParameter(OAuth2ParameterNames.STATE, authorizationRequest.getState());
OAuth2AuthorizationRequest loadedAuthorizationRequest = this.authorizationRequestRepository
.loadAuthorizationRequest(request);
assertThat(loadedAuthorizationRequest).isEqualTo(authorizationRequest);
}
@Test
public void saveAuthorizationRequestWhenExistingSessionAndDistributedSessionThenSaved() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setSession(new MockDistributedHttpSession());
OAuth2AuthorizationRequest authorizationRequest1 = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest1, request,
new MockHttpServletResponse());
OAuth2AuthorizationRequest authorizationRequest2 = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest2, request,
new MockHttpServletResponse());
request.addParameter(OAuth2ParameterNames.STATE, authorizationRequest2.getState());
OAuth2AuthorizationRequest loadedAuthorizationRequest = this.authorizationRequestRepository
.loadAuthorizationRequest(request);
assertThat(loadedAuthorizationRequest).isEqualTo(authorizationRequest2);
}
@Test
public void saveAuthorizationRequestWhenNullThenRemoved() {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest, request, response);
request.addParameter(OAuth2ParameterNames.STATE, authorizationRequest.getState());
this.authorizationRequestRepository.saveAuthorizationRequest(null, request, response);
OAuth2AuthorizationRequest loadedAuthorizationRequest = this.authorizationRequestRepository
.loadAuthorizationRequest(request);
assertThat(loadedAuthorizationRequest).isNull();
}
@Test
public void removeAuthorizationRequestWhenHttpServletRequestIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.authorizationRequestRepository
.removeAuthorizationRequest(null, new MockHttpServletResponse()));
}
@Test
public void removeAuthorizationRequestWhenHttpServletResponseIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.authorizationRequestRepository
.removeAuthorizationRequest(new MockHttpServletRequest(), null));
}
@Test
public void removeAuthorizationRequestWhenSavedThenRemoved() {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest, request, response);
request.addParameter(OAuth2ParameterNames.STATE, authorizationRequest.getState());
OAuth2AuthorizationRequest removedAuthorizationRequest = this.authorizationRequestRepository
.removeAuthorizationRequest(request, response);
OAuth2AuthorizationRequest loadedAuthorizationRequest = this.authorizationRequestRepository
.loadAuthorizationRequest(request);
assertThat(removedAuthorizationRequest).isNotNull();
assertThat(loadedAuthorizationRequest).isNull();
}
// gh-5263
@Test
public void removeAuthorizationRequestWhenSavedThenRemovedFromSession() {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
OAuth2AuthorizationRequest authorizationRequest = createAuthorizationRequest().build();
this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest, request, response);
request.addParameter(OAuth2ParameterNames.STATE, authorizationRequest.getState());
OAuth2AuthorizationRequest removedAuthorizationRequest = this.authorizationRequestRepository
.removeAuthorizationRequest(request, response);
String sessionAttributeName = HttpSessionOAuth2AuthorizationRequestRepository.class.getName()
+ ".AUTHORIZATION_REQUEST";
assertThat(removedAuthorizationRequest).isNotNull();
assertThat(request.getSession().getAttribute(sessionAttributeName)).isNull();
}
@Test
public void removeAuthorizationRequestWhenNotSavedThenNotRemoved() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter(OAuth2ParameterNames.STATE, "state-1234");
MockHttpServletResponse response = new MockHttpServletResponse();
OAuth2AuthorizationRequest removedAuthorizationRequest = this.authorizationRequestRepository
.removeAuthorizationRequest(request, response);
assertThat(removedAuthorizationRequest).isNull();
}
protected OAuth2AuthorizationRequest.Builder createAuthorizationRequest() {
return OAuth2AuthorizationRequest.authorizationCode()
.authorizationUri("https://example.com/oauth2/authorize")
.clientId("client-id-1234")
.state("state-1234");
}
static | HttpSessionOAuth2AuthorizationRequestRepositoryTests |
java | quarkusio__quarkus | integration-tests/reactive-mssql-client/src/test/java/io/quarkus/it/reactive/mssql/client/HealthCheckTest.java | {
"start": 377,
"end": 839
} | class ____ {
@Test
public void testHealthCheck() {
RestAssured.when().get("/q/health").then()
.contentType(ContentType.JSON)
.header("Content-Type", containsString("charset=UTF-8"))
.body("status", is("UP"),
"checks.status", containsInAnyOrder("UP"),
"checks.name", containsInAnyOrder("Reactive MS SQL connections health check"));
}
}
| HealthCheckTest |
java | elastic__elasticsearch | plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiNumberFilterFactory.java | {
"start": 842,
"end": 1202
} | class ____ extends AbstractTokenFilterFactory {
public KuromojiNumberFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new JapaneseNumberFilter(tokenStream);
}
}
| KuromojiNumberFilterFactory |
java | quarkusio__quarkus | extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/jaxrs/JaxrsEndPointValidationInterceptor.java | {
"start": 2843,
"end": 5018
} | class ____.
*/
private List<MediaType> doGetProduces(Method originalMethod) {
Class<?> currentClass = originalMethod.getDeclaringClass();
List<Class<?>> interfaces = new ArrayList<>();
do {
List<MediaType> classMethodProducedMediaTypes = getProducesFromMethod(currentClass, originalMethod);
if (!classMethodProducedMediaTypes.isEmpty()) {
return classMethodProducedMediaTypes;
}
for (Class<?> interfaze : currentClass.getInterfaces()) {
interfaces.add(interfaze);
}
currentClass = currentClass.getSuperclass();
} while (!Object.class.equals(currentClass));
for (Class<?> interfaze : interfaces) {
List<MediaType> interfaceMethodProducedMediaTypes = getProducesFromMethod(interfaze, originalMethod);
if (!interfaceMethodProducedMediaTypes.isEmpty()) {
return interfaceMethodProducedMediaTypes;
}
}
List<MediaType> classProducedMediaTypes = getProduces(originalMethod.getDeclaringClass().getAnnotation(Produces.class));
if (!classProducedMediaTypes.isEmpty()) {
return classProducedMediaTypes;
}
for (Class<?> interfaze : interfaces) {
List<MediaType> interfaceProducedMediaTypes = getProduces(interfaze.getAnnotation(Produces.class));
if (!interfaceProducedMediaTypes.isEmpty()) {
return interfaceProducedMediaTypes;
}
}
return Collections.emptyList();
}
private List<MediaType> getProducesFromMethod(Class<?> currentClass, Method originalMethod) {
if (currentClass.equals(originalMethod.getDeclaringClass())) {
return getProduces(originalMethod.getAnnotation(Produces.class));
}
try {
return getProduces(currentClass
.getMethod(originalMethod.getName(), originalMethod.getParameterTypes()).getAnnotation(Produces.class));
} catch (NoSuchMethodException | SecurityException e) {
// we don't have a visible method around, let's ignore this | annotations |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/EclipseLinkNamespaceUserRepositoryTests.java | {
"start": 1361,
"end": 4807
} | class ____ extends NamespaceUserRepositoryTests {
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=422450 is resolved.
*/
@Override
void sortByAssociationPropertyShouldUseLeftOuterJoin() {}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=422450 is resolved.
*/
@Override
void sortByAssociationPropertyInPageableShouldUseLeftOuterJoin() {}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=349477 is resolved.
*/
@Override
void findByElementCollectionAttribute() {}
/**
* This test will fail once https://bugs.eclipse.org/bugs/show_bug.cgi?id=521915 is fixed.
*/
@Override
@Test // DATAJPA-1172
void queryProvidesCorrectNumberOfParametersForNativeQuery() {
Query query = em.createNativeQuery("select 1 from User where firstname=? and lastname=?");
assertThat(query.getParameters()).describedAs(
"Due to a bug eclipse has size 0; If this is no longer the case the special code path triggered in NamedOrIndexedQueryParameterSetter.registerExcessParameters can be removed")
.isEmpty();
}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=525319 is fixed.
*/
@Disabled
@Override
@Test // DATAJPA-980
void supportsInterfaceProjectionsWithNativeQueries() {}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=525319 is fixed.
*/
@Disabled
@Override
@Test // DATAJPA-1248
void supportsProjectionsWithNativeQueriesAndCamelCaseProperty() {}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=525319 is fixed.
*/
@Disabled
@Override
@Test // DATAJPA-1301
void returnsNullValueInMap() {}
/**
* TODO: Remove, once https://bugs.eclipse.org/bugs/show_bug.cgi?id=289141 is fixed.
*/
@Disabled
@Override
@Test
void bindsNativeQueryResultsToProjectionByName() {}
/**
* Ignores the test. Reconsider once https://bugs.eclipse.org/bugs/show_bug.cgi?id=533240 is fixed.
*/
@Override
void findByEmptyArrayOfIntegers() {}
/**
* Ignores the test. Reconsider once https://bugs.eclipse.org/bugs/show_bug.cgi?id=533240 is fixed.
*/
@Override
void findByAgeWithEmptyArrayOfIntegersOrFirstName() {}
/**
* Ignores the test. Reconsider once https://bugs.eclipse.org/bugs/show_bug.cgi?id=533240 is fixed.
*/
@Override
void findByEmptyCollectionOfIntegers() {}
/**
* Ignores the test. Reconsider once https://bugs.eclipse.org/bugs/show_bug.cgi?id=533240 is fixed.
*/
@Override
void findByEmptyCollectionOfStrings() {}
/**
* Ignores the test for EclipseLink.
*/
@Override
@Test
@Disabled
void savingUserThrowsAnException() {}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=349477 is resolved.
*/
@Disabled
@Override
@Test // DATAJPA-1303
void findByElementCollectionInAttributeIgnoreCase() {}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=349477 is resolved.
*/
@Disabled
@Override
@Test // DATAJPA-1303
void findByElementCollectionNotInAttributeIgnoreCase() {}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=349477 is resolved.
*/
@Disabled
@Override
@Test // DATAJPA-1303
void findByElementVarargInAttributeIgnoreCase() {}
/**
* Ignored until https://bugs.eclipse.org/bugs/show_bug.cgi?id=349477 is resolved.
*/
@Disabled
@Override
@Test // DATAJPA-1303
void findByElementCollectionInAttributeIgnoreCaseWithNulls() {}
}
| EclipseLinkNamespaceUserRepositoryTests |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/InvalidOffsetException.java | {
"start": 1198,
"end": 1393
} | class ____ extends KafkaException {
public InvalidOffsetException(String message) {
super(message);
}
public abstract Set<TopicPartition> partitions();
}
| InvalidOffsetException |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/ManyToManyAbstractTablePerClassTest.java | {
"start": 3295,
"end": 3903
} | class ____ {
@Id
Integer id;
@ManyToMany
@JoinTable(name = "children_set")
Set<TablePerClassBase> childrenSet = new HashSet<>();
@ManyToMany
@JoinTable(name = "children_list")
@OrderColumn(name = "listIndex")
List<TablePerClassBase> childrenList = new ArrayList<>();
@ManyToMany
@JoinTable(name = "children_map")
Map<Integer, TablePerClassBase> childrenMap = new HashMap<>();
public TablePerClassBase() {
}
public TablePerClassBase(Integer id) {
this.id = id;
}
}
@Entity(name = "TablePerClassSub1")
@Table(name = "table_per_class_sub_1")
public static | TablePerClassBase |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/artifact/resolver/filter/AbstractScopeArtifactFilter.java | {
"start": 995,
"end": 2795
} | class ____ implements ArtifactFilter {
private boolean compileScope;
private boolean runtimeScope;
private boolean testScope;
private boolean providedScope;
private boolean systemScope;
void addScopeInternal(String scope) {
if (Artifact.SCOPE_COMPILE.equals(scope)) {
systemScope = true;
providedScope = true;
compileScope = true;
} else if (Artifact.SCOPE_RUNTIME.equals(scope)) {
compileScope = true;
runtimeScope = true;
} else if (Artifact.SCOPE_COMPILE_PLUS_RUNTIME.equals(scope)) {
systemScope = true;
providedScope = true;
compileScope = true;
runtimeScope = true;
} else if (Artifact.SCOPE_RUNTIME_PLUS_SYSTEM.equals(scope)) {
systemScope = true;
compileScope = true;
runtimeScope = true;
} else if (Artifact.SCOPE_TEST.equals(scope)) {
systemScope = true;
providedScope = true;
compileScope = true;
runtimeScope = true;
testScope = true;
}
}
@Override
public boolean include(Artifact artifact) {
if (Artifact.SCOPE_COMPILE.equals(artifact.getScope())) {
return compileScope;
} else if (Artifact.SCOPE_RUNTIME.equals(artifact.getScope())) {
return runtimeScope;
} else if (Artifact.SCOPE_TEST.equals(artifact.getScope())) {
return testScope;
} else if (Artifact.SCOPE_PROVIDED.equals(artifact.getScope())) {
return providedScope;
} else if (Artifact.SCOPE_SYSTEM.equals(artifact.getScope())) {
return systemScope;
} else {
return true;
}
}
}
| AbstractScopeArtifactFilter |
java | quarkusio__quarkus | extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/mpmetrics/MetricDotNames.java | {
"start": 2474,
"end": 4344
} | class ____ a REST endpoint or JAX-RS provider
static final DotName JAXRS_PATH = DotName.createSimple("jakarta.ws.rs.Path");
static final DotName REST_CONTROLLER = DotName
.createSimple("org.springframework.web.bind.annotation.RestController");
// Interceptors and producers
static final DotName CONCURRENT_GAUGE_INTERCEPTOR = DotName
.createSimple("io.quarkus.micrometer.runtime.binder.mpmetrics.ConcurrentGaugeInterceptor");
static final DotName COUNTED_INTERCEPTOR = DotName
.createSimple("io.quarkus.micrometer.runtime.binder.mpmetrics.CountedInterceptor");
static final DotName INJECTED_METRIC_PRODUCER = DotName
.createSimple("io.quarkus.micrometer.runtime.binder.mpmetrics.InjectedMetricProducer");
static final DotName TIMED_INTERCEPTOR = DotName
.createSimple("io.quarkus.micrometer.runtime.binder.mpmetrics.TimedInterceptor");
static final DotName MP_METRICS_REGISTRY_PRODUCER = DotName
.createSimple("io.quarkus.micrometer.runtime.binder.mpmetrics.MpMetricsRegistryProducer");
/**
* @param annotations
* @return true if the map of all annotations contains any MP Metrics
* annotations
*/
static boolean containsMetricAnnotation(Map<DotName, List<AnnotationInstance>> annotations) {
for (DotName name : individualMetrics) {
if (annotations.containsKey(name)) {
return true;
}
}
return false;
}
/**
* @return true for known metrics subsystem classes that should not
* be inspected for lifecycle constraints, etc.
*/
static boolean knownClass(ClassInfo classInfo) {
return classInfo.name().toString().startsWith(MICROMETER_EXTENSION_PKG);
}
/**
* @param classInfo
* @return true if the specified | is |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/AdviceWithTryCatchFinallyTest.java | {
"start": 1033,
"end": 2023
} | class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testAdviceTryCatchFinally() throws Exception {
context.addRoutes(createRouteBuilder());
adviceWith(context, "my-route", a -> a.weaveById("replace-me")
.replace().to("mock:replaced"));
context.start();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").routeId("my-route")
.doTry()
.log("try")
.to("mock:replace-me").id("replace-me")
.doCatch(Exception.class)
.log("catch")
.doFinally()
.log("finally")
.end();
}
};
}
}
| AdviceWithTryCatchFinallyTest |
java | spring-projects__spring-boot | module/spring-boot-zipkin/src/main/java/org/springframework/boot/zipkin/autoconfigure/ZipkinProperties.java | {
"start": 976,
"end": 2088
} | class ____ {
/**
* URL to the Zipkin API.
*/
private String endpoint = "http://localhost:9411/api/v2/spans";
/**
* How to encode the POST body to the Zipkin API.
*/
private Encoding encoding = Encoding.JSON;
/**
* Connection timeout for requests to Zipkin.
*/
private Duration connectTimeout = Duration.ofSeconds(1);
/**
* Read timeout for requests to Zipkin.
*/
private Duration readTimeout = Duration.ofSeconds(10);
public String getEndpoint() {
return this.endpoint;
}
public void setEndpoint(String endpoint) {
this.endpoint = endpoint;
}
public Encoding getEncoding() {
return this.encoding;
}
public void setEncoding(Encoding encoding) {
this.encoding = encoding;
}
public Duration getConnectTimeout() {
return this.connectTimeout;
}
public void setConnectTimeout(Duration connectTimeout) {
this.connectTimeout = connectTimeout;
}
public Duration getReadTimeout() {
return this.readTimeout;
}
public void setReadTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
}
/**
* Zipkin message encoding.
*/
public | ZipkinProperties |
java | google__truth | extensions/proto/src/test/java/com/google/common/truth/extensions/proto/FieldScopesTest.java | {
"start": 1650,
"end": 52535
} | class ____ extends ProtoSubjectTestBase {
@Parameters(name = "{0}")
public static Collection<Object[]> parameters() {
return ProtoSubjectTestBase.parameters();
}
// Set up for the ignoringTopLevelField tests.
// ignoringFieldMessage and ignoringFieldDiffMessage are simple messages with two fields set. They
// are the same for the "good" field, and different for the "bad" field. The *FieldNumber and
// *FieldDescriptor members point to these fields.
private final Message ignoringFieldMessage;
private final Message ignoringFieldDiffMessage;
private final int goodFieldNumber;
private final int badFieldNumber;
private final FieldDescriptor goodFieldDescriptor;
private final FieldDescriptor badFieldDescriptor;
public FieldScopesTest(TestType testType) {
super(testType);
ignoringFieldMessage = parse("o_int: 3 r_string: \"foo\"");
ignoringFieldDiffMessage = parse("o_int: 3 r_string: \"bar\"");
goodFieldNumber = getFieldNumber("o_int");
badFieldNumber = getFieldNumber("r_string");
goodFieldDescriptor = getFieldDescriptor("o_int");
badFieldDescriptor = getFieldDescriptor("r_string");
}
@Test
public void unequalMessages() {
Message message = parse("o_int: 3 r_string: \"foo\"");
Message diffMessage = parse("o_int: 5 r_string: \"bar\"");
expectThat(diffMessage).isNotEqualTo(message);
}
@Test
public void fieldScopes_all() {
Message message = parse("o_int: 3 r_string: \"foo\"");
Message diffMessage = parse("o_int: 5 r_string: \"bar\"");
expectThat(diffMessage).withPartialScope(FieldScopes.all()).isNotEqualTo(message);
expectThat(diffMessage).ignoringFieldScope(FieldScopes.all()).isEqualTo(message);
expectFailureWhenTesting()
.that(diffMessage)
.ignoringFieldScope(FieldScopes.all())
.isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: o_int");
expectThatFailure().hasMessageThat().contains("ignored: r_string");
}
@Test
public void fieldScopes_none() {
Message message = parse("o_int: 3 r_string: \"foo\"");
Message diffMessage = parse("o_int: 5 r_string: \"bar\"");
expectThat(diffMessage).ignoringFieldScope(FieldScopes.none()).isNotEqualTo(message);
expectThat(diffMessage).withPartialScope(FieldScopes.none()).isEqualTo(message);
expectFailureWhenTesting()
.that(diffMessage)
.withPartialScope(FieldScopes.none())
.isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: o_int");
expectThatFailure().hasMessageThat().contains("ignored: r_string");
}
@Test
public void fieldScopes_none_withAnyField() {
String typeUrl =
isProto3()
? "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage3"
: "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage2";
Message message = parse("o_int: 3 o_any_message { [" + typeUrl + "]: { r_string: \"foo\" } }");
Message diffMessage =
parse("o_int: 5 o_any_message { [" + typeUrl + "]: { r_string: \"bar\" } }");
expectThat(diffMessage).ignoringFieldScope(FieldScopes.none()).isNotEqualTo(message);
expectThat(diffMessage).withPartialScope(FieldScopes.none()).isEqualTo(message);
expectFailureWhenTesting()
.that(diffMessage)
.withPartialScope(FieldScopes.none())
.isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: o_int");
expectThatFailure().hasMessageThat().contains("ignored: o_any_message");
}
@Test
public void ignoringTopLevelField_ignoringField() {
expectThat(ignoringFieldDiffMessage)
.ignoringFields(goodFieldNumber)
.isNotEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFields(badFieldNumber)
.isEqualTo(ignoringFieldMessage);
expectFailureWhenTesting()
.that(ignoringFieldDiffMessage)
.ignoringFields(goodFieldNumber)
.isEqualTo(ignoringFieldMessage);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("modified: r_string[0]: \"foo\" -> \"bar\"");
expectFailureWhenTesting()
.that(ignoringFieldDiffMessage)
.ignoringFields(badFieldNumber)
.isNotEqualTo(ignoringFieldMessage);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: r_string");
}
@Test
public void ignoringTopLevelAnyField_ignoringField() {
String typeUrl =
isProto3()
? "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage3"
: "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage2";
Message message = parse("o_int: 1 o_any_message { [" + typeUrl + "]: { r_string: \"foo\" } }");
Message diffMessage = parse("o_int: 1");
int goodFieldNumber = getFieldNumber("o_int");
int badFieldNumber = getFieldNumber("o_any_message");
expectThat(diffMessage).ignoringFields(goodFieldNumber).isNotEqualTo(message);
expectThat(diffMessage).ignoringFields(badFieldNumber).isEqualTo(diffMessage);
expectFailureWhenTesting().that(diffMessage).ignoringFields(goodFieldNumber).isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("deleted: o_any_message");
expectFailureWhenTesting()
.that(diffMessage)
.ignoringFields(badFieldNumber)
.isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: o_any_message");
}
@Test
public void ignoringTopLevelField_fieldScopes_ignoringFields() {
expectThat(ignoringFieldDiffMessage)
.withPartialScope(FieldScopes.ignoringFields(goodFieldNumber))
.isNotEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFieldScope(FieldScopes.ignoringFields(goodFieldNumber))
.isEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.withPartialScope(FieldScopes.ignoringFields(badFieldNumber))
.isEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFieldScope(FieldScopes.ignoringFields(badFieldNumber))
.isNotEqualTo(ignoringFieldMessage);
}
@Test
public void ignoringTopLevelField_fieldScopes_allowingFields() {
expectThat(ignoringFieldDiffMessage)
.withPartialScope(FieldScopes.allowingFields(goodFieldNumber))
.isEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFieldScope(FieldScopes.allowingFields(goodFieldNumber))
.isNotEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.withPartialScope(FieldScopes.allowingFields(badFieldNumber))
.isNotEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFieldScope(FieldScopes.allowingFields(badFieldNumber))
.isEqualTo(ignoringFieldMessage);
}
@Test
public void ignoringTopLevelAnyField_fieldScopes_allowingFields() {
String typeUrl =
isProto3()
? "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage3"
: "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage2";
Message message =
parse("o_int: 1 o_any_message { [" + typeUrl + "]: { o_int: 2 r_string: \"foo\" } }");
Message diffMessage = parse("o_int: 1");
int goodFieldNumber = getFieldNumber("o_int");
expectThat(message)
.withPartialScope(FieldScopes.allowingFields(goodFieldNumber))
.isEqualTo(diffMessage);
}
@Test
public void ignoringTopLevelField_fieldScopes_allowingFieldDescriptors() {
expectThat(ignoringFieldDiffMessage)
.withPartialScope(FieldScopes.allowingFieldDescriptors(goodFieldDescriptor))
.isEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFieldScope(FieldScopes.allowingFieldDescriptors(goodFieldDescriptor))
.isNotEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.withPartialScope(FieldScopes.allowingFieldDescriptors(badFieldDescriptor))
.isNotEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFieldScope(FieldScopes.allowingFieldDescriptors(badFieldDescriptor))
.isEqualTo(ignoringFieldMessage);
}
@Test
public void ignoringTopLevelField_fieldScopes_ignoringFieldDescriptors() {
expectThat(ignoringFieldDiffMessage)
.withPartialScope(FieldScopes.ignoringFieldDescriptors(goodFieldDescriptor))
.isNotEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFieldScope(FieldScopes.ignoringFieldDescriptors(goodFieldDescriptor))
.isEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.withPartialScope(FieldScopes.ignoringFieldDescriptors(badFieldDescriptor))
.isEqualTo(ignoringFieldMessage);
expectThat(ignoringFieldDiffMessage)
.ignoringFieldScope(FieldScopes.ignoringFieldDescriptors(badFieldDescriptor))
.isNotEqualTo(ignoringFieldMessage);
}
@Test
public void emptySubMessage() {
Message message = parse("o_int: 1 o_sub_test_message: { }");
Message eqMessage = parse("o_int: 2 o_sub_test_message: { }");
Message diffMessage = parse("o_int: 3");
// Different logic gets exercised when we add an 'ignore' clause.
// Let's ensure o_sub_test_message is compared properly in all cases.
int fieldNumber = getFieldNumber("o_int");
expectThat(eqMessage).isNotEqualTo(message);
expectThat(eqMessage).ignoringFieldAbsence().isNotEqualTo(message);
expectThat(eqMessage).ignoringFields(fieldNumber).isEqualTo(message);
expectThat(eqMessage).ignoringFields(fieldNumber).ignoringFieldAbsence().isEqualTo(message);
expectThat(diffMessage).isNotEqualTo(message);
expectThat(diffMessage).ignoringFieldAbsence().isNotEqualTo(message);
expectThat(diffMessage).ignoringFields(fieldNumber).isNotEqualTo(message);
expectThat(diffMessage).ignoringFields(fieldNumber).ignoringFieldAbsence().isEqualTo(message);
}
@Test
public void ignoreSubMessageField() {
Message message = parse("o_int: 1 o_sub_test_message: { o_int: 2 }");
Message diffMessage = parse("o_int: 2 o_sub_test_message: { o_int: 2 }");
Message eqMessage1 = parse("o_int: 1");
Message eqMessage2 = parse("o_int: 1 o_sub_test_message: {}");
Message eqMessage3 = parse("o_int: 1 o_sub_test_message: { o_int: 3 r_string: \"x\" }");
int fieldNumber = getFieldNumber("o_sub_test_message");
expectThat(diffMessage).ignoringFields(fieldNumber).isNotEqualTo(message);
expectThat(eqMessage1).ignoringFields(fieldNumber).isEqualTo(message);
expectThat(eqMessage2).ignoringFields(fieldNumber).isEqualTo(message);
expectThat(eqMessage3).ignoringFields(fieldNumber).isEqualTo(message);
expectFailureWhenTesting().that(diffMessage).ignoringFields(fieldNumber).isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("modified: o_int: 1 -> 2");
expectFailureWhenTesting().that(eqMessage3).ignoringFields(fieldNumber).isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: o_sub_test_message");
}
@Test
public void ignoreFieldOfSubMessage() {
// Ignore o_int of sub message fields.
Message message = parse("o_int: 1 o_sub_test_message: { o_int: 2 r_string: \"foo\" }");
Message diffMessage1 = parse("o_int: 2 o_sub_test_message: { o_int: 2 r_string: \"foo\" }");
Message diffMessage2 = parse("o_int: 1 o_sub_test_message: { o_int: 2 r_string: \"bar\" }");
Message eqMessage = parse("o_int: 1 o_sub_test_message: { o_int: 3 r_string: \"foo\" }");
FieldDescriptor fieldDescriptor =
getFieldDescriptor("o_sub_test_message").getMessageType().findFieldByName("o_int");
FieldScope partialScope = FieldScopes.ignoringFieldDescriptors(fieldDescriptor);
expectThat(diffMessage1).withPartialScope(partialScope).isNotEqualTo(message);
expectThat(diffMessage2).withPartialScope(partialScope).isNotEqualTo(message);
expectThat(eqMessage).withPartialScope(partialScope).isEqualTo(message);
expectFailureWhenTesting().that(diffMessage1).withPartialScope(partialScope).isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("modified: o_int: 1 -> 2");
expectFailureWhenTesting().that(diffMessage2).withPartialScope(partialScope).isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure()
.hasMessageThat()
.contains("modified: o_sub_test_message.r_string[0]: \"foo\" -> \"bar\"");
}
@Test
public void ignoringFieldOfAnyMessage() throws Exception {
String typeUrl =
isProto3()
? "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage3"
: "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage2";
Message message =
parse("o_int: 1 o_any_message { [" + typeUrl + "]: { o_int: 2 r_string: \"foo\" } }");
Message diffMessage1 =
parse("o_int: 2 o_any_message { [" + typeUrl + "]: { o_int: 2 r_string: \"foo\" } }");
Message diffMessage2 =
parse("o_int: 1 o_any_message { [" + typeUrl + "]: { o_int: 2 r_string: \"bar\" } }");
Message eqMessage =
parse("o_int: 1 o_any_message { [" + typeUrl + "]: { o_int: 3 r_string: \"foo\" } }");
FieldDescriptor fieldDescriptor =
getTypeRegistry().getDescriptorForTypeUrl(typeUrl).findFieldByName("o_int");
FieldScope partialScope = FieldScopes.ignoringFieldDescriptors(fieldDescriptor);
expectThat(diffMessage1)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.withPartialScope(partialScope)
.isNotEqualTo(message);
expectThat(diffMessage2)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.withPartialScope(partialScope)
.isNotEqualTo(message);
expectThat(eqMessage)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.withPartialScope(partialScope)
.isEqualTo(message);
expectFailureWhenTesting()
.that(diffMessage1)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.withPartialScope(partialScope)
.isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("modified: o_int: 1 -> 2");
expectFailureWhenTesting()
.that(diffMessage2)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.withPartialScope(partialScope)
.isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure()
.hasMessageThat()
.contains("modified: o_any_message.value.r_string[0]: \"foo\" -> \"bar\"");
}
@Test
public void anyMessageComparingExpectedFieldsOnly() {
String typeUrl =
isProto3()
? "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage3"
: "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage2";
Message message = parse("o_any_message { [" + typeUrl + "]: { o_int: 2 } }");
Message eqMessage =
parse("o_any_message { [" + typeUrl + "]: { o_int: 2 r_string: \"foo\" } }");
Message diffMessage =
parse("o_any_message { [" + typeUrl + "]: { o_int: 3 r_string: \"bar\" } }");
expectThat(eqMessage)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.comparingExpectedFieldsOnly()
.isEqualTo(message);
expectThat(diffMessage)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.comparingExpectedFieldsOnly()
.isNotEqualTo(message);
}
@Test
public void invalidAnyMessageComparingExpectedFieldsOnly() {
Message message = parse("o_any_message { type_url: 'invalid-type' value: 'abc123' }");
Message eqMessage = parse("o_any_message { type_url: 'invalid-type' value: 'abc123' }");
Message diffMessage = parse("o_any_message { type_url: 'invalid-type' value: 'def456' }");
expectThat(eqMessage)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.comparingExpectedFieldsOnly()
.isEqualTo(message);
expectThat(diffMessage)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.comparingExpectedFieldsOnly()
.isNotEqualTo(message);
}
@Test
public void differentAnyMessagesComparingExpectedFieldsOnly() {
// 'o_int' and 'o_float' have the same field numbers in both messages. However, to compare
// accurately, we incorporate the unpacked Descriptor type into the FieldNumberTree as well to
// disambiguate.
String typeUrl1 =
isProto3()
? "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage3"
: "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage2";
String typeUrl2 =
isProto3()
? "type.googleapis.com/com.google.common.truth.extensions.proto.SubSubTestMessage3"
: "type.googleapis.com/com.google.common.truth.extensions.proto.SubSubTestMessage2";
Message message =
parse(
"r_any_message { ["
+ typeUrl1
+ "]: { o_int: 2 } } r_any_message { ["
+ typeUrl2
+ "]: { o_float: 3.1 } }");
Message eqMessage =
parse(
"r_any_message { ["
+ typeUrl1
+ "]: { o_int: 2 o_float: 1.9 } } r_any_message { ["
+ typeUrl2
+ "]: { o_int: 5 o_float: 3.1 } }");
Message diffMessage =
parse(
"r_any_message { ["
+ typeUrl1
+ "]: { o_int: 5 o_float: 3.1 } } r_any_message { ["
+ typeUrl2
+ "]: { o_int: 2 o_float: 1.9 } }");
expectThat(eqMessage)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.ignoringRepeatedFieldOrder()
.comparingExpectedFieldsOnly()
.isEqualTo(message);
expectThat(diffMessage)
.unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
.ignoringRepeatedFieldOrder()
.comparingExpectedFieldsOnly()
.isNotEqualTo(message);
}
@Test
public void ignoringAllButOneFieldOfSubMessage() {
// Consider all of TestMessage, but none of o_sub_test_message, except
// o_sub_test_message.o_int.
Message message =
parse(
"o_int: 3 o_sub_test_message: { o_int: 4 r_string: \"foo\" } "
+ "r_sub_test_message: { o_int: 5 r_string: \"bar\" }");
// All of these differ in a critical field.
Message diffMessage1 =
parse(
"o_int: 999999 o_sub_test_message: { o_int: 4 r_string: \"foo\" } "
+ "r_sub_test_message: { o_int: 5 r_string: \"bar\" }");
Message diffMessage2 =
parse(
"o_int: 3 o_sub_test_message: { o_int: 999999 r_string: \"foo\" } "
+ "r_sub_test_message: { o_int: 5 r_string: \"bar\" }");
Message diffMessage3 =
parse(
"o_int: 3 o_sub_test_message: { o_int: 4 r_string: \"foo\" } "
+ "r_sub_test_message: { o_int: 999999 r_string: \"bar\" }");
Message diffMessage4 =
parse(
"o_int: 3 o_sub_test_message: { o_int: 4 r_string: \"foo\" } "
+ "r_sub_test_message: { o_int: 5 r_string: \"999999\" }");
// This one only differs in o_sub_test_message.r_string, which is ignored.
Message eqMessage =
parse(
"o_int: 3 o_sub_test_message: { o_int: 4 r_string: \"999999\" } "
+ "r_sub_test_message: { o_int: 5 r_string: \"bar\" }");
FieldScope fieldScope =
FieldScopes.ignoringFields(getFieldNumber("o_sub_test_message"))
.allowingFieldDescriptors(
getFieldDescriptor("o_sub_test_message").getMessageType().findFieldByName("o_int"));
expectThat(diffMessage1).withPartialScope(fieldScope).isNotEqualTo(message);
expectThat(diffMessage2).withPartialScope(fieldScope).isNotEqualTo(message);
expectThat(diffMessage3).withPartialScope(fieldScope).isNotEqualTo(message);
expectThat(diffMessage4).withPartialScope(fieldScope).isNotEqualTo(message);
expectThat(eqMessage).withPartialScope(fieldScope).isEqualTo(message);
expectFailureWhenTesting().that(diffMessage4).withPartialScope(fieldScope).isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure()
.hasMessageThat()
.contains("modified: r_sub_test_message[0].r_string[0]: \"bar\" -> \"999999\"");
expectFailureWhenTesting().that(eqMessage).withPartialScope(fieldScope).isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: o_sub_test_message.r_string");
}
@Test
public void fromSetFields() {
Message scopeMessage =
parse(
"o_int: 1 r_string: \"x\" o_test_message: { o_int: 1 } "
+ "r_test_message: { r_string: \"x\" } r_test_message: { o_int: 1 } "
+ "o_sub_test_message: { o_test_message: { o_int: 1 } }");
// 1 = compared, [2, 3] = ignored, 4 = compared and fails
Message message =
parse(
"o_int: 1 r_string: \"1\" o_test_message: {o_int: 1 r_string: \"2\" } "
+ "r_test_message: { o_int: 1 r_string: \"1\" } "
+ "r_test_message: { o_int: 1 r_string: \"1\" } "
+ "o_sub_test_message: { o_int: 2 o_test_message: { o_int: 1 r_string: \"2\" } }");
Message diffMessage =
parse(
"o_int: 4 r_string: \"4\" o_test_message: {o_int: 4 r_string: \"3\" } "
+ "r_test_message: { o_int: 4 r_string: \"4\" } "
+ "r_test_message: { o_int: 4 r_string: \"4\" }"
+ "o_sub_test_message: { r_string: \"3\" o_int: 3 "
+ "o_test_message: { o_int: 4 r_string: \"3\" } }");
Message eqMessage =
parse(
"o_int: 1 r_string: \"1\" o_test_message: {o_int: 1 r_string: \"3\" } "
+ "r_test_message: { o_int: 1 r_string: \"1\" } "
+ "r_test_message: { o_int: 1 r_string: \"1\" }"
+ "o_sub_test_message: { o_int: 3 o_test_message: { o_int: 1 r_string: \"3\" } }");
expectThat(diffMessage).isNotEqualTo(message);
expectThat(eqMessage).isNotEqualTo(message);
expectThat(diffMessage)
.withPartialScope(FieldScopes.fromSetFields(scopeMessage))
.isNotEqualTo(message);
expectThat(eqMessage)
.withPartialScope(FieldScopes.fromSetFields(scopeMessage))
.isEqualTo(message);
expectFailureWhenTesting().that(diffMessage).isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("1 -> 4");
expectThatFailure().hasMessageThat().contains("\"1\" -> \"4\"");
expectThatFailure().hasMessageThat().contains("2 -> 3");
expectThatFailure().hasMessageThat().contains("\"2\" -> \"3\"");
expectFailureWhenTesting()
.that(diffMessage)
.withPartialScope(FieldScopes.fromSetFields(scopeMessage))
.isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("1 -> 4");
expectThatFailure().hasMessageThat().contains("\"1\" -> \"4\"");
expectThatFailure().hasMessageThat().doesNotContain("2 -> 3");
expectThatFailure().hasMessageThat().doesNotContain("\"2\" -> \"3\"");
expectFailureWhenTesting()
.that(eqMessage)
.withPartialScope(FieldScopes.fromSetFields(scopeMessage))
.isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: o_test_message.r_string");
expectThatFailure().hasMessageThat().contains("ignored: o_sub_test_message.o_int");
expectThatFailure()
.hasMessageThat()
.contains("ignored: o_sub_test_message.o_test_message.r_string");
}
@Test
public void fromSetFields_comparingExpectedFieldsOnly() throws InvalidProtocolBufferException {
Message message1 = parse("o_int: 1 o_double: 333 oneof_message1: { o_int: 3 o_double: 333 }");
Message message2 =
parse("o_int: 333 o_double: 1.2 oneof_message2: { o_int: 333 o_double: 3.14 }");
Message diffMessage1 = parse("o_int: 1 oneof_message1: { o_int: 4 }");
Message diffMessage2 = parse("o_double: 1.2 oneof_message2: { o_double: 4.14 }");
Message eqMessage1 = parse("o_int: 1 oneof_message1: { o_int: 3 }");
Message eqMessage2 = parse("o_double: 1.2 oneof_message2: { o_double: 3.14 }");
expectThat(message1).comparingExpectedFieldsOnly().isEqualTo(eqMessage1);
expectThat(message2).comparingExpectedFieldsOnly().isEqualTo(eqMessage2);
expectFailureWhenTesting().that(message1).comparingExpectedFieldsOnly().isEqualTo(diffMessage1);
expectFailureWhenTesting().that(message2).comparingExpectedFieldsOnly().isEqualTo(diffMessage2);
expectThat(listOf(message1, message2))
.comparingExpectedFieldsOnly()
.containsExactly(eqMessage1, eqMessage2);
expectFailureWhenTesting()
.that(listOf(message1, message2))
.comparingExpectedFieldsOnly()
.containsExactly(diffMessage1, eqMessage2);
expectFailureWhenTesting()
.that(listOf(message1, message2))
.comparingExpectedFieldsOnly()
.containsExactly(eqMessage1, diffMessage2);
}
@Test
public void fromSetFields_unknownFields() throws InvalidProtocolBufferException {
// Make sure that merging of repeated fields, separation by tag number, and separation by
// unknown field type all work.
Message scopeMessage =
fromUnknownFields(
UnknownFieldSet.newBuilder()
.addField(333, Field.newBuilder().addFixed32(1).addFixed64(1).build())
.addField(
444,
Field.newBuilder()
.addVarint(1)
.addLengthDelimited(ByteString.copyFrom("1", UTF_8))
.addGroup(
UnknownFieldSet.newBuilder()
.addField(1, Field.newBuilder().addFixed32(1).build())
.build())
.addGroup(
UnknownFieldSet.newBuilder()
.addField(2, Field.newBuilder().addFixed64(1).build())
.build())
.build())
.build());
// 1 = compared, [2, 3] = ignored, 4 = compared and fails
Message message =
fromUnknownFields(
UnknownFieldSet.newBuilder()
.addField(222, Field.newBuilder().addFixed32(2).addFixed64(2).build())
.addField(
333,
Field.newBuilder()
.addFixed32(1)
.addFixed64(1)
.addVarint(2)
.addLengthDelimited(ByteString.copyFrom("2", UTF_8))
.addGroup(
UnknownFieldSet.newBuilder()
.addField(1, Field.newBuilder().addFixed32(2).build())
.build())
.build())
.addField(
444,
Field.newBuilder()
.addFixed32(2)
.addFixed64(2)
.addVarint(1)
.addLengthDelimited(ByteString.copyFrom("1", UTF_8))
.addGroup(
UnknownFieldSet.newBuilder()
.addField(1, Field.newBuilder().addFixed32(1).addFixed64(2).build())
.addField(2, Field.newBuilder().addFixed32(2).addFixed64(1).build())
.addField(3, Field.newBuilder().addFixed32(2).build())
.build())
.build())
.build());
Message diffMessage =
fromUnknownFields(
UnknownFieldSet.newBuilder()
.addField(222, Field.newBuilder().addFixed32(3).addFixed64(3).build())
.addField(
333,
Field.newBuilder()
.addFixed32(4)
.addFixed64(4)
.addVarint(3)
.addLengthDelimited(ByteString.copyFrom("3", UTF_8))
.addGroup(
UnknownFieldSet.newBuilder()
.addField(1, Field.newBuilder().addFixed32(3).build())
.build())
.build())
.addField(
444,
Field.newBuilder()
.addFixed32(3)
.addFixed64(3)
.addVarint(4)
.addLengthDelimited(ByteString.copyFrom("4", UTF_8))
.addGroup(
UnknownFieldSet.newBuilder()
.addField(1, Field.newBuilder().addFixed32(4).addFixed64(3).build())
.addField(2, Field.newBuilder().addFixed32(3).addFixed64(4).build())
.addField(3, Field.newBuilder().addFixed32(3).build())
.build())
.build())
.build());
Message eqMessage =
fromUnknownFields(
UnknownFieldSet.newBuilder()
.addField(222, Field.newBuilder().addFixed32(3).addFixed64(3).build())
.addField(
333,
Field.newBuilder()
.addFixed32(1)
.addFixed64(1)
.addVarint(3)
.addLengthDelimited(ByteString.copyFrom("3", UTF_8))
.addGroup(
UnknownFieldSet.newBuilder()
.addField(1, Field.newBuilder().addFixed32(3).build())
.build())
.build())
.addField(
444,
Field.newBuilder()
.addFixed32(3)
.addFixed64(3)
.addVarint(1)
.addLengthDelimited(ByteString.copyFrom("1", UTF_8))
.addGroup(
UnknownFieldSet.newBuilder()
.addField(1, Field.newBuilder().addFixed32(1).addFixed64(3).build())
.addField(2, Field.newBuilder().addFixed32(3).addFixed64(1).build())
.addField(3, Field.newBuilder().addFixed32(3).build())
.build())
.build())
.build());
expectThat(diffMessage).isNotEqualTo(message);
expectThat(eqMessage).isNotEqualTo(message);
expectThat(diffMessage)
.withPartialScope(FieldScopes.fromSetFields(scopeMessage))
.isNotEqualTo(message);
expectThat(eqMessage)
.withPartialScope(FieldScopes.fromSetFields(scopeMessage))
.isEqualTo(message);
expectFailureWhenTesting().that(diffMessage).isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("1 -> 4");
expectThatFailure().hasMessageThat().contains("\"1\" -> \"4\"");
expectThatFailure().hasMessageThat().contains("2 -> 3");
expectThatFailure().hasMessageThat().contains("\"2\" -> \"3\"");
expectFailureWhenTesting()
.that(diffMessage)
.withPartialScope(FieldScopes.fromSetFields(scopeMessage))
.isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("1 -> 4");
expectThatFailure().hasMessageThat().contains("\"1\" -> \"4\"");
expectThatFailure().hasMessageThat().doesNotContain("2 -> 3");
expectThatFailure().hasMessageThat().doesNotContain("\"2\" -> \"3\"");
expectFailureWhenTesting()
.that(eqMessage)
.withPartialScope(FieldScopes.fromSetFields(scopeMessage))
.isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().doesNotContain("2 -> 3");
expectThatFailure().hasMessageThat().doesNotContain("\"2\" -> \"3\"");
}
@Test
public void fieldNumbersAreRecursive() {
// o_int is compared, r_string is not.
Message message =
parse("o_int: 1 r_string: \"foo\" r_test_message: { o_int: 2 r_string: \"bar\" }");
Message diffMessage =
parse("o_int: 2 r_string: \"bar\" r_test_message: { o_int: 1 r_string: \"foo\" }");
Message eqMessage =
parse("o_int: 1 r_string: \"bar\" r_test_message: { o_int: 2 r_string: \"foo\" }");
int fieldNumber = getFieldNumber("o_int");
FieldDescriptor fieldDescriptor = getFieldDescriptor("o_int");
expectThat(diffMessage)
.withPartialScope(FieldScopes.allowingFields(fieldNumber))
.isNotEqualTo(message);
expectThat(eqMessage)
.withPartialScope(FieldScopes.allowingFields(fieldNumber))
.isEqualTo(message);
expectThat(diffMessage)
.withPartialScope(FieldScopes.allowingFieldDescriptors(fieldDescriptor))
.isNotEqualTo(message);
expectThat(eqMessage)
.withPartialScope(FieldScopes.allowingFieldDescriptors(fieldDescriptor))
.isEqualTo(message);
expectFailureWhenTesting()
.that(diffMessage)
.withPartialScope(FieldScopes.allowingFields(fieldNumber))
.isEqualTo(message);
expectIsEqualToFailed();
expectThatFailure().hasMessageThat().contains("modified: o_int: 1 -> 2");
expectThatFailure().hasMessageThat().contains("modified: r_test_message[0].o_int: 2 -> 1");
expectFailureWhenTesting()
.that(eqMessage)
.withPartialScope(FieldScopes.allowingFields(fieldNumber))
.isNotEqualTo(message);
expectIsNotEqualToFailed();
expectThatFailure().hasMessageThat().contains("ignored: r_test_message[0].r_string");
}
  @Test
  public void multipleFieldNumbers() {
    // A scope allowing several field numbers compares all of them, while everything else
    // (o_enum and o_sub_test_message here) is ignored.
    Message message = parse("o_int: 1 r_string: \"x\" o_enum: TWO");
    Message diffMessage = parse("o_int: 2 r_string: \"y\" o_enum: TWO");
    Message eqMessage =
        parse("o_int: 1 r_string: \"x\" o_enum: ONE o_sub_test_message: { r_string: \"bar\" }");
    FieldScope fieldScope =
        FieldScopes.allowingFields(getFieldNumber("o_int"), getFieldNumber("r_string"));
    expectThat(diffMessage).withPartialScope(fieldScope).isNotEqualTo(message);
    expectThat(eqMessage).withPartialScope(fieldScope).isEqualTo(message);
    // Failure output reports mismatches for the in-scope fields...
    expectFailureWhenTesting().that(diffMessage).withPartialScope(fieldScope).isEqualTo(message);
    expectIsEqualToFailed();
    expectThatFailure().hasMessageThat().contains("modified: o_int: 1 -> 2");
    expectThatFailure().hasMessageThat().contains("modified: r_string[0]: \"x\" -> \"y\"");
    // ...and lists the out-of-scope fields as ignored.
    expectFailureWhenTesting().that(eqMessage).withPartialScope(fieldScope).isNotEqualTo(message);
    expectIsNotEqualToFailed();
    expectThatFailure().hasMessageThat().contains("ignored: o_enum");
    expectThatFailure().hasMessageThat().contains("ignored: o_sub_test_message");
  }
@Test
public void invalidFieldNumber() {
Message message1 = parse("o_int: 44");
Message message2 = parse("o_int: 33");
Exception expected =
assertThrows(
Exception.class, () -> assertThat(message1).ignoringFields(999).isEqualTo(message2));
Throwable cause = expected;
while (cause != null) {
if (cause
.getMessage()
.contains("Message type " + fullMessageName() + " has no field with number 999.")) {
break;
} else {
cause = cause.getCause();
}
}
if (cause == null) {
fail("No cause with field number error message.");
}
}
@Test
public void ignoreFieldsAtDifferentLevels() {
// Ignore all 'o_int' fields, in different ways.
Message message =
parse(
"o_int: 1 r_string: \"foo\" o_sub_test_message: { o_int: 2 "
+ "o_sub_sub_test_message: { o_int: 3 r_string: \"bar\" } }");
// Even though o_int is ignored, message presence is not. So these all fail.
Message diffMessage1 = parse("r_string: \"baz\"");
Message diffMessage2 = parse("r_string: \"foo\"");
Message diffMessage3 = parse("r_string: \"foo\" o_sub_test_message: {}");
Message diffMessage4 =
parse("r_string: \"foo\" o_sub_test_message: { o_sub_sub_test_message: {} }");
// All of these messages are equivalent, because all o_int are ignored.
Message eqMessage1 =
parse(
"o_int: 111 r_string: \"foo\" o_sub_test_message: { o_int: 222 "
+ "o_sub_sub_test_message: { o_int: 333 r_string: \"bar\" } }");
Message eqMessage2 =
parse(
"o_int: 1 r_string: \"foo\" o_sub_test_message: { o_int: 2 "
+ "o_sub_sub_test_message: { o_int: 3 r_string: \"bar\" } }");
Message eqMessage3 =
parse(
"r_string: \"foo\" o_sub_test_message: { "
+ "o_sub_sub_test_message: { r_string: \"bar\" } }");
Message eqMessage4 =
parse(
"o_int: 333 r_string: \"foo\" o_sub_test_message: { o_int: 111 "
+ "o_sub_sub_test_message: { o_int: 222 r_string: \"bar\" } }");
FieldDescriptor top = getFieldDescriptor("o_int");
FieldDescriptor middle =
getFieldDescriptor("o_sub_test_message").getMessageType().findFieldByName("o_int");
FieldDescriptor bottom =
getFieldDescriptor("o_sub_test_message")
.getMessageType()
.findFieldByName("o_sub_sub_test_message")
.getMessageType()
.findFieldByName("o_int");
ImmutableMap<String, FieldScope> fieldScopes =
ImmutableMap.of(
"BASIC", FieldScopes.ignoringFieldDescriptors(top, middle, bottom),
"CHAINED",
FieldScopes.ignoringFieldDescriptors(top)
.ignoringFieldDescriptors(middle)
.ignoringFieldDescriptors(bottom),
"REPEATED",
FieldScopes.ignoringFieldDescriptors(top, middle)
.ignoringFieldDescriptors(middle, bottom));
for (String scopeName : fieldScopes.keySet()) {
String msg = "FieldScope(" + scopeName + ")";
FieldScope scope = fieldScopes.get(scopeName);
expectThatWithMessage(msg, diffMessage1).withPartialScope(scope).isNotEqualTo(message);
expectThatWithMessage(msg, diffMessage2).withPartialScope(scope).isNotEqualTo(message);
expectThatWithMessage(msg, diffMessage3).withPartialScope(scope).isNotEqualTo(message);
expectThatWithMessage(msg, diffMessage4).withPartialScope(scope).isNotEqualTo(message);
expectThatWithMessage(msg, eqMessage1).withPartialScope(scope).isEqualTo(message);
expectThatWithMessage(msg, eqMessage2).withPartialScope(scope).isEqualTo(message);
expectThatWithMessage(msg, eqMessage3).withPartialScope(scope).isEqualTo(message);
expectThatWithMessage(msg, eqMessage4).withPartialScope(scope).isEqualTo(message);
}
}
  @Test
  public void fromSetFields_skipNulls() {
    // Null elements passed to fromSetFields() are skipped when building the scope, for both the
    // Iterable and the varargs overloads.
    Message message1 = parse("o_int: 1 r_string: \"foo\" r_string: \"bar\"");
    Message eqMessage1 = parse("o_int: 1 r_string: \"foo\" r_string: \"bar\"");
    Message eqIgnoredMessage1 = parse("o_int: 2 r_string: \"foo\" r_string: \"bar\"");
    Message message2 = parse("o_int: 3 r_string: \"baz\" r_string: \"qux\"");
    Message eqMessage2 = parse("o_int: 3 r_string: \"baz\" r_string: \"qux\"");
    Message eqIgnoredMessage2 = parse("o_int: 4 r_string: \"baz\" r_string: \"qux\"");
    List<Message> messages = new ArrayList<>();
    Message nullMessage = null;
    messages.add(parse("o_int: -1"));
    messages.add(nullMessage);
    messages.add(parse("r_string: \"NaN\""));
    expectThat(listOf(message1, message2))
        .withPartialScope(FieldScopes.fromSetFields(messages))
        .containsExactly(eqMessage1, eqMessage2);
    expectThat(listOf(message1, message2))
        .withPartialScope(
            FieldScopes.fromSetFields(parse("o_int: -1"), nullMessage, parse("r_string: \"NaN\"")))
        .containsExactly(eqMessage1, eqMessage2);
    // The failure description renders the null element as "null" rather than dropping it.
    expectFailureWhenTesting()
        .that(listOf(message1, message2))
        .withPartialScope(FieldScopes.fromSetFields(messages))
        .containsExactly(eqIgnoredMessage1, eqIgnoredMessage2);
    expectThatFailure()
        .factValue("testing whether")
        .contains(
            "is equivalent according to "
                + "assertThat(proto)"
                + ".withPartialScope("
                + "FieldScopes.fromSetFields(["
                + "{o_int: -1\n}, null, {r_string: \"NaN\"\n}]))"
                + ".isEqualTo(target)");
    // Same expectations for the varargs overload.
    expectFailureWhenTesting()
        .that(listOf(message1, message2))
        .withPartialScope(
            FieldScopes.fromSetFields(parse("o_int: -1"), nullMessage, parse("r_string: \"NaN\"")))
        .containsExactly(eqIgnoredMessage1, eqIgnoredMessage2);
    expectThatFailure()
        .factValue("testing whether")
        .contains(
            "is equivalent according to "
                + "assertThat(proto)"
                + ".withPartialScope("
                + "FieldScopes.fromSetFields(["
                + "{o_int: -1\n}, null, {r_string: \"NaN\"\n}]))"
                + ".isEqualTo(target)");
  }
  @Test
  public void fromSetFields_iterables_vacuousIfEmptyOrAllNull() {
    // A fromSetFields() scope built from no messages (an empty iterable, or one containing only
    // nulls) covers no fields, so any two messages of the type compare as equal under it.
    Message message1 = parse("o_int: 1 r_string: \"foo\" r_string: \"bar\"");
    Message eqIgnoredMessage1 = parse("o_int: 2 r_string: \"foo\" r_string: \"bar\"");
    Message message2 = parse("o_int: 3 r_string: \"baz\" r_string: \"qux\"");
    Message eqIgnoredMessage2 = parse("o_int: 4 r_string: \"baz\" r_string: \"qux\"");
    List<Message> messages = new ArrayList<>();
    messages.add(null);
    messages.add(null);
    expectThat(listOf(message1, message2))
        .withPartialScope(FieldScopes.fromSetFields(ImmutableList.<Message>of()))
        .containsExactly(eqIgnoredMessage1, eqIgnoredMessage2);
    expectThat(listOf(message1, message2))
        .withPartialScope(FieldScopes.fromSetFields(messages))
        .containsExactly(eqIgnoredMessage1, eqIgnoredMessage2);
    // Conversely, under the vacuous scope everything is "equal", so containsNoneOf must fail.
    expectFailureWhenTesting()
        .that(listOf(message1, message2))
        .withPartialScope(FieldScopes.fromSetFields(ImmutableList.<Message>of()))
        .containsNoneOf(eqIgnoredMessage1, eqIgnoredMessage2);
    expectFailureWhenTesting()
        .that(listOf(message1, message2))
        .withPartialScope(FieldScopes.fromSetFields(messages))
        .containsNoneOf(eqIgnoredMessage1, eqIgnoredMessage2);
    expectThatFailure().isNotNull();
  }
  @Test
  public void fromSetFields_iterables_errorForDifferentMessageTypes() {
    // Don't run this test twice.
    if (!testIsRunOnce()) {
      return;
    }
    // fromSetFields() requires all messages to share a descriptor; mixing TestMessage2 and
    // TestMessage3 must fail fast with an error that names both message types.
    RuntimeException expected =
        assertThrows(
            RuntimeException.class,
            () ->
                FieldScopes.fromSetFields(
                    TestMessage2.newBuilder().setOInt(2).build(),
                    TestMessage3.newBuilder().setOInt(2).build()));
    expect
        .that(expected)
        .hasMessageThat()
        .contains("Cannot create scope from messages with different descriptors");
    expect.that(expected).hasMessageThat().contains(TestMessage2.getDescriptor().getFullName());
    expect.that(expected).hasMessageThat().contains(TestMessage3.getDescriptor().getFullName());
  }
  @Test
  public void fromSetFields_iterables_errorIfDescriptorMismatchesSubject() {
    // Don't run this test twice.
    if (!testIsRunOnce()) {
      return;
    }
    // Using a scope whose source messages (TestMessage3) have a different descriptor than the
    // message under test (TestMessage2) must fail with an error naming both types.
    Message message =
        TestMessage2.newBuilder().setOInt(1).addRString("foo").addRString("bar").build();
    Message eqMessage =
        TestMessage2.newBuilder().setOInt(1).addRString("foo").addRString("bar").build();
    RuntimeException expected =
        assertThrows(
            RuntimeException.class,
            () ->
                assertThat(message)
                    .withPartialScope(
                        FieldScopes.fromSetFields(
                            TestMessage3.newBuilder().setOInt(2).build(),
                            TestMessage3.newBuilder().addRString("foo").build()))
                    .isEqualTo(eqMessage));
    expect
        .that(expected)
        .hasMessageThat()
        .contains(
            "Message given to FieldScopes.fromSetFields() "
                + "does not have the same descriptor as the message being tested");
    expect.that(expected).hasMessageThat().contains(TestMessage2.getDescriptor().getFullName());
    expect.that(expected).hasMessageThat().contains(TestMessage3.getDescriptor().getFullName());
  }
@Test
public void fromSetFields_iterables_unionsElements() {
Message message = parse("o_int: 1 r_string: \"foo\" r_string: \"bar\"");
Message diffMessage1 = parse("o_int: 2 r_string: \"foo\" r_string: \"bar\"");
Message diffMessage2 = parse("o_int: 4 r_string: \"baz\" r_string: \"qux\"");
expectThat(listOf(message))
.ignoringFieldScope(FieldScopes.fromSetFields(parse("o_int: 1"), parse("o_enum: TWO")))
.containsExactly(diffMessage1);
expectFailureWhenTesting()
.that(listOf(message))
.ignoringFieldScope(FieldScopes.fromSetFields(parse("o_int: 1"), parse("o_enum: TWO")))
.containsExactly(diffMessage2);
expectThatFailure().isNotNull();
}
@Test
public void iterableFieldScopeMethodVariants_protoSubject() {
Message message = parse("o_int: 1 r_string: \"foo\"");
Message eqExceptInt = parse("o_int: 2 r_string: \"foo\"");
expectThat(message).ignoringFields(listOf(getFieldNumber("o_int"))).isEqualTo(eqExceptInt);
expectThat(message)
.reportingMismatchesOnly()
.ignoringFields(listOf(getFieldNumber("o_int")))
.isEqualTo(eqExceptInt);
expectThat(message)
.ignoringFieldScope(FieldScopes.allowingFields(listOf(getFieldNumber("o_int"))))
.isEqualTo(eqExceptInt);
expectThat(message)
.withPartialScope(FieldScopes.ignoringFields(listOf(getFieldNumber("o_int"))))
.isEqualTo(eqExceptInt);
expectThat(message)
.ignoringFieldDescriptors(listOf(getFieldDescriptor("o_int")))
.isEqualTo(eqExceptInt);
expectThat(message)
.reportingMismatchesOnly()
.ignoringFieldDescriptors(listOf(getFieldDescriptor("o_int")))
.isEqualTo(eqExceptInt);
expectThat(message)
.ignoringFieldScope(
FieldScopes.allowingFieldDescriptors(listOf(getFieldDescriptor("o_int"))))
.isEqualTo(eqExceptInt);
expectThat(message)
.withPartialScope(FieldScopes.ignoringFieldDescriptors(listOf(getFieldDescriptor("o_int"))))
.isEqualTo(eqExceptInt);
}
  @Test
  public void iterableFieldScopeMethodVariants_iterableOfProtosSubject() {
    // Exercises the IterableOfProtosSubject overloads that accept an Iterable of field numbers
    // or field descriptors; each must ignore o_int like its varargs counterpart.
    ImmutableList<Message> messages = listOf(parse("o_int: 1 r_string: \"foo\""));
    ImmutableList<Message> eqExceptInt = listOf(parse("o_int: 2 r_string: \"foo\""));
    expectThat(messages)
        .ignoringFields(listOf(getFieldNumber("o_int")))
        .containsExactlyElementsIn(eqExceptInt);
    expectThat(messages)
        .reportingMismatchesOnly()
        .ignoringFields(listOf(getFieldNumber("o_int")))
        .containsExactlyElementsIn(eqExceptInt);
    expectThat(messages)
        .ignoringFieldDescriptors(listOf(getFieldDescriptor("o_int")))
        .containsExactlyElementsIn(eqExceptInt);
    expectThat(messages)
        .reportingMismatchesOnly()
        .ignoringFieldDescriptors(listOf(getFieldDescriptor("o_int")))
        .containsExactlyElementsIn(eqExceptInt);
  }
  @Test
  public void iterableFieldScopeMethodVariants_mapWithProtoValuesSubject() {
    // Exercises the MapWithProtoValuesSubject "...ForValues" overloads that accept an Iterable of
    // field numbers or field descriptors; each must ignore o_int in the map values.
    ImmutableMap<String, Message> messages =
        ImmutableMap.of("foo", parse("o_int: 1 r_string: \"foo\""));
    ImmutableMap<String, Message> eqExceptInt =
        ImmutableMap.of("foo", parse("o_int: 2 r_string: \"foo\""));
    expectThat(messages)
        .ignoringFieldsForValues(listOf(getFieldNumber("o_int")))
        .containsExactlyEntriesIn(eqExceptInt);
    expectThat(messages)
        .reportingMismatchesOnlyForValues()
        .ignoringFieldsForValues(listOf(getFieldNumber("o_int")))
        .containsExactlyEntriesIn(eqExceptInt);
    expectThat(messages)
        .ignoringFieldDescriptorsForValues(listOf(getFieldDescriptor("o_int")))
        .containsExactlyEntriesIn(eqExceptInt);
    expectThat(messages)
        .reportingMismatchesOnlyForValues()
        .ignoringFieldDescriptorsForValues(listOf(getFieldDescriptor("o_int")))
        .containsExactlyEntriesIn(eqExceptInt);
  }
  @Test
  public void iterableFieldScopeMethodVariants_multimapWithProtoValuesSubject() {
    // Exercises the MultimapWithProtoValuesSubject "...ForValues" overloads that accept an
    // Iterable of field numbers or field descriptors; each must ignore o_int in the values.
    ImmutableMultimap<String, Message> messages =
        ImmutableMultimap.of("foo", parse("o_int: 1 r_string: \"foo\""));
    ImmutableMultimap<String, Message> eqExceptInt =
        ImmutableMultimap.of("foo", parse("o_int: 2 r_string: \"foo\""));
    expectThat(messages)
        .ignoringFieldsForValues(listOf(getFieldNumber("o_int")))
        .containsExactlyEntriesIn(eqExceptInt);
    expectThat(messages)
        .reportingMismatchesOnlyForValues()
        .ignoringFieldsForValues(listOf(getFieldNumber("o_int")))
        .containsExactlyEntriesIn(eqExceptInt);
    expectThat(messages)
        .ignoringFieldDescriptorsForValues(listOf(getFieldDescriptor("o_int")))
        .containsExactlyEntriesIn(eqExceptInt);
    expectThat(messages)
        .reportingMismatchesOnlyForValues()
        .ignoringFieldDescriptorsForValues(listOf(getFieldDescriptor("o_int")))
        .containsExactlyEntriesIn(eqExceptInt);
  }
@Test
public void testFieldScopeToString_isTextFormat() {
Message message = parse("o_int: 3 r_string: \"foo\"");
FieldScope fieldScope = FieldScopes.fromSetFields(message);
expect.that(fieldScope.toString()).contains("FieldScopes.fromSetFields(o_int: 3");
FieldScope fieldScopeFromList = FieldScopes.fromSetFields(ImmutableList.of(message));
expect.that(fieldScopeFromList.toString()).contains("FieldScopes.fromSetFields(o_int: 3");
}
}
| FieldScopesTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java | {
"start": 620,
"end": 936
} | class ____ extends ActionType<AcknowledgedResponse> {
public static final SetUpgradeModeAction INSTANCE = new SetUpgradeModeAction();
public static final String NAME = "cluster:admin/xpack/ml/upgrade_mode";
private SetUpgradeModeAction() {
super(NAME);
}
public static | SetUpgradeModeAction |
java | alibaba__nacos | plugin-default-impl/nacos-default-control-plugin/src/main/java/com/alibaba/nacos/plugin/control/impl/NacosControlManagerBuilder.java | {
"start": 960,
"end": 1382
} | class ____ implements ControlManagerBuilder {
@Override
public String getName() {
return "nacos";
}
@Override
public ConnectionControlManager buildConnectionControlManager() {
return new NacosConnectionControlManager();
}
@Override
public TpsControlManager buildTpsControlManager() {
return new NacosTpsControlManager();
}
}
| NacosControlManagerBuilder |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/AbstractWasbTestBase.java | {
"start": 1289,
"end": 1662
} | class ____ provides basic setup and teardown of testing Azure
* Storage account. Each subclass defines a different set of test cases to run
* and overrides {@link #createTestAccount()} to set up the testing account used
* to run those tests. The returned account might integrate with Azure Storage
* directly or it might be a mock implementation.
*/
public abstract | that |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/inheritance/singletable/DuplicatedDiscriminatorValueTest.java | {
"start": 2966,
"end": 3146
} | class ____ extends Building {
}
@Entity(name = "Building2")
@DiscriminatorValue(DISCRIMINATOR_VALUE) // Duplicated discriminator value in single hierarchy.
public static | Building1 |
java | quarkusio__quarkus | extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/registry/json/JsonExporter.java | {
"start": 1150,
"end": 9893
} | class ____ {
private static final Map<String, ?> JSON_CONFIG = Collections.singletonMap(JsonGenerator.PRETTY_PRINTING, true);
public JsonExporter() {
}
public StringBuilder exportEverything(JsonMeterRegistry meterRegistry) {
JsonObjectBuilder root = jsonProvider().createObjectBuilder();
List<Gauge> gauges = new ArrayList<>();
List<Counter> counters = new ArrayList<>();
List<TimeGauge> timeGauges = new ArrayList<>();
List<FunctionCounter> functionCounters = new ArrayList<>();
List<Timer> timers = new ArrayList<>();
List<LongTaskTimer> longTaskTimers = new ArrayList<>();
List<FunctionTimer> functionTimers = new ArrayList<>();
List<DistributionSummary> distributionSummaries = new ArrayList<>();
List<Meter> meters = new ArrayList<>();
meterRegistry.getMeters().forEach(meter -> meter.match(gauges::add,
counters::add,
timers::add,
distributionSummaries::add,
longTaskTimers::add,
timeGauges::add,
functionCounters::add,
functionTimers::add,
meters::add));
exportCounters(counters).forEach(root::add);
exportGauges(gauges).forEach(root::add);
exportTimeGauges(timeGauges).forEach(root::add);
exportFunctionCounters(functionCounters).forEach(root::add);
exportTimers(timers).forEach(root::add);
exportLongTaskTimers(longTaskTimers).forEach(root::add);
exportFunctionTimers(functionTimers).forEach(root::add);
exportDistributionSummaries(distributionSummaries).forEach(root::add);
return stringify(root.build());
}
private Map<String, JsonValue> exportGauges(Collection<Gauge> gauges) {
Map<String, JsonValue> result = new HashMap<String, JsonValue>(gauges.size());
for (Gauge g : gauges) {
double value = g.value();
if (Double.isFinite(value)) {
result.put(createExportKey(g.getId()), jsonProvider().createValue(value));
}
}
return result;
}
private Map<String, JsonValue> exportTimeGauges(Collection<TimeGauge> timeGauges) {
Map<String, JsonValue> result = new HashMap<String, JsonValue>(timeGauges.size());
for (TimeGauge g : timeGauges) {
double value = g.value();
if (Double.isFinite(value)) {
result.put(createExportKey(g.getId()), jsonProvider().createValue(value));
}
}
return result;
}
private Map<String, JsonValue> exportCounters(Collection<Counter> counters) {
return counters.stream()
.collect(Collectors.toMap(counter -> createExportKey(counter.getId()),
counter -> jsonProvider().createValue(counter.count())));
}
private Map<String, JsonValue> exportFunctionCounters(Collection<FunctionCounter> counters) {
return counters.stream()
.collect(Collectors.toMap(counter -> createExportKey(counter.getId()),
counter -> jsonProvider().createValue(counter.count())));
}
private Map<String, JsonValue> exportTimers(Collection<Timer> timers) {
Map<String, List<Timer>> groups = timers.stream().collect(Collectors.groupingBy(timer -> timer.getId().getName()));
Map<String, JsonValue> result = new HashMap<>();
for (Map.Entry<String, List<Timer>> group : groups.entrySet()) {
JsonObjectBuilder builder = jsonProvider().createObjectBuilder();
for (Timer timer : group.getValue()) {
builder.add(createExportKey("count", timer.getId()), timer.count());
builder.add(createExportKey("elapsedTime", timer.getId()), timer.totalTime(timer.baseTimeUnit()));
}
result.put(group.getKey(), builder.build());
}
return result;
}
private Map<String, JsonValue> exportLongTaskTimers(Collection<LongTaskTimer> timers) {
Map<String, List<LongTaskTimer>> groups = timers.stream()
.collect(Collectors.groupingBy(timer -> timer.getId().getName()));
Map<String, JsonValue> result = new HashMap<>();
for (Map.Entry<String, List<LongTaskTimer>> group : groups.entrySet()) {
JsonObjectBuilder builder = jsonProvider().createObjectBuilder();
for (LongTaskTimer timer : group.getValue()) {
builder.add(createExportKey("activeTasks", timer.getId()), timer.activeTasks());
builder.add(createExportKey("duration", timer.getId()), timer.duration(timer.baseTimeUnit()));
builder.add(createExportKey("max", timer.getId()), timer.max(timer.baseTimeUnit()));
builder.add(createExportKey("mean", timer.getId()), timer.mean(timer.baseTimeUnit()));
}
result.put(group.getKey(), builder.build());
}
return result;
}
private Map<String, JsonValue> exportFunctionTimers(Collection<FunctionTimer> timers) {
Map<String, List<FunctionTimer>> groups = timers.stream()
.collect(Collectors.groupingBy(timer -> timer.getId().getName()));
Map<String, JsonValue> result = new HashMap<>();
for (Map.Entry<String, List<FunctionTimer>> group : groups.entrySet()) {
JsonObjectBuilder builder = jsonProvider().createObjectBuilder();
for (FunctionTimer timer : group.getValue()) {
builder.add(createExportKey("count", timer.getId()), timer.count());
builder.add(createExportKey("elapsedTime", timer.getId()), timer.totalTime(timer.baseTimeUnit()));
}
result.put(group.getKey(), builder.build());
}
return result;
}
private Map<String, JsonValue> exportDistributionSummaries(Collection<DistributionSummary> distributionSummaries) {
Map<String, List<DistributionSummary>> groups = distributionSummaries.stream()
.collect(Collectors.groupingBy(summary -> summary.getId().getName()));
Map<String, JsonValue> result = new HashMap<>();
for (Map.Entry<String, List<DistributionSummary>> group : groups.entrySet()) {
JsonObjectBuilder builder = jsonProvider().createObjectBuilder();
for (DistributionSummary summary : group.getValue()) {
HistogramSnapshot snapshot = summary.takeSnapshot();
if (summary instanceof JsonDistributionSummary) {
double min = ((JsonDistributionSummary) summary).min();
// if there are no samples yet, show 0 as the min
builder.add(createExportKey("min", summary.getId()), !Double.isNaN(min) ? min : 0);
}
builder.add(createExportKey("count", summary.getId()), snapshot.count());
builder.add(createExportKey("max", summary.getId()), snapshot.max());
builder.add(createExportKey("mean", summary.getId()), snapshot.mean());
for (ValueAtPercentile valueAtPercentile : snapshot.percentileValues()) {
if (Math.abs(valueAtPercentile.percentile() - 0.999) < 0.000001) {
builder.add(createExportKey("p999", summary.getId()),
valueAtPercentile.value());
} else {
builder.add(
createExportKey("p" + (int) Math.floor(valueAtPercentile.percentile() * 100), summary.getId()),
valueAtPercentile.value());
}
}
}
result.put(group.getKey(), builder.build());
}
return result;
}
private StringBuilder stringify(JsonObject obj) {
StringWriter out = new StringWriter();
try (JsonWriter writer = jsonProvider().createWriterFactory(JSON_CONFIG).createWriter(out)) {
writer.writeObject(obj);
}
return new StringBuilder(out.toString());
}
private String createExportKey(Meter.Id id) {
return id.getName() + createTagsString(id.getTags());
}
private String createExportKey(String componentKey, Meter.Id id) {
return componentKey + createTagsString(id.getTags());
}
private String createTagsString(List<Tag> tags) {
if (tags == null || tags.isEmpty()) {
return "";
} else {
return ";" + tags.stream()
.map(tag -> tag.getKey() + "=" + tag.getValue()
.replace(";", "_"))
.collect(Collectors.joining(";"));
}
}
}
| JsonExporter |
java | quarkusio__quarkus | extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaNativeContainer.java | {
"start": 659,
"end": 4400
} | class ____ extends GenericContainer<KafkaNativeContainer> implements Startable {
private static final String STARTER_SCRIPT = "/work/run.sh";
private final Integer fixedExposedPort;
private final boolean useSharedNetwork;
private String additionalArgs = null;
private int exposedPort = -1;
private final String hostName;
public KafkaNativeContainer(DockerImageName dockerImageName, int fixedExposedPort, String defaultNetworkId,
boolean useSharedNetwork) {
super(dockerImageName);
this.fixedExposedPort = fixedExposedPort;
this.useSharedNetwork = useSharedNetwork;
String cmd = String.format("while [ ! -f %s ]; do sleep 0.1; done; sleep 0.1; %s", STARTER_SCRIPT, STARTER_SCRIPT);
withCommand("sh", "-c", cmd);
waitingFor(Wait.forLogMessage(".*Kafka broker started.*", 1));
this.hostName = ConfigureUtil.configureNetwork(this, defaultNetworkId, useSharedNetwork, "kafka");
}
public KafkaNativeContainer withSharedServiceLabel(LaunchMode launchMode, String serviceName) {
return configureSharedServiceLabel(this, launchMode, DEV_SERVICE_LABEL, serviceName);
}
@Override
protected void containerIsStarting(InspectContainerResponse containerInfo, boolean reused) {
super.containerIsStarting(containerInfo, reused);
// Set exposed port
this.exposedPort = getMappedPort(DevServicesKafkaProcessor.KAFKA_PORT);
// follow output
// Start and configure the advertised address
String cmd = "#!/bin/bash\n";
cmd += "/work/kafka";
cmd += " -Dkafka.advertised.listeners=" + getBootstrapServers();
if (useSharedNetwork) {
cmd += " -Dkafka.listeners=BROKER://:9093,PLAINTEXT://:9092,CONTROLLER://:9094";
cmd += " -Dkafka.interbroker.listener.name=BROKER";
cmd += " -Dkafka.controller.listener.names=CONTROLLER";
cmd += " -Dkafka.listener.security.protocol.map=BROKER:PLAINTEXT,CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT";
cmd += " -Dkafka.early.start.listeners=BROKER,CONTROLLER,PLAINTEXT";
}
if (additionalArgs != null) {
cmd += " " + additionalArgs;
}
// docker exec since docker cp doesn't work with kubedock yet
try {
execInContainer("sh", "-c",
String.format("echo -e \"%1$s\" >> %2$s && chmod 777 %2$s", cmd, STARTER_SCRIPT));
} catch (Exception e) {
throw new RuntimeException("Can't create run script in the Kafka native container.", e);
}
}
private String getKafkaAdvertisedListeners() {
List<String> addresses = new ArrayList<>();
if (useSharedNetwork) {
addresses.add(String.format("BROKER://%s:9093", hostName));
}
// See https://github.com/quarkusio/quarkus/issues/21819
// Kafka is always exposed to the Docker host network
addresses.add(String.format("PLAINTEXT://%s:%d", getHost(), getExposedKafkaPort()));
return String.join(",", addresses);
}
public int getExposedKafkaPort() {
return exposedPort;
}
@Override
protected void configure() {
super.configure();
addExposedPort(DevServicesKafkaProcessor.KAFKA_PORT);
if (fixedExposedPort != null) {
addFixedExposedPort(fixedExposedPort, DevServicesKafkaProcessor.KAFKA_PORT);
}
}
public String getBootstrapServers() {
return getKafkaAdvertisedListeners();
}
@Override
public String getConnectionInfo() {
return getBootstrapServers();
}
@Override
public void close() {
super.close();
}
}
| KafkaNativeContainer |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 895832,
"end": 905581
} | class ____ etc. Not all 3rd party API gateways and tools supports vendor-extensions when importing your API docs.", displayName = "Api Vendor Extension"),
@YamlProperty(name = "bindingMode", type = "enum:auto,json,json_xml,off,xml", defaultValue = "off", description = "Sets the binding mode to use. The default value is off", displayName = "Binding Mode"),
@YamlProperty(name = "bindingPackageScan", type = "string", description = "Package name to use as base (offset) for classpath scanning of POJO classes are located when using binding mode is enabled for JSon or XML. Multiple package names can be separated by comma.", displayName = "Binding Package Scan"),
@YamlProperty(name = "clientRequestValidation", type = "boolean", defaultValue = "false", description = "Whether to enable validation of the client request to check: 1) Content-Type header matches what the Rest DSL consumes; returns HTTP Status 415 if validation error. 2) Accept header matches what the Rest DSL produces; returns HTTP Status 406 if validation error. 3) Missing required data (query parameters, HTTP headers, body); returns HTTP Status 400 if validation error. 4) Parsing error of the message body (JSon, XML or Auto binding mode must be enabled); returns HTTP Status 400 if validation error.", displayName = "Client Request Validation"),
@YamlProperty(name = "clientResponseValidation", type = "boolean", defaultValue = "false", description = "Whether to check what Camel is returning as response to the client: 1) Status-code and Content-Type matches Rest DSL response messages. 2) Check whether expected headers is included according to the Rest DSL repose message headers. 3) If the response body is JSon then check whether its valid JSon. Returns 500 if validation error detected.", displayName = "Client Response Validation"),
@YamlProperty(name = "component", type = "enum:platform-http,servlet,jetty,undertow,netty-http,coap", description = "The Camel Rest component to use for the REST transport (consumer), such as netty-http, jetty, servlet, undertow. If no component has been explicit configured, then Camel will lookup if there is a Camel component that integrates with the Rest DSL, or if a org.apache.camel.spi.RestConsumerFactory is registered in the registry. If either one is found, then that is being used.", displayName = "Component"),
@YamlProperty(name = "componentProperty", type = "array:org.apache.camel.model.rest.RestPropertyDefinition", description = "Allows to configure as many additional properties for the rest component in use.", displayName = "Component Property"),
@YamlProperty(name = "consumerProperty", type = "array:org.apache.camel.model.rest.RestPropertyDefinition", description = "Allows to configure as many additional properties for the rest consumer in use.", displayName = "Consumer Property"),
@YamlProperty(name = "contextPath", type = "string", description = "Sets a leading context-path the REST services will be using. This can be used when using components such as camel-servlet where the deployed web application is deployed using a context-path. Or for components such as camel-jetty or camel-netty-http that includes a HTTP server.", displayName = "Context Path"),
@YamlProperty(name = "corsHeaders", type = "array:org.apache.camel.model.rest.RestPropertyDefinition", description = "Allows to configure custom CORS headers.", displayName = "Cors Headers"),
@YamlProperty(name = "dataFormatProperty", type = "array:org.apache.camel.model.rest.RestPropertyDefinition", description = "Allows to configure as many additional properties for the data formats in use. For example set property prettyPrint to true to have json outputted in pretty mode. The properties can be prefixed to denote the option is only for either JSON or XML and for either the IN or the OUT. The prefixes are: json.in. json.out. xml.in. xml.out. For example a key with value xml.out.mustBeJAXBElement is only for the XML data format for the outgoing. A key without a prefix is a common key for all situations.", displayName = "Data Format Property"),
@YamlProperty(name = "enableCORS", type = "boolean", defaultValue = "false", description = "Whether to enable CORS headers in the HTTP response. The default value is false.", displayName = "Enable CORS"),
@YamlProperty(name = "enableNoContentResponse", type = "boolean", defaultValue = "false", description = "Whether to return HTTP 204 with an empty body when a response contains an empty JSON object or XML root object. The default value is false.", displayName = "Enable No Content Response"),
@YamlProperty(name = "endpointProperty", type = "array:org.apache.camel.model.rest.RestPropertyDefinition", description = "Allows to configure as many additional properties for the rest endpoint in use.", displayName = "Endpoint Property"),
@YamlProperty(name = "host", type = "string", description = "The hostname to use for exposing the REST service.", displayName = "Host"),
@YamlProperty(name = "hostNameResolver", type = "enum:allLocalIp,localHostName,localIp,none", defaultValue = "allLocalIp", description = "If no hostname has been explicit configured, then this resolver is used to compute the hostname the REST service will be using.", displayName = "Host Name Resolver"),
@YamlProperty(name = "inlineRoutes", type = "boolean", defaultValue = "true", description = "Inline routes in rest-dsl which are linked using direct endpoints. Each service in Rest DSL is an individual route, meaning that you would have at least two routes per service (rest-dsl, and the route linked from rest-dsl). By inlining (default) allows Camel to optimize and inline this as a single route, however this requires to use direct endpoints, which must be unique per service. If a route is not using direct endpoint then the rest-dsl is not inlined, and will become an individual route. This option is default true.", displayName = "Inline Routes"),
@YamlProperty(name = "jsonDataFormat", type = "enum:jackson,jsonb,fastjson,gson", defaultValue = "jackson", description = "Name of specific json data format to use. By default, jackson will be used. Important: This option is only for setting a custom name of the data format, not to refer to an existing data format instance.", displayName = "Json Data Format"),
@YamlProperty(name = "port", type = "string", description = "The port number to use for exposing the REST service. Notice if you use servlet component then the port number configured here does not apply, as the port number in use is the actual port number the servlet component is using. eg if using Apache Tomcat its the tomcat http port, if using Apache Karaf its the HTTP service in Karaf that uses port 8181 by default etc. Though in those situations setting the port number here, allows tooling and JMX to know the port number, so its recommended to set the port number to the number that the servlet engine uses.", displayName = "Port"),
@YamlProperty(name = "producerApiDoc", type = "string", description = "Sets the location of the api document the REST producer will use to validate the REST uri and query parameters are valid accordingly to the api document. The location of the api document is loaded from classpath by default, but you can use file: or http: to refer to resources to load from file or http url.", displayName = "Producer Api Doc"),
@YamlProperty(name = "producerComponent", type = "enum:vertx-http,http,undertow,netty-http", description = "Sets the name of the Camel component to use as the REST producer", displayName = "Producer Component"),
@YamlProperty(name = "scheme", type = "string", description = "The scheme to use for exposing the REST service. Usually http or https is supported. The default value is http", displayName = "Scheme"),
@YamlProperty(name = "skipBindingOnErrorCode", type = "boolean", defaultValue = "false", description = "Whether to skip binding on output if there is a custom HTTP error code header. This allows to build custom error messages that do not bind to json / xml etc, as success messages otherwise will do.", displayName = "Skip Binding On Error Code"),
@YamlProperty(name = "useXForwardHeaders", type = "boolean", defaultValue = "false", description = "Whether to use X-Forward headers to set host etc. for OpenApi. This may be needed in special cases involving reverse-proxy and networking going from HTTP to HTTPS etc. Then the proxy can send X-Forward headers (X-Forwarded-Proto) that influences the host names in the OpenAPI schema that camel-openapi-java generates from Rest DSL routes.", displayName = "Use XForward Headers"),
@YamlProperty(name = "validationLevels", type = "array:org.apache.camel.model.rest.RestPropertyDefinition", description = "Allows to configure custom validation levels when using camel-openapi-validator with client request/response validator.", displayName = "Validation Levels"),
@YamlProperty(name = "xmlDataFormat", type = "enum:jaxb,jacksonXml", defaultValue = "jaxb", description = "Name of specific XML data format to use. By default jaxb will be used, but jacksonXml is also supported. Important: This option is only for setting a custom name of the data format, not to refer to an existing data format instance.", displayName = "Xml Data Format")
}
)
public static | names |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/OptionalShouldContainInstanceOf.java | {
"start": 803,
"end": 1123
} | class ____ extends BasicErrorMessageFactory {
private OptionalShouldContainInstanceOf(String message) {
super(message);
}
/**
* Indicates that a value should be present in an empty {@link java.util.Optional}.
*
* @param value Optional to be checked.
* @param clazz the | OptionalShouldContainInstanceOf |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/TimestampedKeyValueStoreBuilder.java | {
"start": 1703,
"end": 3752
} | class ____<K, V>
extends AbstractStoreBuilder<K, ValueAndTimestamp<V>, TimestampedKeyValueStore<K, V>> {
private final KeyValueBytesStoreSupplier storeSupplier;
public TimestampedKeyValueStoreBuilder(final KeyValueBytesStoreSupplier storeSupplier,
final Serde<K> keySerde,
final Serde<V> valueSerde,
final Time time) {
super(
storeSupplier.name(),
keySerde,
valueSerde == null ? null : new ValueAndTimestampSerde<>(valueSerde),
time);
Objects.requireNonNull(storeSupplier, "storeSupplier can't be null");
Objects.requireNonNull(storeSupplier.metricsScope(), "storeSupplier's metricsScope can't be null");
this.storeSupplier = storeSupplier;
}
@Override
public TimestampedKeyValueStore<K, V> build() {
KeyValueStore<Bytes, byte[]> store = storeSupplier.get();
if (!(store instanceof TimestampedBytesStore)) {
if (store.persistent()) {
store = new KeyValueToTimestampedKeyValueByteStoreAdapter(store);
} else {
store = new InMemoryTimestampedKeyValueStoreMarker(store);
}
}
return new MeteredTimestampedKeyValueStore<>(
maybeWrapCaching(maybeWrapLogging(store)),
storeSupplier.metricsScope(),
time,
keySerde,
valueSerde);
}
private KeyValueStore<Bytes, byte[]> maybeWrapCaching(final KeyValueStore<Bytes, byte[]> inner) {
if (!enableCaching) {
return inner;
}
return new CachingKeyValueStore(inner, true);
}
private KeyValueStore<Bytes, byte[]> maybeWrapLogging(final KeyValueStore<Bytes, byte[]> inner) {
if (!enableLogging) {
return inner;
}
return new ChangeLoggingTimestampedKeyValueBytesStore(inner);
}
private static final | TimestampedKeyValueStoreBuilder |
java | apache__logging-log4j2 | log4j-couchdb/src/main/java/org/apache/logging/log4j/couchdb/CouchDbProvider.java | {
"start": 1876,
"end": 3781
} | class ____ implements NoSqlProvider<CouchDbConnection> {
private static final int HTTP = 80;
private static final int HTTPS = 443;
private static final Logger LOGGER = StatusLogger.getLogger();
private final CouchDbClient client;
private final String description;
private CouchDbProvider(final CouchDbClient client, final String description) {
this.client = client;
this.description = "couchDb{ " + description + " }";
}
@Override
public CouchDbConnection getConnection() {
return new CouchDbConnection(this.client);
}
@Override
public String toString() {
return this.description;
}
/**
* Factory method for creating an Apache CouchDB provider within the plugin manager.
*
* @param databaseName The name of the database to which log event documents will be written.
* @param protocol Either "http" or "https," defaults to "http" and mutually exclusive with
* {@code factoryClassName&factoryMethodName!=null}.
* @param server The host name of the CouchDB server, defaults to localhost and mutually exclusive with
* {@code factoryClassName&factoryMethodName!=null}.
* @param port The port that CouchDB is listening on, defaults to 80 if {@code protocol} is "http" and 443 if
* {@code protocol} is "https," and mutually exclusive with
* {@code factoryClassName&factoryMethodName!=null}.
* @param username The username to authenticate against the MongoDB server with, mutually exclusive with
* {@code factoryClassName&factoryMethodName!=null}.
* @param password The password to authenticate against the MongoDB server with, mutually exclusive with
* {@code factoryClassName&factoryMethodName!=null}.
* @param factoryClassName A fully qualified | CouchDbProvider |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java | {
"start": 1744,
"end": 2903
} | class ____ {
private final ScoreScript scoreScript;
private final ScriptDocValues<?> docValues;
private final int saltedSeed;
public RandomScoreField(ScoreScript scoreScript, int seed, String fieldName) {
this.scoreScript = scoreScript;
this.docValues = scoreScript.getDoc().get(fieldName);
int salt = (scoreScript._getIndex().hashCode() << 10) | scoreScript._getShardId();
this.saltedSeed = mix32(salt ^ seed);
}
public double randomScore() {
try {
docValues.getSupplier().setNextDocId(scoreScript._getDocId());
String seedValue = String.valueOf(docValues.get(0));
int hash = StringHelper.murmurhash3_x86_32(new BytesRef(seedValue), saltedSeed);
return (hash & 0x00FFFFFF) / (float) (1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
}
// random score based on the internal Lucene document Ids
public static final | RandomScoreField |
java | spring-projects__spring-framework | spring-messaging/src/test/java/org/springframework/messaging/converter/DefaultContentTypeResolverTests.java | {
"start": 1227,
"end": 2933
} | class ____ {
private final DefaultContentTypeResolver resolver = new DefaultContentTypeResolver();
@Test
void resolve() {
MessageHeaders headers = headers(MimeTypeUtils.APPLICATION_JSON);
assertThat(this.resolver.resolve(headers)).isEqualTo(MimeTypeUtils.APPLICATION_JSON);
}
@Test
void resolveStringContentType() {
MessageHeaders headers = headers(MimeTypeUtils.APPLICATION_JSON_VALUE);
assertThat(this.resolver.resolve(headers)).isEqualTo(MimeTypeUtils.APPLICATION_JSON);
}
@Test
void resolveInvalidStringContentType() {
MessageHeaders headers = headers("invalidContentType");
assertThatExceptionOfType(InvalidMimeTypeException.class).isThrownBy(() -> this.resolver.resolve(headers));
}
@Test
void resolveUnknownHeaderType() {
MessageHeaders headers = headers(1);
assertThatIllegalArgumentException().isThrownBy(() -> this.resolver.resolve(headers));
}
@Test
void resolveNoContentTypeHeader() {
MessageHeaders headers = new MessageHeaders(Collections.emptyMap());
assertThat(this.resolver.resolve(headers)).isNull();
}
@Test
void resolveDefaultMimeType() {
this.resolver.setDefaultMimeType(MimeTypeUtils.APPLICATION_JSON);
MessageHeaders headers = new MessageHeaders(Collections.emptyMap());
assertThat(this.resolver.resolve(headers)).isEqualTo(MimeTypeUtils.APPLICATION_JSON);
}
@Test
void resolveDefaultMimeTypeWithNoHeader() {
this.resolver.setDefaultMimeType(MimeTypeUtils.APPLICATION_JSON);
assertThat(this.resolver.resolve(null)).isEqualTo(MimeTypeUtils.APPLICATION_JSON);
}
private MessageHeaders headers(Object mimeType) {
return new MessageHeaders(Map.of(MessageHeaders.CONTENT_TYPE, mimeType));
}
}
| DefaultContentTypeResolverTests |
java | elastic__elasticsearch | libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java | {
"start": 2220,
"end": 2767
} | class ____ can be implied
if ("PROTECTED".equals(accessAsString)) {
return EnumSet.of(ExternalAccess.PUBLIC_CLASS, ExternalAccess.PROTECTED_METHOD);
}
if ("PUBLIC-METHOD".equals(accessAsString)) {
return EnumSet.of(ExternalAccess.PUBLIC_METHOD);
}
if ("PRIVATE".equals(accessAsString)) {
return EnumSet.noneOf(ExternalAccess.class);
}
return EnumSet.copyOf(Arrays.stream(accessAsString.split(DELIMITER)).map(ExternalAccess::valueOf).toList());
}
}
| access |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/QuartzEndpointBuilderFactory.java | {
"start": 28485,
"end": 28802
} | class ____ extends AbstractEndpointBuilder implements QuartzEndpointBuilder, AdvancedQuartzEndpointBuilder {
public QuartzEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new QuartzEndpointBuilderImpl(path);
}
} | QuartzEndpointBuilderImpl |
java | elastic__elasticsearch | qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java | {
"start": 2267,
"end": 29010
} | class ____ extends PackagingTestCase {
@BeforeClass
public static void filterDistros() {
assumeTrue("only archives", distribution.isArchive());
}
public void test10Install() throws Exception {
installation = installArchive(sh, distribution());
verifyArchiveInstallation(installation, distribution());
setFileSuperuser("test_superuser", "test_superuser_password");
// See https://bugs.openjdk.java.net/browse/JDK-8267701. In short, when generating PKCS#12 keystores in JDK 12 and later
// the MAC algorithm used for integrity protection is incompatible with any previous JDK version. This affects us as we generate
// PKCS12 keystores on startup ( with the bundled JDK ) but we also need to run certain tests with a JDK other than the bundled
// one, and we still use JDK11 for that.
// We're manually setting the HMAC algorithm to something that is compatible with previous versions here. Moving forward, when
// min compat JDK is JDK17, we can remove this hack and use the standard security properties file.
final Path jdkSecurityProperties = installation.bundledJdk.resolve("conf").resolve("security").resolve("java.security");
List<String> lines;
try (Stream<String> allLines = Files.readAllLines(jdkSecurityProperties).stream()) {
lines = allLines.filter(s -> s.startsWith("#keystore.pkcs12.macAlgorithm") == false)
.filter(s -> s.startsWith("#keystore.pkcs12.macIterationCount") == false)
.collect(Collectors.toList());
}
lines.add("keystore.pkcs12.macAlgorithm = HmacPBESHA1");
lines.add("keystore.pkcs12.macIterationCount = 100000");
Files.write(jdkSecurityProperties, lines, TRUNCATE_EXISTING);
}
public void test20PluginsListWithNoPlugins() throws Exception {
final Installation.Executables bin = installation.executables();
final Result r = bin.pluginTool.run("list");
assertThat(r.stdout(), emptyString());
}
public void test31BadJavaHome() throws Exception {
final Installation.Executables bin = installation.executables();
sh.getEnv().put("ES_JAVA_HOME", "doesnotexist");
// ask for elasticsearch version to quickly exit if java is actually found (ie test failure)
final Result runResult = sh.runIgnoreExitCode(bin.elasticsearch.toString() + " -V");
assertThat(runResult.exitCode(), is(1));
assertThat(runResult.stderr(), containsString("could not find java in ES_JAVA_HOME"));
}
public void test32SpecialCharactersInJdkPath() throws Exception {
final Installation.Executables bin = installation.executables();
assumeTrue("Only run this test when we know where the JDK is.", distribution().hasJdk);
final Path relocatedJdk = installation.bundledJdk.getParent().resolve("a (special) path");
sh.getEnv().put("ES_JAVA_HOME", relocatedJdk.toString());
try {
mv(installation.bundledJdk, relocatedJdk);
// ask for elasticsearch version to avoid starting the app
final Result runResult = sh.run(bin.elasticsearch.toString() + " -V");
assertThat(runResult.stdout(), startsWith("Version: "));
} finally {
mv(relocatedJdk, installation.bundledJdk);
}
}
public void test40AutoconfigurationNotTriggeredWhenNodeIsMeantToJoinExistingCluster() throws Exception {
// auto-config requires that the archive owner and the process user be the same,
Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
FileUtils.assertPathsDoNotExist(installation.data);
ServerUtils.addSettingToExistingConfiguration(installation, "discovery.seed_hosts", "[\"127.0.0.1:9300\"]");
startElasticsearch();
verifySecurityNotAutoConfigured(installation);
stopElasticsearch();
ServerUtils.removeSettingFromExistingConfiguration(installation, "discovery.seed_hosts");
Platforms.onWindows(() -> sh.chown(installation.config));
FileUtils.rm(installation.data);
}
public void test41AutoconfigurationNotTriggeredWhenNodeCannotContainData() throws Exception {
// auto-config requires that the archive owner and the process user be the same
Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
ServerUtils.addSettingToExistingConfiguration(installation, "node.roles", "[\"voting_only\", \"master\"]");
startElasticsearch();
verifySecurityNotAutoConfigured(installation);
stopElasticsearch();
ServerUtils.removeSettingFromExistingConfiguration(installation, "node.roles");
Platforms.onWindows(() -> sh.chown(installation.config));
FileUtils.rm(installation.data);
}
public void test42AutoconfigurationNotTriggeredWhenNodeCannotBecomeMaster() throws Exception {
// auto-config requires that the archive owner and the process user be the same
Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
ServerUtils.addSettingToExistingConfiguration(installation, "node.roles", "[\"ingest\"]");
startElasticsearch();
verifySecurityNotAutoConfigured(installation);
stopElasticsearch();
ServerUtils.removeSettingFromExistingConfiguration(installation, "node.roles");
Platforms.onWindows(() -> sh.chown(installation.config));
FileUtils.rm(installation.data);
}
public void test43AutoconfigurationNotTriggeredWhenTlsAlreadyConfigured() throws Exception {
// auto-config requires that the archive owner and the process user be the same
Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
ServerUtils.addSettingToExistingConfiguration(installation, "xpack.security.http.ssl.enabled", "false");
startElasticsearch();
verifySecurityNotAutoConfigured(installation);
stopElasticsearch();
ServerUtils.removeSettingFromExistingConfiguration(installation, "xpack.security.http.ssl.enabled");
Platforms.onWindows(() -> sh.chown(installation.config));
FileUtils.rm(installation.data);
}
public void test44AutoConfigurationNotTriggeredOnNotWriteableConfDir() throws Exception {
Platforms.onWindows(() -> {
// auto-config requires that the archive owner and the process user be the same
sh.chown(installation.config, installation.getOwner());
// prevent modifications to the config directory
sh.run(
String.format(
Locale.ROOT,
"$ACL = Get-ACL -Path '%s'; "
+ "$AccessRule = New-Object System.Security.AccessControl.FileSystemAccessRule('%s','Write','Deny'); "
+ "$ACL.SetAccessRule($AccessRule); "
+ "$ACL | Set-Acl -Path '%s';",
installation.config,
installation.getOwner(),
installation.config
)
);
});
Platforms.onLinux(() -> { sh.run("chmod u-w " + installation.config); });
try {
startElasticsearch();
verifySecurityNotAutoConfigured(installation);
// the node still starts, with Security enabled, but without TLS auto-configured (so only authentication)
runElasticsearchTests();
stopElasticsearch();
} finally {
Platforms.onWindows(() -> {
sh.run(
String.format(
Locale.ROOT,
"$ACL = Get-ACL -Path '%s'; "
+ "$AccessRule = New-Object System.Security.AccessControl.FileSystemAccessRule('%s','Write','Deny'); "
+ "$ACL.RemoveAccessRule($AccessRule); "
+ "$ACL | Set-Acl -Path '%s';",
installation.config,
installation.getOwner(),
installation.config
)
);
sh.chown(installation.config);
});
Platforms.onLinux(() -> { sh.run("chmod u+w " + installation.config); });
FileUtils.rm(installation.data);
}
}
public void test50AutoConfigurationFailsWhenCertificatesNotGenerated() throws Exception {
// auto-config requires that the archive owner and the process user be the same
Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
FileUtils.assertPathsDoNotExist(installation.data);
Path tempDir = createTempDir("bc-backup");
Files.move(
installation.lib.resolve("tools").resolve("security-cli").resolve("bcprov-jdk18on-1.79.jar"),
tempDir.resolve("bcprov-jdk18on-1.79.jar")
);
Shell.Result result = runElasticsearchStartCommand(null, false, false);
assertElasticsearchFailure(result, "java.lang.NoClassDefFoundError: org/bouncycastle/", null);
Files.move(
tempDir.resolve("bcprov-jdk18on-1.79.jar"),
installation.lib.resolve("tools").resolve("security-cli").resolve("bcprov-jdk18on-1.79.jar")
);
Platforms.onWindows(() -> sh.chown(installation.config));
FileUtils.rm(tempDir);
}
public void test51AutoConfigurationWithPasswordProtectedKeystore() throws Exception {
/* Windows issue awaits fix: https://github.com/elastic/elasticsearch/issues/49340 */
assumeTrue("expect command isn't on Windows", distribution.platform != Distribution.Platform.WINDOWS);
FileUtils.assertPathsDoNotExist(installation.data);
final Installation.Executables bin = installation.executables();
final String password = "some-keystore-password";
Platforms.onLinux(() -> bin.keystoreTool.run("passwd", password + "\n" + password + "\n"));
Platforms.onWindows(() -> {
sh.run("Invoke-Command -ScriptBlock {echo '" + password + "'; echo '" + password + "'} | " + bin.keystoreTool + " passwd");
});
Shell.Result result = runElasticsearchStartCommand("some-wrong-password-here", false, false);
assertElasticsearchFailure(result, "Provided keystore password was incorrect", null);
verifySecurityNotAutoConfigured(installation);
if (RandomizedTest.randomBoolean()) {
ServerUtils.addSettingToExistingConfiguration(installation, "node.name", "my-custom-random-node-name-here");
}
awaitElasticsearchStartup(runElasticsearchStartCommand(password, true, true));
verifySecurityAutoConfigured(installation);
stopElasticsearch();
// Revert to an empty password for the rest of the tests
Platforms.onLinux(() -> bin.keystoreTool.run("passwd", password + "\n" + "" + "\n"));
Platforms.onWindows(
() -> sh.run("Invoke-Command -ScriptBlock {echo '" + password + "'; echo '" + "" + "'} | " + bin.keystoreTool + " passwd")
);
}
public void test52AutoConfigurationOnWindows() throws Exception {
assumeTrue(
"run this in place of test51AutoConfigurationWithPasswordProtectedKeystore on windows",
distribution.platform == Distribution.Platform.WINDOWS
);
sh.chown(installation.config, installation.getOwner());
FileUtils.assertPathsDoNotExist(installation.data);
if (RandomizedTest.randomBoolean()) {
ServerUtils.addSettingToExistingConfiguration(installation, "node.name", "my-custom-random-node-name-here");
}
startElasticsearch();
verifySecurityAutoConfigured(installation);
stopElasticsearch();
sh.chown(installation.config);
}
public void test60StartAndStop() throws Exception {
startElasticsearch();
assertThat(installation.logs.resolve("gc.log"), fileExists());
runElasticsearchTests();
stopElasticsearch();
}
public void test61EsJavaHomeOverride() throws Exception {
Platforms.onLinux(() -> {
String systemJavaHome1 = sh.run("echo $SYSTEM_JAVA_HOME").stdout().trim();
sh.getEnv().put("ES_JAVA_HOME", systemJavaHome1);
});
Platforms.onWindows(() -> {
final String systemJavaHome1 = sh.run("$Env:SYSTEM_JAVA_HOME").stdout().trim();
sh.getEnv().put("ES_JAVA_HOME", systemJavaHome1);
});
startElasticsearch();
runElasticsearchTests();
stopElasticsearch();
String systemJavaHome1 = sh.getEnv().get("ES_JAVA_HOME");
assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), containsString(systemJavaHome1));
}
public void test62JavaHomeIgnored() throws Exception {
assumeTrue(distribution().hasJdk);
Platforms.onLinux(() -> {
String systemJavaHome1 = sh.run("echo $SYSTEM_JAVA_HOME").stdout().trim();
sh.getEnv().put("JAVA_HOME", systemJavaHome1);
// ensure that ES_JAVA_HOME is not set for the test
sh.getEnv().remove("ES_JAVA_HOME");
});
Platforms.onWindows(() -> {
final String systemJavaHome1 = sh.run("$Env:SYSTEM_JAVA_HOME").stdout().trim();
sh.getEnv().put("JAVA_HOME", systemJavaHome1);
// ensure that ES_JAVA_HOME is not set for the test
sh.getEnv().remove("ES_JAVA_HOME");
});
final Installation.Executables bin = installation.executables();
final Result runResult = sh.run(bin.elasticsearch.toString() + " -V");
assertThat(runResult.stderr(), containsString("warning: ignoring JAVA_HOME=" + systemJavaHome + "; using bundled JDK"));
startElasticsearch();
runElasticsearchTests();
stopElasticsearch();
// if the JDK started with the bundled JDK then we know that JAVA_HOME was ignored
String bundledJdk = installation.bundledJdk.toString();
assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), containsString(bundledJdk));
}
public void test63BundledJdkRemoved() throws Exception {
assumeThat(distribution().hasJdk, is(true));
Path relocatedJdk = installation.bundledJdk.getParent().resolve("jdk.relocated");
try {
mv(installation.bundledJdk, relocatedJdk);
Platforms.onLinux(() -> {
String systemJavaHome1 = sh.run("echo $SYSTEM_JAVA_HOME").stdout().trim();
sh.getEnv().put("ES_JAVA_HOME", systemJavaHome1);
});
Platforms.onWindows(() -> {
final String systemJavaHome1 = sh.run("$Env:SYSTEM_JAVA_HOME").stdout().trim();
sh.getEnv().put("ES_JAVA_HOME", systemJavaHome1);
});
startElasticsearch();
runElasticsearchTests();
stopElasticsearch();
String systemJavaHome1 = sh.getEnv().get("ES_JAVA_HOME");
assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), containsString(systemJavaHome1));
} finally {
mv(relocatedJdk, installation.bundledJdk);
}
}
public void test64JavaHomeWithSpecialCharacters() throws Exception {
Platforms.onWindows(() -> {
String javaPath = "C:\\Program Files (x86)\\java";
try {
// once windows 2012 is no longer supported and powershell 5.0 is always available we can change this command
sh.run("cmd /c mklink /D '" + javaPath + "' $Env:SYSTEM_JAVA_HOME");
sh.getEnv().put("ES_JAVA_HOME", "C:\\Program Files (x86)\\java");
// verify ES can start, stop and run plugin list
startElasticsearch();
runElasticsearchTests();
stopElasticsearch();
String pluginListCommand = installation.bin + "/elasticsearch-plugin list";
Result result = sh.run(pluginListCommand);
assertThat(result.exitCode(), equalTo(0));
} finally {
// clean up sym link
if (Files.exists(Paths.get(javaPath))) {
sh.run("cmd /c rmdir '" + javaPath + "' ");
}
}
});
Platforms.onLinux(() -> {
// Create temporary directory with a space and link to real java home
String testJavaHome = Paths.get("/tmp", "java home").toString();
try {
final String systemJavaHome = sh.run("echo $SYSTEM_JAVA_HOME").stdout().trim();
sh.run("ln -s \"" + systemJavaHome + "\" \"" + testJavaHome + "\"");
sh.getEnv().put("ES_JAVA_HOME", testJavaHome);
// verify ES can start, stop and run plugin list
startElasticsearch();
runElasticsearchTests();
stopElasticsearch();
String pluginListCommand = installation.bin + "/elasticsearch-plugin list";
Result result = sh.run(pluginListCommand);
assertThat(result.exitCode(), equalTo(0));
} finally {
FileUtils.rm(Paths.get(testJavaHome));
}
});
}
public void test65ForceBundledJdkEmptyJavaHome() throws Exception {
assumeThat(distribution().hasJdk, is(true));
sh.getEnv().put("ES_JAVA_HOME", "");
startElasticsearch();
runElasticsearchTests();
stopElasticsearch();
}
/**
* Checks that an installation succeeds when <code>POSIXLY_CORRECT</code> is set in the environment.
* <p>
* This test purposefully ignores the existence of the Windows POSIX sub-system.
*/
public void test66InstallUnderPosix() throws Exception {
sh.getEnv().put("POSIXLY_CORRECT", "1");
startElasticsearch();
runElasticsearchTests();
stopElasticsearch();
}
public void test70CustomPathConfAndJvmOptions() throws Exception {
withCustomConfig(tempConf -> {
setHeap("512m", tempConf);
final List<String> jvmOptions = List.of("-Dlog4j2.disable.jmx=true");
Files.write(tempConf.resolve("jvm.options"), jvmOptions, CREATE, APPEND);
sh.getEnv().put("ES_JAVA_OPTS", "-XX:-UseCompressedOops");
startElasticsearch();
final String nodesResponse = ServerUtils.makeRequest(
Request.Get("https://localhost:9200/_nodes"),
"test_superuser",
"test_superuser_password",
ServerUtils.getCaCert(tempConf)
);
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\""));
stopElasticsearch();
});
}
public void test71CustomJvmOptionsDirectoryFile() throws Exception {
final Path heapOptions = installation.config(Paths.get("jvm.options.d", "heap.options"));
try {
setHeap(null); // delete default options
append(heapOptions, "-Xms512m\n-Xmx512m\n");
startElasticsearch();
final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
stopElasticsearch();
} finally {
rm(heapOptions);
}
}
public void test72CustomJvmOptionsDirectoryFilesAreProcessedInSortedOrder() throws Exception {
final Path firstOptions = installation.config(Paths.get("jvm.options.d", "first.options"));
final Path secondOptions = installation.config(Paths.get("jvm.options.d", "second.options"));
try {
setHeap(null); // delete default options
/*
* We override the heap in the first file, and disable compressed oops, and override the heap in the second file. By doing this,
* we can test that both files are processed by the JVM options parser, and also that they are processed in lexicographic order.
*/
append(firstOptions, "-Xms384m\n-Xmx384m\n-XX:-UseCompressedOops\n");
append(secondOptions, "-Xms512m\n-Xmx512m\n");
startElasticsearch();
final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\""));
stopElasticsearch();
} finally {
rm(firstOptions);
rm(secondOptions);
}
}
public void test73CustomJvmOptionsDirectoryFilesWithoutOptionsExtensionIgnored() throws Exception {
final Path jvmOptionsIgnored = installation.config(Paths.get("jvm.options.d", "jvm.options.ignored"));
try {
append(jvmOptionsIgnored, "-Xthis_is_not_a_valid_option\n");
startElasticsearch();
runElasticsearchTests();
stopElasticsearch();
} finally {
rm(jvmOptionsIgnored);
}
}
public void test74CustomJvmOptionsTotalMemoryOverride() throws Exception {
final Path heapOptions = installation.config(Paths.get("jvm.options.d", "total_memory.options"));
try {
setHeap(null); // delete default options
// Work as though total system memory is 850MB
append(heapOptions, "-Des.total_memory_bytes=891289600\n");
startElasticsearch();
final String nodesStatsResponse = makeRequest("https://localhost:9200/_nodes/stats");
assertThat(nodesStatsResponse, containsString("\"adjusted_total_in_bytes\":891289600"));
final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
// 40% of 850MB
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":356515840"));
stopElasticsearch();
} finally {
rm(heapOptions);
}
}
public void test80RelativePathConf() throws Exception {
withCustomConfig(tempConf -> {
ServerUtils.removeSettingFromExistingConfiguration(tempConf, "node.name");
ServerUtils.addSettingToExistingConfiguration(tempConf, "node.name", "relative");
startElasticsearch();
final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
assertThat(nodesResponse, containsString("\"name\":\"relative\""));
stopElasticsearch();
});
}
public void test90SecurityCliPackaging() throws Exception {
final Installation.Executables bin = installation.executables();
assertThat(installation.lib.resolve("tools").resolve("security-cli"), fileExists());
final Platforms.PlatformAction action = () -> {
Result result = sh.run(bin.certutilTool + " --help");
assertThat(result.stdout(), containsString("Simplifies certificate creation for use with the Elastic Stack"));
// Ensure that the exit code from the java command is passed back up through the shell script
result = sh.runIgnoreExitCode(bin.certutilTool + " invalid-command");
assertThat(result.exitCode(), is(not(0)));
assertThat(result.stderr(), containsString("Unknown command [invalid-command]"));
};
Platforms.onLinux(action);
Platforms.onWindows(action);
}
public void test91ElasticsearchShardCliPackaging() throws Exception {
final Installation.Executables bin = installation.executables();
Platforms.PlatformAction action = () -> {
final Result result = sh.run(bin.shardTool + " -h");
assertThat(result.stdout(), containsString("A CLI tool to remove corrupted parts of unrecoverable shards"));
};
Platforms.onLinux(action);
Platforms.onWindows(action);
}
public void test92ElasticsearchNodeCliPackaging() throws Exception {
final Installation.Executables bin = installation.executables();
Platforms.PlatformAction action = () -> {
final Result result = sh.run(bin.nodeTool + " -h");
assertThat(result.stdout(), containsString("A CLI tool to do unsafe cluster and index manipulations on current node"));
};
Platforms.onLinux(action);
Platforms.onWindows(action);
}
public void test93ElasticsearchNodeCustomDataPathAndNotEsHomeWorkDir() throws Exception {
Path relativeDataPath = getRootTempDir().resolve("custom_data");
append(installation.config("elasticsearch.yml"), "path.data: " + relativeDataPath);
sh.setWorkingDirectory(getRootTempDir());
startElasticsearch();
stopElasticsearch();
String nodeTool = installation.executables().nodeTool.toString();
if (Platforms.WINDOWS == false) {
nodeTool = "sudo -E -u " + ARCHIVE_OWNER + " " + nodeTool;
}
Result result = sh.run("echo y | " + nodeTool + " unsafe-bootstrap");
assertThat(result.stdout(), containsString("Master node was successfully bootstrapped"));
}
public void test94ElasticsearchNodeExecuteCliNotEsHomeWorkDir() throws Exception {
final Installation.Executables bin = installation.executables();
// Run the cli tools from the tmp dir
sh.setWorkingDirectory(getRootTempDir());
Platforms.PlatformAction action = () -> {
Result result = sh.run(bin.certutilTool + " -h");
assertThat(result.stdout(), containsString("Simplifies certificate creation for use with the Elastic Stack"));
result = sh.run(bin.syskeygenTool + " -h");
assertThat(result.stdout(), containsString("system key tool"));
result = sh.run(bin.setupPasswordsTool + " -h");
assertThat(result.stdout(), containsString("Sets the passwords for reserved users"));
result = sh.run(bin.usersTool + " -h");
assertThat(result.stdout(), containsString("Manages elasticsearch file users"));
result = sh.run(bin.serviceTokensTool + " -h");
assertThat(result.stdout(), containsString("Manages elasticsearch service account file-tokens"));
};
Platforms.onLinux(action);
Platforms.onWindows(action);
}
}
| ArchiveTests |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/engine/CamelPostProcessorHelperTest.java | {
"start": 33288,
"end": 33489
} | class ____ {
@BeanInject("foo")
public FooBar foo;
public String doSomething(String body) {
return foo.hello(body);
}
}
public static | MyBeanInjectBean |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java | {
"start": 2085,
"end": 6632
} | class ____ assumes that the contents of AppContext.getAllJobs
// never changes. Is that right?
//
// This assumption comes in in several places, mostly in data structure that
// can grow without limit if a AppContext gets new Job's when the old ones
// run out. Also, these mapper statistics blocks won't cover the Job's
// we don't know about.
protected final Map<Job, DataStatistics> mapperStatistics
= new HashMap<Job, DataStatistics>();
protected final Map<Job, DataStatistics> reducerStatistics
= new HashMap<Job, DataStatistics>();
private final Map<Job, Float> slowTaskRelativeThresholds
= new HashMap<Job, Float>();
protected final Set<Task> doneTasks = new HashSet<Task>();
@Override
public void enrollAttempt(TaskAttemptStatus status, long timestamp) {
startTimes.put(status.id,timestamp);
}
@Override
public long attemptEnrolledTime(TaskAttemptId attemptID) {
Long result = startTimes.get(attemptID);
return result == null ? Long.MAX_VALUE : result;
}
@Override
public void contextualize(Configuration conf, AppContext context) {
this.context = context;
Map<JobId, Job> allJobs = context.getAllJobs();
for (Map.Entry<JobId, Job> entry : allJobs.entrySet()) {
final Job job = entry.getValue();
mapperStatistics.put(job, new DataStatistics());
reducerStatistics.put(job, new DataStatistics());
slowTaskRelativeThresholds.put
(job, conf.getFloat(MRJobConfig.SPECULATIVE_SLOWTASK_THRESHOLD,1.0f));
}
}
protected DataStatistics dataStatisticsForTask(TaskId taskID) {
JobId jobID = taskID.getJobId();
Job job = context.getJob(jobID);
if (job == null) {
return null;
}
Task task = job.getTask(taskID);
if (task == null) {
return null;
}
return task.getType() == TaskType.MAP
? mapperStatistics.get(job)
: task.getType() == TaskType.REDUCE
? reducerStatistics.get(job)
: null;
}
@Override
public long thresholdRuntime(TaskId taskID) {
JobId jobID = taskID.getJobId();
Job job = context.getJob(jobID);
TaskType type = taskID.getTaskType();
DataStatistics statistics
= dataStatisticsForTask(taskID);
int completedTasksOfType
= type == TaskType.MAP
? job.getCompletedMaps() : job.getCompletedReduces();
int totalTasksOfType
= type == TaskType.MAP
? job.getTotalMaps() : job.getTotalReduces();
if (completedTasksOfType < MINIMUM_COMPLETE_NUMBER_TO_SPECULATE
|| (((float)completedTasksOfType) / totalTasksOfType)
< MINIMUM_COMPLETE_PROPORTION_TO_SPECULATE ) {
return Long.MAX_VALUE;
}
long result = statistics == null
? Long.MAX_VALUE
: (long)statistics.outlier(slowTaskRelativeThresholds.get(job));
return result;
}
@Override
public long estimatedNewAttemptRuntime(TaskId id) {
DataStatistics statistics = dataStatisticsForTask(id);
if (statistics == null) {
return -1L;
}
return (long) statistics.mean();
}
@Override
public void updateAttempt(TaskAttemptStatus status, long timestamp) {
TaskAttemptId attemptID = status.id;
TaskId taskID = attemptID.getTaskId();
JobId jobID = taskID.getJobId();
Job job = context.getJob(jobID);
if (job == null) {
return;
}
Task task = job.getTask(taskID);
if (task == null) {
return;
}
Long boxedStart = startTimes.get(attemptID);
long start = boxedStart == null ? Long.MIN_VALUE : boxedStart;
TaskAttempt taskAttempt = task.getAttempt(attemptID);
if (taskAttempt.getState() == TaskAttemptState.SUCCEEDED) {
boolean isNew = false;
// is this a new success?
synchronized (doneTasks) {
if (!doneTasks.contains(task)) {
doneTasks.add(task);
isNew = true;
}
}
// It's a new completion
// Note that if a task completes twice [because of a previous speculation
// and a race, or a success followed by loss of the machine with the
// local data] we only count the first one.
if (isNew) {
long finish = timestamp;
if (start > 1L && finish > 1L && start <= finish) {
long duration = finish - start;
DataStatistics statistics
= dataStatisticsForTask(taskID);
if (statistics != null) {
statistics.add(duration);
}
}
}
}
}
}
| design |
java | apache__camel | components/camel-hl7/src/test/java/org/apache/camel/component/hl7/HL7NettyRouteTest.java | {
"start": 1573,
"end": 6732
} | class ____ extends HL7TestSupport {
@BindToRegistry("hl7service")
MyHL7BusinessLogic logic = new MyHL7BusinessLogic();
@BindToRegistry("hl7decoder")
public HL7MLLPNettyDecoderFactory addDecoder() {
HL7MLLPNettyDecoderFactory decoder = new HL7MLLPNettyDecoderFactory();
decoder.setCharset("iso-8859-1");
return decoder;
}
@BindToRegistry("hl7encoder")
public HL7MLLPNettyEncoderFactory addEncoder() {
HL7MLLPNettyEncoderFactory encoder = new HL7MLLPNettyEncoderFactory();
encoder.setCharset("iso-8859-1");
return encoder;
}
@Test
public void testSendA19() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:a19");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(Message.class);
String line1 = "MSH|^~\\&|MYSENDER|MYSENDERAPP|MYCLIENT|MYCLIENTAPP|200612211200||QRY^A19|1234|P|2.4";
String line2 = "QRD|200612211200|R|I|GetPatient|||1^RD|0101701234|DEM||";
StringBuilder in = new StringBuilder();
in.append(line1);
in.append("\r");
in.append(line2);
String out = template.requestBody(
"netty:tcp://127.0.0.1:" + getPort() + "?sync=true&decoders=#hl7decoder&encoders=#hl7encoder", in.toString(),
String.class);
String[] lines = out.split("\r");
assertEquals("MSH|^~\\&|MYSENDER||||200701011539||ADR^A19||||123", lines[0]);
assertEquals("MSA|AA|123", lines[1]);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSendA01() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:a01");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(Message.class);
String line1 = "MSH|^~\\&|MYSENDER|MYSENDERAPP|MYCLIENT|MYCLIENTAPP|200612211200||ADT^A01|123|P|2.4";
String line2 = "PID|||123456||Doe^John";
StringBuilder in = new StringBuilder();
in.append(line1);
in.append("\r");
in.append(line2);
String out = template.requestBody(
"netty:tcp://127.0.0.1:" + getPort() + "?sync=true&decoders=#hl7decoder&encoders=#hl7encoder", in.toString(),
String.class);
String[] lines = out.split("\r");
assertEquals("MSH|^~\\&|MYSENDER||||200701011539||ADT^A01||||123", lines[0]);
assertEquals("PID|||123||Doe^John", lines[1]);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSendUnknown() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:unknown");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(Message.class);
String line1 = "MSH|^~\\&|MYSENDER|MYSENDERAPP|MYCLIENT|MYCLIENTAPP|200612211200||ADT^A02|1234|P|2.4";
String line2 = "PID|||123456||Doe^John";
StringBuilder in = new StringBuilder();
in.append(line1);
in.append("\r");
in.append(line2);
template.requestBody("netty:tcp://127.0.0.1:" + getPort() + "?sync=true&decoders=#hl7decoder&encoders=#hl7encoder",
in.toString());
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: e1
DataFormat hl7 = new HL7DataFormat();
// we setup or HL7 listener on port 8888 (using the hl7codec)
// and in sync mode so we can return a response
from("netty:tcp://127.0.0.1:" + getPort() + "?sync=true&decoders=#hl7decoder&encoders=#hl7encoder")
// we use the HL7 data format to unmarshal from HL7 stream
// to the HAPI Message model
// this ensures that the camel message has been enriched
// with hl7 specific headers to
// make the routing much easier (see below)
.unmarshal(hl7)
// using choice as the content base router
.choice()
// where we choose that A19 queries invoke the handleA19
// method on our hl7service bean
.when(header("CamelHL7TriggerEvent").isEqualTo("A19")).bean("hl7service", "handleA19").to("mock:a19")
// and A01 should invoke the handleA01 method on our
// hl7service bean
.when(header("CamelHL7TriggerEvent").isEqualTo("A01")).to("mock:a01").bean("hl7service", "handleA01")
.to("mock:a19")
// other types should go to mock:unknown
.otherwise().to("mock:unknown")
// end choice block
.end()
// marshal response back
.marshal(hl7);
// END SNIPPET: e1
}
};
}
// START SNIPPET: e2
public | HL7NettyRouteTest |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/http/impl/NettyFileUpload.java | {
"start": 1087,
"end": 7898
} | class ____ implements FileUpload, ReadStream<Buffer> {
private final String name;
private String contentType;
private String filename;
private String contentTransferEncoding;
private Charset charset;
private boolean completed;
private long maxSize = -1;
private final HttpServerRequest request;
private final InboundBuffer<Object> pending;
private Handler<Void> endHandler;
private Handler<Throwable> exceptionHandler;
private Handler<Buffer> dataHandler;
private final long size;
NettyFileUpload(Context context, HttpServerRequest request, String name, String filename, String contentType, String contentTransferEncoding, Charset charset, long size) {
this.name = name;
this.filename = filename;
this.contentType = contentType;
this.contentTransferEncoding = contentTransferEncoding;
this.charset = charset;
this.request = request;
this.size = size;
this.pending = new InboundBuffer<>(context)
.drainHandler(v -> request.resume())
.handler(buff -> {
if (buff == InboundBuffer.END_SENTINEL) {
Handler<Void> handler = endHandler();
if (handler != null) {
handler.handle(null);
}
} else {
Handler<Buffer> handler = handler();
if (handler != null) {
handler.handle((Buffer) buff);
}
}
});
}
@Override
public synchronized NettyFileUpload exceptionHandler(Handler<Throwable> handler) {
exceptionHandler = handler;
return this;
}
private Handler<Buffer> handler() {
return dataHandler;
}
@Override
public synchronized NettyFileUpload handler(Handler<Buffer> handler) {
dataHandler = handler;
return this;
}
@Override
public NettyFileUpload pause() {
pending.pause();
return this;
}
@Override
public NettyFileUpload resume() {
return fetch(Long.MAX_VALUE);
}
@Override
public NettyFileUpload fetch(long amount) {
pending.fetch(amount);
return this;
}
private synchronized Handler<Void> endHandler() {
return endHandler;
}
@Override
public synchronized NettyFileUpload endHandler(Handler<Void> handler) {
endHandler = handler;
return this;
}
private void receiveData(Buffer data) {
if (data.length() != 0) {
if (!pending.write(data)) {
request.pause();
}
}
}
private void end() {
pending.write(InboundBuffer.END_SENTINEL);
}
public void handleException(Throwable err) {
Handler<Throwable> handler;
synchronized (this) {
handler = exceptionHandler;
}
if (handler != null) {
handler.handle(err);
}
}
@Override
public void setContent(ByteBuf channelBuffer) throws IOException {
completed = true;
receiveData(BufferInternal.buffer(channelBuffer));
end();
}
@Override
public void addContent(ByteBuf channelBuffer, boolean last) throws IOException {
receiveData(BufferInternal.buffer(channelBuffer));
if (last) {
completed = true;
end();
}
}
@Override
public void setContent(File file) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void setContent(InputStream inputStream) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public boolean isCompleted() {
return completed;
}
@Override
public long length() {
return size;
}
@Override
public void delete() {
throw new UnsupportedOperationException();
}
@Override
public long definedLength() {
return size;
}
@Override
public void checkSize(long newSize) throws IOException {
if (maxSize >= 0 && newSize > maxSize) {
throw new IOException("Size exceed allowed maximum capacity");
}
}
@Override
public long getMaxSize() {
return maxSize;
}
@Override
public void setMaxSize(long maxSize) {
this.maxSize = maxSize;
}
@Override
public byte[] get() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public ByteBuf getChunk(int i) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public String getString() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public String getString(Charset charset) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void setCharset(Charset charset) {
this.charset = charset;
}
@Override
public Charset getCharset() {
return charset;
}
@Override
public boolean renameTo(File file) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public boolean isInMemory() {
return false;
}
@Override
public File getFile() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public String getName() {
return name;
}
@Override
public HttpDataType getHttpDataType() {
throw new UnsupportedOperationException();
}
@Override
public int compareTo(InterfaceHttpData o) {
return 0;
}
@Override
public String getFilename() {
return filename;
}
@Override
public void setFilename(String filename) {
this.filename = filename;
}
@Override
public void setContentType(String contentType) {
this.contentType = contentType;
}
@Override
public String getContentType() {
return contentType;
}
@Override
public void setContentTransferEncoding(String contentTransferEncoding) {
this.contentTransferEncoding = contentTransferEncoding;
}
@Override
public String getContentTransferEncoding() {
return contentTransferEncoding;
}
@Override
public ByteBuf getByteBuf() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public FileUpload copy() {
throw new UnsupportedOperationException();
}
//@Override
public FileUpload duplicate() {
throw new UnsupportedOperationException();
}
@Override
public FileUpload retainedDuplicate() {
throw new UnsupportedOperationException();
}
@Override
public FileUpload replace(ByteBuf content) {
throw new UnsupportedOperationException();
}
@Override
public FileUpload retain() {
return this;
}
@Override
public FileUpload retain(int increment) {
return this;
}
@Override
public FileUpload touch(Object hint) {
return this;
}
@Override
public FileUpload touch() {
return this;
}
@Override
public ByteBuf content() {
throw new UnsupportedOperationException();
}
@Override
public int refCnt() {
return 1;
}
@Override
public boolean release() {
return false;
}
@Override
public boolean release(int decrement) {
return false;
}
}
| NettyFileUpload |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/testkit/EqualsHashCodeContractTestCase.java | {
"start": 831,
"end": 1311
} | interface ____ {
void should_not_be_equal_to_Object_of_different_type();
/**
* If two objects are equal, they must remain equal as long as they are not modified.
*/
void equals_should_be_consistent();
/**
* The object must be equal to itself, which it would be at any given instance; unless you intentionally override the
* equals method to behave otherwise.
*/
void equals_should_be_reflexive();
/**
* If object of one | EqualsHashCodeContractTestCase |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotatedElementUtilsTests.java | {
"start": 62748,
"end": 62883
} | interface ____ {
String[] value();
}
@Retention(RetentionPolicy.RUNTIME)
@ValueAttribute("FromValueAttributeMeta")
@ | ValueAttribute |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/proxy/ProxyClassReuseTest.java | {
"start": 3332,
"end": 4823
} | class ____ and hence be different
assertNotSame( proxyClass1, proxyClass2 );
assertSame( proxyClass1.getClassLoader(), cl1 );
assertSame( proxyClass2.getClassLoader(), cl2 );
}
<T> T withFactory(Function<SessionFactoryImplementor, T> consumer, BytecodeProvider bytecodeProvider, ClassLoader classLoader) {
final ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
try {
if (classLoader != null) {
Thread.currentThread().setContextClassLoader( classLoader );
}
final BootstrapServiceRegistryBuilder bsr = new BootstrapServiceRegistryBuilder();
bsr.applyTcclLookupPrecedence( TcclLookupPrecedence.BEFORE );
final StandardServiceRegistryBuilder builder = ServiceRegistryUtil.serviceRegistryBuilder(bsr.build());
if ( bytecodeProvider != null ) {
builder.addService( BytecodeProvider.class, bytecodeProvider );
}
final StandardServiceRegistry ssr = builder.build();
try (final SessionFactoryImplementor sf = (SessionFactoryImplementor) new MetadataSources( ssr )
.addAnnotatedClassName( ProxyClassReuseTest.class.getName() + "$MyEntity" )
.buildMetadata()
.getSessionFactoryBuilder()
.build()) {
return consumer.apply( sf );
}
catch (Exception e) {
StandardServiceRegistryBuilder.destroy( ssr );
throw e;
}
}
finally {
if (classLoader != null) {
Thread.currentThread().setContextClassLoader( oldClassLoader );
}
}
}
@Entity(name = "MyEntity")
public static | loaders |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/TestDataFileReflect.java | {
"start": 7183,
"end": 7720
} | class ____ {
private int nbr;
@SuppressWarnings("unused")
public BazRecord() {
}
public BazRecord(int nbr) {
this.nbr = nbr;
}
@Override
public boolean equals(Object that) {
if (that instanceof BazRecord) {
return this.nbr == ((BazRecord) that).nbr;
}
return false;
}
@Override
public int hashCode() {
return nbr;
}
@Override
public String toString() {
return BazRecord.class.getSimpleName() + "{cnt=" + nbr + "}";
}
}
}
| BazRecord |
java | square__retrofit | retrofit/src/main/java/retrofit2/DefaultMethodSupport.java | {
"start": 1033,
"end": 1108
} | class ____ multi-release jar variants for newer versions of Java.
*/
final | has |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/filter/tps/StatItem.java | {
"start": 1171,
"end": 2309
} | class ____ {
private final String name;
private final AtomicLong lastResetTime;
private final long interval;
private final AtomicInteger token;
private final int rate;
StatItem(String name, int rate, long interval) {
this.name = name;
this.rate = rate;
this.interval = interval;
this.lastResetTime = new AtomicLong(System.currentTimeMillis());
this.token = new AtomicInteger(rate);
}
public boolean isAllowable() {
long now = System.currentTimeMillis();
if (now > lastResetTime.get() + interval) {
token.set(rate);
lastResetTime.set(now);
}
return token.decrementAndGet() >= 0;
}
public long getInterval() {
return interval;
}
public int getRate() {
return rate;
}
long getLastResetTime() {
return lastResetTime.get();
}
int getToken() {
return token.get();
}
@Override
public String toString() {
return "StatItem " + "[name=" + name + ", " + "rate = " + rate + ", " + "interval = " + interval + ']';
}
}
| StatItem |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/BeanToFileTest.java | {
"start": 1190,
"end": 2248
} | class ____ extends ContextTestSupport {
private static final String TEST_FILE_NAME = "BeanToFileTest" + UUID.randomUUID() + ".txt";
@Test
public void testBeanToFile() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedFileExists(testFile(TEST_FILE_NAME), "Bye World");
template.sendBody("direct:in", "World");
assertMockEndpointsSatisfied();
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("myBean", new MyBean());
return answer;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:in").to("bean:myBean").setHeader(Exchange.FILE_NAME, constant(TEST_FILE_NAME))
.to(fileUri("?fileExist=Override"), "mock:result");
}
};
}
public static | BeanToFileTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/FederationMembershipStateStore.java | {
"start": 2187,
"end": 5853
} | interface ____ {
/**
* Register a <em>subcluster</em> by publishing capabilities as represented by
* {@code SubClusterInfo} to indicate participation in federation. This is
* typically done during initialization or restart/failover of the
* subcluster's <code>ResourceManager</code>. Upon successful registration, an
* identifier for the <em>subcluster</em> which is unique across the federated
* cluster is returned. The identifier is static, i.e. preserved across
* restarts and failover.
*
* @param registerSubClusterRequest the capabilities of the subcluster that
* wants to participate in federation. The subcluster id is also
* specified in case registration is triggered by restart/failover
* @return response empty on successfully if registration was successful
* @throws YarnException if the request is invalid/fails
*/
SubClusterRegisterResponse registerSubCluster(
SubClusterRegisterRequest registerSubClusterRequest) throws YarnException;
/**
* Deregister a <em>subcluster</em> identified by {@code SubClusterId} to
* change state in federation. This can be done to mark the sub cluster lost,
* deregistered, or decommissioned.
*
* @param subClusterDeregisterRequest - the request to deregister the
* sub-cluster from federation.
* @return response empty on successfully deregistering the subcluster state
* @throws YarnException if the request is invalid/fails
*/
SubClusterDeregisterResponse deregisterSubCluster(
SubClusterDeregisterRequest subClusterDeregisterRequest)
throws YarnException;
/**
* Periodic heartbeat from a <code>ResourceManager</code> participating in
* federation to indicate liveliness. The heartbeat publishes the current
* capabilities as represented by {@code SubClusterInfo} of the subcluster.
* Currently response is empty if the operation was successful, if not an
* exception reporting reason for a failure.
*
* @param subClusterHeartbeatRequest the capabilities of the subcluster that
* wants to keep alive its participation in federation
* @return response currently empty on if heartbeat was successfully processed
* @throws YarnException if the request is invalid/fails
*/
SubClusterHeartbeatResponse subClusterHeartbeat(
SubClusterHeartbeatRequest subClusterHeartbeatRequest)
throws YarnException;
/**
* Get the membership information of <em>subcluster</em> as identified by
* {@code SubClusterId}. The membership information includes the cluster
* endpoint and current capabilities as represented by {@code SubClusterInfo}.
*
* @param subClusterRequest the subcluster whose information is required
* @return the {@code SubClusterInfo}, or {@code null} if there is no mapping
* for the subcluster
* @throws YarnException if the request is invalid/fails
*/
GetSubClusterInfoResponse getSubCluster(
GetSubClusterInfoRequest subClusterRequest) throws YarnException;
/**
* Get the membership information of all the <em>subclusters</em> that are
* currently participating in federation. The membership information includes
* the cluster endpoint and current capabilities as represented by
* {@code SubClusterInfo}.
*
* @param subClustersRequest request for sub-clusters information
* @return a map of {@code SubClusterInfo} keyed by the {@code SubClusterId}
* @throws YarnException if the request is invalid/fails
*/
GetSubClustersInfoResponse getSubClusters(
GetSubClustersInfoRequest subClustersRequest) throws YarnException;
}
| FederationMembershipStateStore |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/management/SpringManagedCustomProcessorTest.java | {
"start": 1620,
"end": 3101
} | class ____ extends SpringTestSupport {
@Override
protected boolean useJmx() {
return true;
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/spring/management/SpringManagedCustomProcessorTest.xml");
}
protected MBeanServer getMBeanServer() {
return context.getManagementStrategy().getManagementAgent().getMBeanServer();
}
@Test
public void testManageCustomProcessor() throws Exception {
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = getCamelObjectName(TYPE_PROCESSOR, "custom");
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedHeaderReceived("foo", "hey");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
String foo = (String) mbeanServer.getAttribute(on, "Foo");
assertEquals("hey", foo);
// change foo
mbeanServer.setAttribute(on, new Attribute("Foo", "changed"));
resetMocks();
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedHeaderReceived("foo", "changed");
template.sendBody("direct:start", "Bye World");
assertMockEndpointsSatisfied();
}
@ManagedResource(description = "My Managed Component")
public static | SpringManagedCustomProcessorTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/fielddata/BinaryScriptFieldData.java | {
"start": 2336,
"end": 2491
} | class ____ implements LeafFieldData {
@Override
public long ramBytesUsed() {
return 0;
}
}
}
| BinaryScriptLeafFieldData |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-mutiny/deployment/src/test/java/io/quarkus/resteasy/mutiny/test/MutinyResource.java | {
"start": 524,
"end": 2494
} | class ____ {
@Path("uni")
@GET
public Uni<String> uni() {
return Uni.createFrom().item("hello");
}
@Produces(MediaType.APPLICATION_JSON)
@Path("multi")
@GET
@Stream
public Multi<String> multi() {
return Multi.createFrom().items("hello", "world");
}
@Path("injection")
@GET
public Uni<Integer> injection(@Context Integer value) {
return Uni.createFrom().item(value);
}
@Path("injection-async")
@GET
public Uni<Integer> injectionAsync(@Async @Context Integer value) {
return Uni.createFrom().item(value);
}
@Path("web-failure")
@GET
public Uni<String> failing() {
return Uni.createFrom().item("not ok")
.onItem().failWith(s -> new WebApplicationException(
Response.status(Response.Status.SERVICE_UNAVAILABLE).entity(s).build()));
}
@Path("app-failure")
@GET
public Uni<String> failingBecauseOfApplicationCode() {
return Uni.createFrom().item("not ok")
.onItem().transform(s -> {
throw new IllegalStateException("BOOM!");
});
}
@Path("response/tea-pot")
@GET
public Uni<Response> teapot() {
return Uni.createFrom().item(() -> Response.status(418).build());
}
@Path("response/no-content")
@GET
public Uni<Response> noContent() {
return Uni.createFrom().item(() -> Response.noContent().build());
}
@Path("response/accepted")
@GET
public Uni<Response> accepted() {
return Uni.createFrom().item(() -> Response.accepted("Hello").build());
}
@Path("response/conditional/{test}")
@GET
public Uni<Response> conditional(@PathParam("test") boolean test) {
return Uni.createFrom().item(test)
.map(b -> b ? Response.accepted() : Response.noContent())
.map(Response.ResponseBuilder::build);
}
}
| MutinyResource |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/createTable/MySqlCreateTableTest85.java | {
"start": 1178,
"end": 2442
} | class ____(10), data binary) engine=MYISAM;";
MySqlStatementParser parser = new MySqlStatementParser(sql, SQLParserFeature.KeepComments);
SQLStatement stmt = parser.parseCreateTable();
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
//
// Column column = visitor.getColumn("tb_custom_vip_show_message", "custom_vip_show_message_seq");
// assertNotNull(column);
// assertEquals("INT", column.getDataType());
System.out.println(stmt);
{
String output = SQLUtils.toMySqlString(stmt);
assertEquals("CREATE TABLE `some table $$` (\n" +
"\tid int PRIMARY KEY AUTO_INCREMENT,\n" +
"\tclass varchar(10),\n" +
"\tdata binary\n" +
") ENGINE = MYISAM", output);
}
{
String output = SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("create table `some table $$` (\n" +
"\tid int primary key auto_increment,\n" +
"\tclass varchar(10),\n" +
"\tdata binary\n" +
") engine = MYISAM", output);
}
}
}
| varchar |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/association/InheritedAttributeAssociationTest.java | {
"start": 989,
"end": 1422
} | class ____ {
@Test
public void test() {
// The mapping is wrong but the point is that the enhancement phase does not need to fail. See JIRA for further detail
// If enhancement of 'items' attribute fails, 'name' won't be enhanced
Author author = new Author();
author.name = "Bernardo Soares";
EnhancerTestUtils.checkDirtyTracking( author, "name" );
}
// --- //
@Entity
private static | InheritedAttributeAssociationTest |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/util/ReflectionUtil.java | {
"start": 1208,
"end": 6496
} | class ____ {
private ReflectionUtil() {}
/**
* Indicates whether or not a {@link Member} is both public and is contained in a public class.
*
* @param <T> type of the object whose accessibility to test
* @param member the Member to check for public accessibility (must not be {@code null}).
* @return {@code true} if {@code member} is public and contained in a public class.
* @throws NullPointerException if {@code member} is {@code null}.
*/
public static <T extends AccessibleObject & Member> boolean isAccessible(final T member) {
Objects.requireNonNull(member, "No member provided");
return Modifier.isPublic(member.getModifiers())
&& Modifier.isPublic(member.getDeclaringClass().getModifiers());
}
/**
* Makes a {@link Member} {@link AccessibleObject#isAccessible() accessible} if the member is not public.
*
* @param <T> type of the object to make accessible
* @param member the Member to make accessible (must not be {@code null}).
* @throws NullPointerException if {@code member} is {@code null}.
*/
public static <T extends AccessibleObject & Member> void makeAccessible(final T member) {
if (!isAccessible(member) && !member.isAccessible()) {
member.setAccessible(true);
}
}
/**
* Makes a {@link Field} {@link AccessibleObject#isAccessible() accessible} if it is not public or if it is final.
*
* <p>Note that using this method to make a {@code final} field writable will most likely not work very well due to
* compiler optimizations and the like.</p>
*
* @param field the Field to make accessible (must not be {@code null}).
* @throws NullPointerException if {@code field} is {@code null}.
*/
public static void makeAccessible(final Field field) {
Objects.requireNonNull(field, "No field provided");
if ((!isAccessible(field) || Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) {
field.setAccessible(true);
}
}
/**
* Gets the value of a {@link Field}, making it accessible if required.
*
* @param field the Field to obtain a value from (must not be {@code null}).
* @param instance the instance to obtain the field value from or {@code null} only if the field is static.
* @return the value stored by the field.
* @throws NullPointerException if {@code field} is {@code null}, or if {@code instance} is {@code null} but
* {@code field} is not {@code static}.
* @see Field#get(Object)
*/
public static Object getFieldValue(final Field field, final Object instance) {
makeAccessible(field);
if (!Modifier.isStatic(field.getModifiers())) {
Objects.requireNonNull(instance, "No instance given for non-static field");
}
try {
return field.get(instance);
} catch (final IllegalAccessException e) {
throw new UnsupportedOperationException(e);
}
}
/**
* Gets the value of a static {@link Field}, making it accessible if required.
*
* @param field the Field to obtain a value from (must not be {@code null}).
* @return the value stored by the static field.
* @throws NullPointerException if {@code field} is {@code null}, or if {@code field} is not {@code static}.
* @see Field#get(Object)
*/
public static Object getStaticFieldValue(final Field field) {
return getFieldValue(field, null);
}
/**
* Sets the value of a {@link Field}, making it accessible if required.
*
* @param field the Field to write a value to (must not be {@code null}).
* @param instance the instance to write the value to or {@code null} only if the field is static.
* @param value the (possibly wrapped) value to write to the field.
* @throws NullPointerException if {@code field} is {@code null}, or if {@code instance} is {@code null} but
* {@code field} is not {@code static}.
* @see Field#set(Object, Object)
*/
public static void setFieldValue(final Field field, final Object instance, final Object value) {
makeAccessible(field);
if (!Modifier.isStatic(field.getModifiers())) {
Objects.requireNonNull(instance, "No instance given for non-static field");
}
try {
field.set(instance, value);
} catch (final IllegalAccessException e) {
throw new UnsupportedOperationException(e);
}
}
/**
* Sets the value of a static {@link Field}, making it accessible if required.
*
* @param field the Field to write a value to (must not be {@code null}).
* @param value the (possibly wrapped) value to write to the field.
* @throws NullPointerException if {@code field} is {@code null}, or if {@code field} is not {@code static}.
* @see Field#set(Object, Object)
*/
public static void setStaticFieldValue(final Field field, final Object value) {
setFieldValue(field, null, value);
}
/**
* Gets the default (no-arg) constructor for a given class.
*
* @param clazz the | ReflectionUtil |
java | apache__kafka | streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/AbstractJoinIntegrationTest.java | {
"start": 2281,
"end": 14002
} | class ____ {
private final MockTime time = new MockTime();
private static final Long COMMIT_INTERVAL = 100L;
static final String INPUT_TOPIC_RIGHT = "inputTopicRight";
static final String INPUT_TOPIC_LEFT = "inputTopicLeft";
static final String OUTPUT_TOPIC = "outputTopic";
static final long ANY_UNIQUE_KEY = 0L;
protected final List<Input<String>> input = Arrays.asList(
new Input<>(INPUT_TOPIC_LEFT, null, 1),
new Input<>(INPUT_TOPIC_RIGHT, null, 2),
new Input<>(INPUT_TOPIC_LEFT, "A", 3),
new Input<>(INPUT_TOPIC_RIGHT, "a", 4),
new Input<>(INPUT_TOPIC_LEFT, "B", 5),
new Input<>(INPUT_TOPIC_RIGHT, "b", 6),
new Input<>(INPUT_TOPIC_LEFT, null, 7),
new Input<>(INPUT_TOPIC_RIGHT, null, 8),
new Input<>(INPUT_TOPIC_LEFT, "C", 9),
new Input<>(INPUT_TOPIC_RIGHT, "c", 10),
new Input<>(INPUT_TOPIC_RIGHT, null, 11),
new Input<>(INPUT_TOPIC_LEFT, null, 12),
new Input<>(INPUT_TOPIC_RIGHT, null, 13),
new Input<>(INPUT_TOPIC_RIGHT, "d", 7), // out-of-order data with null as latest
new Input<>(INPUT_TOPIC_LEFT, "D", 6),
new Input<>(INPUT_TOPIC_LEFT, null, 2),
new Input<>(INPUT_TOPIC_RIGHT, null, 3),
new Input<>(INPUT_TOPIC_RIGHT, "e", 14),
new Input<>(INPUT_TOPIC_LEFT, "E", 15),
new Input<>(INPUT_TOPIC_LEFT, null, 10), // out-of-order data with non-null as latest
new Input<>(INPUT_TOPIC_RIGHT, null, 9),
new Input<>(INPUT_TOPIC_LEFT, "F", 4),
new Input<>(INPUT_TOPIC_RIGHT, "f", 3)
);
// used for stream-stream join tests where out-of-order data does not meaningfully affect
// the result, and the main `input` list results in too many result records/test noise.
// also used for table-table multi-join tests, since out-of-order data with table-table
// joins is already tested in non-multi-join settings.
protected final List<Input<String>> inputWithoutOutOfOrderData = Arrays.asList(
new Input<>(INPUT_TOPIC_LEFT, null, 1),
new Input<>(INPUT_TOPIC_RIGHT, null, 2),
new Input<>(INPUT_TOPIC_LEFT, "A", 3),
new Input<>(INPUT_TOPIC_RIGHT, "a", 4),
new Input<>(INPUT_TOPIC_LEFT, "B", 5),
new Input<>(INPUT_TOPIC_RIGHT, "b", 6),
new Input<>(INPUT_TOPIC_LEFT, null, 7),
new Input<>(INPUT_TOPIC_RIGHT, null, 8),
new Input<>(INPUT_TOPIC_LEFT, "C", 9),
new Input<>(INPUT_TOPIC_RIGHT, "c", 10),
new Input<>(INPUT_TOPIC_RIGHT, null, 11),
new Input<>(INPUT_TOPIC_LEFT, null, 12),
new Input<>(INPUT_TOPIC_RIGHT, null, 13),
new Input<>(INPUT_TOPIC_RIGHT, "d", 14),
new Input<>(INPUT_TOPIC_LEFT, "D", 15),
new Input<>(INPUT_TOPIC_LEFT, null, "E", 16),
new Input<>(INPUT_TOPIC_RIGHT, null, "e", 17)
);
// used for stream-stream self joins where only one input topic is needed
private final List<Input<String>> leftInput = Arrays.asList(
new Input<>(INPUT_TOPIC_LEFT, null, 1),
new Input<>(INPUT_TOPIC_LEFT, "A", 2),
new Input<>(INPUT_TOPIC_LEFT, "B", 3),
new Input<>(INPUT_TOPIC_LEFT, null, 4),
new Input<>(INPUT_TOPIC_LEFT, "C", 5),
new Input<>(INPUT_TOPIC_LEFT, null, 6),
new Input<>(INPUT_TOPIC_LEFT, "D", 7)
);
final ValueJoiner<String, String, String> valueJoiner = (value1, value2) -> value1 + "-" + value2;
Properties setupConfigsAndUtils(final boolean cacheEnabled) {
return setupConfigsAndUtils(cacheEnabled, true);
}
Properties setupConfigsAndUtils(final boolean cacheEnabled, final boolean setSerdes) {
final Properties streamsConfig = new Properties();
streamsConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
if (setSerdes) {
streamsConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.LongSerde.class);
streamsConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
}
streamsConfig.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, COMMIT_INTERVAL);
if (!cacheEnabled) {
streamsConfig.put(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, 0);
}
streamsConfig.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
return streamsConfig;
}
void runTestWithDriver(
final List<Input<String>> input,
final List<List<TestRecord<Long, String>>> expectedResult,
final Properties properties,
final Topology topology) {
runTestWithDriver(input, expectedResult, null, properties, topology);
}
void runTestWithDriver(
final List<Input<String>> input,
final List<List<TestRecord<Long, String>>> expectedResult,
final String storeName,
final Properties properties,
final Topology topology) {
try (final TopologyTestDriver driver = new TopologyTestDriver(topology, properties)) {
final TestInputTopic<Long, String> right = driver.createInputTopic(INPUT_TOPIC_RIGHT, new LongSerializer(), new StringSerializer());
final TestInputTopic<Long, String> left = driver.createInputTopic(INPUT_TOPIC_LEFT, new LongSerializer(), new StringSerializer());
final TestOutputTopic<Long, String> outputTopic = driver.createOutputTopic(OUTPUT_TOPIC, new LongDeserializer(), new StringDeserializer());
final Map<String, TestInputTopic<Long, String>> testInputTopicMap = new HashMap<>();
testInputTopicMap.put(INPUT_TOPIC_RIGHT, right);
testInputTopicMap.put(INPUT_TOPIC_LEFT, left);
TestRecord<Long, String> expectedFinalResult = null;
final long baseTimestamp = time.milliseconds();
final Iterator<List<TestRecord<Long, String>>> resultIterator = expectedResult.iterator();
for (final Input<String> singleInputRecord : input) {
testInputTopicMap.get(singleInputRecord.topic).pipeInput(singleInputRecord.record.key, singleInputRecord.record.value, baseTimestamp + singleInputRecord.timestamp);
final List<TestRecord<Long, String>> expected = resultIterator.next();
if (expected != null) {
final List<TestRecord<Long, String>> updatedExpected = new LinkedList<>();
for (final TestRecord<Long, String> record : expected) {
updatedExpected.add(new TestRecord<>(record.key(), record.value(), null, baseTimestamp + record.timestamp()));
}
final List<TestRecord<Long, String>> output = outputTopic.readRecordsToList();
assertThat(output, equalTo(updatedExpected));
expectedFinalResult = updatedExpected.get(expected.size() - 1);
} else {
final List<TestRecord<Long, String>> output = outputTopic.readRecordsToList();
assertThat(output, equalTo(Collections.emptyList()));
}
}
if (storeName != null) {
checkQueryableStore(storeName, expectedFinalResult, driver);
}
}
}
void runTestWithDriver(
final List<Input<String>> input,
final TestRecord<Long, String> expectedFinalResult,
final String storeName,
final Properties streamsConfig,
final Topology topology) {
try (final TopologyTestDriver driver = new TopologyTestDriver(topology, streamsConfig)) {
final TestInputTopic<Long, String> right = driver.createInputTopic(INPUT_TOPIC_RIGHT, new LongSerializer(), new StringSerializer());
final TestInputTopic<Long, String> left = driver.createInputTopic(INPUT_TOPIC_LEFT, new LongSerializer(), new StringSerializer());
final TestOutputTopic<Long, String> outputTopic = driver.createOutputTopic(OUTPUT_TOPIC, new LongDeserializer(), new StringDeserializer());
final Map<String, TestInputTopic<Long, String>> testInputTopicMap = new HashMap<>();
testInputTopicMap.put(INPUT_TOPIC_RIGHT, right);
testInputTopicMap.put(INPUT_TOPIC_LEFT, left);
final long baseTimestamp = time.milliseconds();
for (final Input<String> singleInputRecord : input) {
testInputTopicMap.get(singleInputRecord.topic).pipeInput(singleInputRecord.record.key, singleInputRecord.record.value, baseTimestamp + singleInputRecord.timestamp);
}
final TestRecord<Long, String> updatedExpectedFinalResult =
new TestRecord<>(
expectedFinalResult.key(),
expectedFinalResult.value(),
null,
baseTimestamp + expectedFinalResult.timestamp());
final List<TestRecord<Long, String>> output = outputTopic.readRecordsToList();
assertThat(output.get(output.size() - 1), equalTo(updatedExpectedFinalResult));
if (storeName != null) {
checkQueryableStore(storeName, updatedExpectedFinalResult, driver);
}
}
}
void runSelfJoinTestWithDriver(
final List<List<TestRecord<Long, String>>> expectedResult,
final Properties streamsConfig,
final Topology topology) {
try (final TopologyTestDriver driver = new TopologyTestDriver(topology, streamsConfig)) {
final TestInputTopic<Long, String> left = driver.createInputTopic(INPUT_TOPIC_LEFT, new LongSerializer(), new StringSerializer());
final TestOutputTopic<Long, String> outputTopic = driver.createOutputTopic(OUTPUT_TOPIC, new LongDeserializer(), new StringDeserializer());
final long firstTimestamp = time.milliseconds();
long eventTimestamp = firstTimestamp;
final Iterator<List<TestRecord<Long, String>>> resultIterator = expectedResult.iterator();
for (final Input<String> singleInputRecord : leftInput) {
left.pipeInput(singleInputRecord.record.key, singleInputRecord.record.value, ++eventTimestamp);
final List<TestRecord<Long, String>> expected = resultIterator.next();
if (expected != null) {
final List<TestRecord<Long, String>> updatedExpected = new LinkedList<>();
for (final TestRecord<Long, String> record : expected) {
updatedExpected.add(new TestRecord<>(record.key(), record.value(), null, firstTimestamp + record.timestamp()));
}
final List<TestRecord<Long, String>> output = outputTopic.readRecordsToList();
assertThat(output, equalTo(updatedExpected));
}
}
}
}
private void checkQueryableStore(final String queryableName, final TestRecord<Long, String> expectedFinalResult, final TopologyTestDriver driver) {
final ReadOnlyKeyValueStore<Long, ValueAndTimestamp<String>> store = driver.getTimestampedKeyValueStore(queryableName);
try (final KeyValueIterator<Long, ValueAndTimestamp<String>> all = store.all()) {
final KeyValue<Long, ValueAndTimestamp<String>> onlyEntry = all.next();
assertThat(onlyEntry.key, is(expectedFinalResult.key()));
assertThat(onlyEntry.value.value(), is(expectedFinalResult.value()));
assertThat(onlyEntry.value.timestamp(), is(expectedFinalResult.timestamp()));
assertThat(all.hasNext(), is(false));
}
}
protected static final | AbstractJoinIntegrationTest |
java | apache__camel | dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/download/DependencyDownloaderTransformerResolver.java | {
"start": 1445,
"end": 3447
} | class ____ extends DefaultTransformerResolver {
private final CamelCatalog catalog = new DefaultCamelCatalog();
private final DependencyDownloader downloader;
private final String stubPattern;
private final boolean silent;
public DependencyDownloaderTransformerResolver(CamelContext camelContext, String stubPattern, boolean silent) {
this.downloader = camelContext.hasService(DependencyDownloader.class);
this.stubPattern = stubPattern;
this.silent = silent;
}
@Override
public Transformer resolve(TransformerKey key, CamelContext context) {
String name = key.toString();
TransformerModel model = catalog.transformerModel(name);
if (model != null) {
downloadLoader(model.getGroupId(), model.getArtifactId(), model.getVersion());
}
Transformer answer;
boolean accept = accept(name);
if (accept) {
answer = super.resolve(key, context);
} else {
answer = new StubTransformer();
}
if (answer == null) {
List<String> suggestion = SuggestSimilarHelper.didYouMean(catalog.findTransformerNames(), name);
if (suggestion != null && !suggestion.isEmpty()) {
String s = String.join(", ", suggestion);
throw new IllegalArgumentException("Cannot find transformer with name: " + name + ". Did you mean: " + s);
}
}
return answer;
}
private void downloadLoader(String groupId, String artifactId, String version) {
if (!downloader.alreadyOnClasspath(groupId, artifactId, version)) {
downloader.downloadDependency(groupId, artifactId, version);
}
}
private boolean accept(String name) {
if (stubPattern == null) {
return true;
}
boolean stubbed = PatternHelper.matchPatterns(name, stubPattern.split(","));
return !stubbed;
}
}
| DependencyDownloaderTransformerResolver |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/FilterChainImplTest.java | {
"start": 569,
"end": 10364
} | class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setFilters("stat,log4j,wall,encoding");
dataSource.getProxyFilters().add(new FilterAdapter() {
});
dataSource.setDbType("mysql");
dataSource.init();
}
protected void tearDown() throws Exception {
JdbcUtils.close(dataSource);
}
public void test_size() {
assertEquals(dataSource.getProxyFilters().size(), new FilterChainImpl(dataSource).getFilterSize());
}
public void test_unwrap() throws Exception {
assertNull(new FilterChainImpl(dataSource).unwrap(null, null));
}
public void test_unwrap_5() throws Exception {
assertNull(new FilterChainImpl(dataSource).wrap((ConnectionProxy) dataSource.getConnection().getConnection(),
(Clob) null));
}
public void test_unwrap_6() throws Exception {
Connection conn = dataSource.getConnection();
assertTrue(new FilterChainImpl(dataSource).wrap((ConnectionProxy) dataSource.getConnection().getConnection(),
new MockNClob()) instanceof NClob);
conn.close();
}
public void test_unwrap_8() throws Exception {
Connection conn = dataSource.getConnection();
assertTrue(new FilterChainImpl(dataSource).wrap((ConnectionProxy) dataSource.getConnection().getConnection(),
(Clob) new MockNClob()) instanceof NClob);
conn.close();
}
public void test_unwrap_7() throws Exception {
assertNull(new FilterChainImpl(dataSource).wrap((ConnectionProxy) dataSource.getConnection().getConnection(),
(NClob) null));
}
public void test_unwrap_9() throws Exception {
assertNull(new FilterChainImpl(dataSource).wrap((StatementProxy) null, (NClob) null));
}
public void test_getUnicodeStream() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getUnicodeStream(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getUnicodeStream_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getUnicodeStream("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getRef() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getRef(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getRef_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getRef("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getArray() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getArray(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getArray_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getArray("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getURL() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getURL(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getURL_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getURL("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getRowId() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getRowId(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getRowId_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getRowId("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getNClob() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getNClob(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getNClob_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getNClob("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getSQLXML() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getSQLXML(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getSQLXML_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getSQLXML("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getNString() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getNString(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getNString_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getNString("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getNCharacterStream() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getNCharacterStream(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getNCharacterStream_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getNCharacterStream("1"));
rs.close();
stmt.close();
conn.close();
}
public void test_getObject() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getObject(1));
rs.close();
stmt.close();
conn.close();
}
public void test_getObject_1() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
ResultSet rs = stmt.executeQuery();
rs.next();
assertNull(rs.getObject("1"));
rs.close();
stmt.close();
conn.close();
}
}
| FilterChainImplTest |
java | apache__spark | sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java | {
"start": 46068,
"end": 47356
} | class ____ implements ParquetVectorUpdater {
private final int arrayLen;
FixedLenByteArrayAsLongUpdater(int arrayLen) {
this.arrayLen = arrayLen;
}
@Override
public void readValues(
int total,
int offset,
WritableColumnVector values,
VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; i++) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFixedLenByteArray(total, arrayLen);
}
@Override
public void readValue(
int offset,
WritableColumnVector values,
VectorizedValuesReader valuesReader) {
long value = ParquetRowConverter.binaryToUnscaledLong(valuesReader.readBinary(arrayLen));
values.putLong(offset, value);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
values.putLong(offset, ParquetRowConverter.binaryToUnscaledLong(v));
}
}
private abstract static | FixedLenByteArrayAsLongUpdater |
java | spring-projects__spring-boot | module/spring-boot-artemis/src/main/java/org/springframework/boot/artemis/docker/compose/ArtemisDockerComposeConnectionDetailsFactory.java | {
"start": 1931,
"end": 2747
} | class ____ extends DockerComposeConnectionDetails
implements ArtemisConnectionDetails {
private final ArtemisEnvironment environment;
private final String brokerUrl;
protected ArtemisDockerComposeConnectionDetails(RunningService service) {
super(service);
this.environment = new ArtemisEnvironment(service.env());
this.brokerUrl = "tcp://" + service.host() + ":" + service.ports().get(ACTIVEMQ_PORT);
}
@Override
public ArtemisMode getMode() {
return ArtemisMode.NATIVE;
}
@Override
public String getBrokerUrl() {
return this.brokerUrl;
}
@Override
public @Nullable String getUser() {
return this.environment.getUser();
}
@Override
public @Nullable String getPassword() {
return this.environment.getPassword();
}
}
}
| ArtemisDockerComposeConnectionDetails |
java | google__dagger | javatests/dagger/internal/codegen/MissingBindingValidationTest.java | {
"start": 38682,
"end": 39705
} | interface ____ {}");
CompilerTests.daggerCompiler(component, module, notBound)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
String.join(
"\n",
"NotBound cannot be provided without an @Provides-annotated method.",
"",
" NotBound is injected at",
" [TestComponent] TestModule.object(notBound)",
" Object is requested at",
" [TestComponent] TestComponent.object()",
"It is also requested at:",
" TestModule.string(notBound, …)",
"The following other entry points also depend on it:",
" TestComponent.string()"))
.onSource(component)
.onLineContaining(" | NotBound |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/interpolation/reflection/ClassMap.java | {
"start": 6980,
"end": 7547
} | class ____ not
* public, retrieves methods with same signature as its public methods
* from public superclasses and interfaces (if they exist). Basically
* upcasts every method to the nearest acccessible method.
*/
private static Method[] getAccessibleMethods(Class<?> clazz) {
Method[] methods = clazz.getMethods();
// Short circuit for the (hopefully) majority of cases where the
// clazz is public
if (Modifier.isPublic(clazz.getModifiers())) {
return methods;
}
// No luck - the | is |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/accesstype/FieldAccessType.java | {
"start": 683,
"end": 1822
} | class ____ {
private Integer id1;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
FieldAccessTypeEntity fate = new FieldAccessTypeEntity( "data" );
em.persist( fate );
id1 = fate.readId();
} );
scope.inTransaction( em -> {
FieldAccessTypeEntity fate = em.find( FieldAccessTypeEntity.class, id1 );
fate.writeData( "data2" );
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
assertEquals( Arrays.asList( 1, 2 ),
AuditReaderFactory.get( em ).getRevisions( FieldAccessTypeEntity.class, id1 ) );
} );
}
@Test
public void testHistoryOfId1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
FieldAccessTypeEntity ver1 = new FieldAccessTypeEntity( id1, "data" );
FieldAccessTypeEntity ver2 = new FieldAccessTypeEntity( id1, "data2" );
assertEquals( ver1, AuditReaderFactory.get( em ).find( FieldAccessTypeEntity.class, id1, 1 ) );
assertEquals( ver2, AuditReaderFactory.get( em ).find( FieldAccessTypeEntity.class, id1, 2 ) );
} );
}
}
| FieldAccessType |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/DoublePredicateAssertBaseTest.java | {
"start": 827,
"end": 923
} | class ____ {@link DoublePredicateAssert} tests.
*
* @author Filip Hrisafov
*/
public abstract | for |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/PrePostReactiveMethodSecurityConfigurationTests.java | {
"start": 25346,
"end": 25459
} | interface ____ {
String value();
}
@EnableReactiveMethodSecurity
@Configuration
public static | ResultContains |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/RemoveBuilderImageResourceDecorator.java | {
"start": 468,
"end": 1280
} | class ____ extends Decorator<KubernetesListBuilder> {
private String name;
public RemoveBuilderImageResourceDecorator(String name) {
this.name = name;
}
@Override
public void visit(KubernetesListBuilder builder) {
List<HasMetadata> imageStreams = builder.buildItems().stream()
.filter(i -> i instanceof ImageStream)
.map(i -> (HasMetadata) i)
.filter(i -> i.getMetadata().getName().equalsIgnoreCase(name))
.collect(Collectors.toList());
builder.removeAllFromItems(imageStreams);
}
@Override
public Class<? extends Decorator>[] after() {
return new Class[] { ResourceProvidingDecorator.class, AddBuilderImageStreamResourceDecorator.class };
}
}
| RemoveBuilderImageResourceDecorator |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/util/context/ContextTest.java | {
"start": 25360,
"end": 26214
} | class ____ extends ForeignContextView implements Context {
ForeignContext(Object key, Object value) {
super(key, value);
}
ForeignContext(Map<Object, Object> data) {
super(data);
}
@Override
ForeignContext directPut(Object key, Object value) {
super.directPut(key, value);
return this;
}
@Override
public Context put(Object key, Object value) {
ForeignContext newContext = new ForeignContext(this.delegate);
newContext.delegate.put(key, value);
return newContext;
}
@Override
public Context delete(Object key) {
if (hasKey(key)) {
ForeignContext newContext = new ForeignContext(this.delegate);
newContext.delegate.remove(key);
return newContext;
}
return this;
}
@Override
public String toString() {
return "ForeignContext" + delegate.toString();
}
}
static | ForeignContext |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/web/server/ServerHttpSecurityTests.java | {
"start": 6407,
"end": 36946
} | class ____ {
@Mock
private ServerSecurityContextRepository contextRepository;
@Mock
private ReactiveAuthenticationManager authenticationManager;
@Mock
private ServerCsrfTokenRepository csrfTokenRepository;
private ServerHttpSecurity http;
@BeforeEach
public void setup() {
this.http = ServerHttpSecurityConfigurationBuilder.http().authenticationManager(this.authenticationManager);
}
@Test
public void defaults() {
TestPublisher<SecurityContext> securityContext = TestPublisher.create();
given(this.contextRepository.load(any())).willReturn(securityContext.mono());
this.http.securityContextRepository(this.contextRepository);
WebTestClient client = buildClient();
// @formatter:off
FluxExchangeResult<String> result = client.get()
.uri("/")
.exchange()
.expectHeader().valueMatches(HttpHeaders.CACHE_CONTROL, ".+")
.returnResult(String.class);
// @formatter:on
assertThat(result.getResponseCookies()).isEmpty();
// there is no need to try and load the SecurityContext by default
securityContext.assertWasNotSubscribed();
}
@Test
public void basic() {
given(this.authenticationManager.authenticate(any()))
.willReturn(Mono.just(new TestingAuthenticationToken("rob", "rob", "ROLE_USER", "ROLE_ADMIN")));
this.http.httpBasic(withDefaults());
this.http.authenticationManager(this.authenticationManager);
this.http.authorizeExchange((authorize) -> authorize.anyExchange().authenticated());
WebTestClient client = buildClient();
// @formatter:off
EntityExchangeResult<String> result = client.get()
.uri("/")
.headers((headers) -> headers
.setBasicAuth("rob", "rob")
)
.exchange()
.expectStatus().isOk()
.expectHeader().valueMatches(HttpHeaders.CACHE_CONTROL, ".+")
.expectBody(String.class).consumeWith((b) -> assertThat(b.getResponseBody()).isEqualTo("ok"))
.returnResult();
// @formatter:on
assertThat(result.getResponseCookies().getFirst("SESSION")).isNull();
}
@Test
public void basicWithGlobalWebSessionServerSecurityContextRepository() {
given(this.authenticationManager.authenticate(any()))
.willReturn(Mono.just(new TestingAuthenticationToken("rob", "rob", "ROLE_USER", "ROLE_ADMIN")));
this.http.securityContextRepository(new WebSessionServerSecurityContextRepository());
this.http.httpBasic(withDefaults());
this.http.authenticationManager(this.authenticationManager);
this.http.authorizeExchange((authorize) -> authorize.anyExchange().authenticated());
WebTestClient client = buildClient();
// @formatter:off
EntityExchangeResult<String> result = client.get()
.uri("/")
.headers((headers) -> headers
.setBasicAuth("rob", "rob")
)
.exchange()
.expectStatus().isOk()
.expectHeader().valueMatches(HttpHeaders.CACHE_CONTROL, ".+")
.expectBody(String.class).consumeWith((b) -> assertThat(b.getResponseBody()).isEqualTo("ok"))
.returnResult();
// @formatter:on
assertThat(result.getResponseCookies().getFirst("SESSION")).isNotNull();
}
@Test
public void basicWhenNoCredentialsThenUnauthorized() {
this.http.authorizeExchange((authorize) -> authorize.anyExchange().authenticated());
WebTestClient client = buildClient();
// @formatter:off
client.get().uri("/")
.exchange()
.expectStatus().isUnauthorized()
.expectHeader().valueMatches(HttpHeaders.CACHE_CONTROL, ".+")
.expectBody().isEmpty();
// @formatter:on
}
@Test
public void basicWhenXHRRequestThenUnauthorized() {
ServerAuthenticationEntryPoint authenticationEntryPoint = spy(
new HttpStatusServerEntryPoint(HttpStatus.UNAUTHORIZED));
this.http.httpBasic((basic) -> basic.authenticationEntryPoint(authenticationEntryPoint));
this.http.authorizeExchange((authorize) -> authorize.anyExchange().authenticated());
WebTestClient client = buildClient();
// @formatter:off
client.get().uri("/")
.header("X-Requested-With", "XMLHttpRequest")
.exchange()
.expectStatus().isUnauthorized()
.expectHeader().doesNotExist("WWW-Authenticate")
.expectHeader().valueMatches(HttpHeaders.CACHE_CONTROL, ".+")
.expectBody().isEmpty();
// @formatter:on
verify(authenticationEntryPoint).commence(any(), any());
}
@Test
public void basicWhenCustomAuthenticationFailureHandlerThenUses() {
ReactiveAuthenticationManager authenticationManager = mock(ReactiveAuthenticationManager.class);
ServerAuthenticationFailureHandler authenticationFailureHandler = mock(
ServerAuthenticationFailureHandler.class);
this.http.httpBasic((basic) -> basic.authenticationFailureHandler(authenticationFailureHandler));
this.http.httpBasic((basic) -> basic.authenticationManager(authenticationManager));
this.http.authorizeExchange((authorize) -> authorize.anyExchange().authenticated());
given(authenticationManager.authenticate(any()))
.willReturn(Mono.error(() -> new BadCredentialsException("bad")));
given(authenticationFailureHandler.onAuthenticationFailure(any(), any())).willReturn(Mono.empty());
WebTestClient client = buildClient();
// @formatter:off
client.get().uri("/")
.headers((headers) -> headers.setBasicAuth("user", "password"))
.exchange()
.expectStatus().isOk();
// @formatter:on
verify(authenticationFailureHandler).onAuthenticationFailure(any(), any());
}
@Test
public void buildWhenServerWebExchangeFromContextThenFound() {
SecurityWebFilterChain filter = this.http.build();
// @formatter:off
WebTestClient client = WebTestClient
.bindToController(new SubscriberContextController())
.webFilter(new WebFilterChainProxy(filter))
.build();
client.get()
.uri("/foo/bar")
.exchange()
.expectBody(String.class).isEqualTo("/foo/bar");
// @formatter:on
}
@Test
public void csrfServerLogoutHandlerNotAppliedIfCsrfIsntEnabled() {
SecurityWebFilterChain securityWebFilterChain = this.http.csrf((csrf) -> csrf.disable()).build();
assertThat(getWebFilter(securityWebFilterChain, CsrfWebFilter.class)).isNotPresent();
Optional<ServerLogoutHandler> logoutHandler = getWebFilter(securityWebFilterChain, LogoutWebFilter.class)
.map((logoutWebFilter) -> (ServerLogoutHandler) ReflectionTestUtils.getField(logoutWebFilter,
LogoutWebFilter.class, "logoutHandler"));
assertThat(logoutHandler).get().isExactlyInstanceOf(SecurityContextServerLogoutHandler.class);
}
@Test
public void csrfServerLogoutHandlerAppliedIfCsrfIsEnabled() {
SecurityWebFilterChain securityWebFilterChain = this.http
.csrf((csrf) -> csrf.csrfTokenRepository(this.csrfTokenRepository))
.build();
assertThat(getWebFilter(securityWebFilterChain, CsrfWebFilter.class)).get()
.extracting((csrfWebFilter) -> ReflectionTestUtils.getField(csrfWebFilter, "csrfTokenRepository"))
.isEqualTo(this.csrfTokenRepository);
Optional<ServerLogoutHandler> logoutHandler = getWebFilter(securityWebFilterChain, LogoutWebFilter.class)
.map((logoutWebFilter) -> (ServerLogoutHandler) ReflectionTestUtils.getField(logoutWebFilter,
LogoutWebFilter.class, "logoutHandler"));
assertThat(logoutHandler).get()
.isExactlyInstanceOf(DelegatingServerLogoutHandler.class)
.extracting((delegatingLogoutHandler) -> ((List<ServerLogoutHandler>) ReflectionTestUtils
.getField(delegatingLogoutHandler, DelegatingServerLogoutHandler.class, "delegates")).stream()
.map(ServerLogoutHandler::getClass)
.collect(Collectors.toList()))
.isEqualTo(Arrays.asList(SecurityContextServerLogoutHandler.class, CsrfServerLogoutHandler.class));
}
@Test
@SuppressWarnings("unchecked")
public void addFilterAfterIsApplied() {
SecurityWebFilterChain securityWebFilterChain = this.http
.addFilterAfter(new TestWebFilter(), SecurityWebFiltersOrder.SECURITY_CONTEXT_SERVER_WEB_EXCHANGE)
.build();
// @formatter:off
List filters = securityWebFilterChain.getWebFilters()
.map(WebFilter::getClass)
.collectList()
.block();
// @formatter:on
assertThat(filters).isNotNull()
.isNotEmpty()
.containsSequence(SecurityContextServerWebExchangeWebFilter.class, TestWebFilter.class);
}
@Test
@SuppressWarnings("unchecked")
public void addFilterBeforeIsApplied() {
SecurityWebFilterChain securityWebFilterChain = this.http
.addFilterBefore(new TestWebFilter(), SecurityWebFiltersOrder.SECURITY_CONTEXT_SERVER_WEB_EXCHANGE)
.build();
// @formatter:off
List filters = securityWebFilterChain.getWebFilters()
.map(WebFilter::getClass)
.collectList()
.block();
// @formatter:on
assertThat(filters).isNotNull()
.isNotEmpty()
.containsSequence(TestWebFilter.class, SecurityContextServerWebExchangeWebFilter.class);
}
@Test
public void anonymous() {
// @formatter:off
SecurityWebFilterChain securityFilterChain = this.http
.anonymous(withDefaults())
.build();
WebTestClient client = WebTestClientBuilder
.bindToControllerAndWebFilters(AnonymousAuthenticationWebFilterTests.HttpMeController.class, securityFilterChain)
.build();
client.get()
.uri("/me")
.exchange()
.expectStatus().isOk()
.expectBody(String.class).isEqualTo("anonymousUser");
// @formatter:on
}
@Test
public void getWhenAnonymousConfiguredThenAuthenticationIsAnonymous() {
SecurityWebFilterChain securityFilterChain = this.http.anonymous(withDefaults()).build();
// @formatter:off
WebTestClient client = WebTestClientBuilder
.bindToControllerAndWebFilters(AnonymousAuthenticationWebFilterTests.HttpMeController.class, securityFilterChain)
.build();
client.get()
.uri("/me")
.exchange()
.expectStatus().isOk()
.expectBody(String.class).isEqualTo("anonymousUser");
// @formatter:on
}
@Test
public void basicWithAnonymous() {
given(this.authenticationManager.authenticate(any()))
.willReturn(Mono.just(new TestingAuthenticationToken("rob", "rob", "ROLE_USER", "ROLE_ADMIN")));
this.http.httpBasic(withDefaults()).anonymous(withDefaults());
this.http.authenticationManager(this.authenticationManager);
this.http.authorizeExchange((authorize) -> authorize.anyExchange().hasAuthority("ROLE_ADMIN"));
WebTestClient client = buildClient();
// @formatter:off
EntityExchangeResult<String> result = client.get()
.uri("/")
.headers((headers) -> headers
.setBasicAuth("rob", "rob")
).exchange()
.expectStatus().isOk()
.expectHeader().valueMatches(HttpHeaders.CACHE_CONTROL, ".+")
.expectBody(String.class).consumeWith((b) -> assertThat(b.getResponseBody()).isEqualTo("ok"))
.returnResult();
// @formatter:on
assertThat(result.getResponseCookies().getFirst("SESSION")).isNull();
}
@Test
public void basicWithCustomRealmName() {
this.http.securityContextRepository(new WebSessionServerSecurityContextRepository());
HttpBasicServerAuthenticationEntryPoint authenticationEntryPoint = new HttpBasicServerAuthenticationEntryPoint();
authenticationEntryPoint.setRealm("myrealm");
this.http.httpBasic((basic) -> basic.authenticationEntryPoint(authenticationEntryPoint));
this.http.authenticationManager(this.authenticationManager);
this.http.authorizeExchange((authorize) -> authorize.anyExchange().authenticated());
WebTestClient client = buildClient();
// @formatter:off
EntityExchangeResult<String> result = client.get()
.uri("/")
.exchange()
.expectStatus().isUnauthorized()
.expectHeader().value(HttpHeaders.WWW_AUTHENTICATE, (value) -> assertThat(value).contains("myrealm"))
.expectBody(String.class)
.returnResult();
// @formatter:on
assertThat(result.getResponseCookies().getFirst("SESSION")).isNull();
}
@Test
public void requestWhenBasicWithRealmNameInLambdaThenRealmNameUsed() {
this.http.securityContextRepository(new WebSessionServerSecurityContextRepository());
HttpBasicServerAuthenticationEntryPoint authenticationEntryPoint = new HttpBasicServerAuthenticationEntryPoint();
authenticationEntryPoint.setRealm("myrealm");
this.http.httpBasic((httpBasic) -> httpBasic.authenticationEntryPoint(authenticationEntryPoint));
this.http.authenticationManager(this.authenticationManager);
this.http.authorizeExchange((authorize) -> authorize.anyExchange().authenticated());
WebTestClient client = buildClient();
// @formatter:off
EntityExchangeResult<String> result = client.get()
.uri("/")
.exchange()
.expectStatus().isUnauthorized()
.expectHeader().value(HttpHeaders.WWW_AUTHENTICATE, (value) -> assertThat(value).contains("myrealm"))
.expectBody(String.class)
.returnResult();
// @formatter:on
assertThat(result.getResponseCookies().getFirst("SESSION")).isNull();
}
@Test
public void basicWithCustomAuthenticationManager() {
ReactiveAuthenticationManager customAuthenticationManager = mock(ReactiveAuthenticationManager.class);
given(customAuthenticationManager.authenticate(any()))
.willReturn(Mono.just(new TestingAuthenticationToken("rob", "rob", "ROLE_USER", "ROLE_ADMIN")));
// @formatter:off
SecurityWebFilterChain securityFilterChain = this.http
.httpBasic((basic) -> basic
.authenticationManager(customAuthenticationManager))
.build();
// @formatter:on
WebFilterChainProxy springSecurityFilterChain = new WebFilterChainProxy(securityFilterChain);
// @formatter:off
WebTestClient client = WebTestClientBuilder
.bindToWebFilters(springSecurityFilterChain)
.build();
client.get()
.uri("/").headers((headers) -> headers
.setBasicAuth("rob", "rob")
)
.exchange()
.expectStatus().isOk()
.expectBody(String.class).consumeWith((b) -> assertThat(b.getResponseBody()).isEqualTo("ok"));
// @formatter:on
verifyNoMoreInteractions(this.authenticationManager);
}
@Test
public void requestWhenBasicWithAuthenticationManagerInLambdaThenAuthenticationManagerUsed() {
ReactiveAuthenticationManager customAuthenticationManager = mock(ReactiveAuthenticationManager.class);
given(customAuthenticationManager.authenticate(any()))
.willReturn(Mono.just(new TestingAuthenticationToken("rob", "rob", "ROLE_USER", "ROLE_ADMIN")));
// @formatter:off
SecurityWebFilterChain securityFilterChain = this.http
.httpBasic((httpBasic) -> httpBasic
.authenticationManager(customAuthenticationManager)
)
.build();
// @formatter:on
WebFilterChainProxy springSecurityFilterChain = new WebFilterChainProxy(securityFilterChain);
// @formatter:off
WebTestClient client = WebTestClientBuilder
.bindToWebFilters(springSecurityFilterChain)
.build();
client.get()
.uri("/")
.headers((headers) -> headers
.setBasicAuth("rob", "rob")
)
.exchange()
.expectStatus().isOk()
.expectBody(String.class).consumeWith((b) -> assertThat(b.getResponseBody()).isEqualTo("ok"));
// @formatter:on
verifyNoMoreInteractions(this.authenticationManager);
verify(customAuthenticationManager).authenticate(any(Authentication.class));
}
@Test
@SuppressWarnings("unchecked")
public void addsX509FilterWhenX509AuthenticationIsConfigured() {
X509PrincipalExtractor mockExtractor = mock(X509PrincipalExtractor.class);
ReactiveAuthenticationManager mockAuthenticationManager = mock(ReactiveAuthenticationManager.class);
this.http
.x509((x509) -> x509.principalExtractor(mockExtractor).authenticationManager(mockAuthenticationManager));
SecurityWebFilterChain securityWebFilterChain = this.http.build();
WebFilter x509WebFilter = securityWebFilterChain.getWebFilters().filter(this::isX509Filter).blockFirst();
assertThat(x509WebFilter).isNotNull();
}
@Test
public void x509WhenCustomizedThenAddsX509Filter() {
X509PrincipalExtractor mockExtractor = mock(X509PrincipalExtractor.class);
ReactiveAuthenticationManager mockAuthenticationManager = mock(ReactiveAuthenticationManager.class);
this.http
.x509((x509) -> x509.principalExtractor(mockExtractor).authenticationManager(mockAuthenticationManager));
SecurityWebFilterChain securityWebFilterChain = this.http.build();
WebFilter x509WebFilter = securityWebFilterChain.getWebFilters().filter(this::isX509Filter).blockFirst();
assertThat(x509WebFilter).isNotNull();
}
@Test
public void x509WithConverterAndNoExtractorThenAddsX509Filter() {
ServerAuthenticationConverter mockConverter = mock(ServerAuthenticationConverter.class);
this.http.x509((x509) -> x509.serverAuthenticationConverter(mockConverter));
SecurityWebFilterChain securityWebFilterChain = this.http.build();
WebFilter x509WebFilter = securityWebFilterChain.getWebFilters()
.filter((filter) -> matchesX509Converter(filter, mockConverter))
.blockFirst();
assertThat(x509WebFilter).isNotNull();
}
@Test
public void addsX509FilterWhenX509AuthenticationIsConfiguredWithDefaults() {
this.http.x509(withDefaults());
SecurityWebFilterChain securityWebFilterChain = this.http.build();
WebFilter x509WebFilter = securityWebFilterChain.getWebFilters().filter(this::isX509Filter).blockFirst();
assertThat(x509WebFilter).isNotNull();
}
@Test
public void x509WhenDefaultsThenAddsX509Filter() {
this.http.x509(withDefaults());
SecurityWebFilterChain securityWebFilterChain = this.http.build();
WebFilter x509WebFilter = securityWebFilterChain.getWebFilters().filter(this::isX509Filter).blockFirst();
assertThat(x509WebFilter).isNotNull();
}
@Test
public void postWhenCsrfDisabledThenPermitted() {
SecurityWebFilterChain securityFilterChain = this.http.csrf((csrf) -> csrf.disable()).build();
WebFilterChainProxy springSecurityFilterChain = new WebFilterChainProxy(securityFilterChain);
WebTestClient client = WebTestClientBuilder.bindToWebFilters(springSecurityFilterChain).build();
client.post().uri("/").exchange().expectStatus().isOk();
}
@Test
public void postWhenCustomCsrfTokenRepositoryThenUsed() {
ServerCsrfTokenRepository customServerCsrfTokenRepository = mock(ServerCsrfTokenRepository.class);
given(customServerCsrfTokenRepository.loadToken(any(ServerWebExchange.class))).willReturn(Mono.empty());
SecurityWebFilterChain securityFilterChain = this.http
.csrf((csrf) -> csrf.csrfTokenRepository(customServerCsrfTokenRepository))
.build();
WebFilterChainProxy springSecurityFilterChain = new WebFilterChainProxy(securityFilterChain);
WebTestClient client = WebTestClientBuilder.bindToWebFilters(springSecurityFilterChain).build();
client.post().uri("/").exchange().expectStatus().isForbidden();
verify(customServerCsrfTokenRepository).loadToken(any());
}
@Test
public void postWhenCustomRequestHandlerThenUsed() {
CsrfToken csrfToken = new DefaultCsrfToken("headerName", "paramName", "tokenValue");
given(this.csrfTokenRepository.loadToken(any(ServerWebExchange.class))).willReturn(Mono.just(csrfToken));
given(this.csrfTokenRepository.generateToken(any(ServerWebExchange.class))).willReturn(Mono.empty());
ServerCsrfTokenRequestHandler requestHandler = mock(ServerCsrfTokenRequestHandler.class);
given(requestHandler.resolveCsrfTokenValue(any(ServerWebExchange.class), any(CsrfToken.class)))
.willReturn(Mono.just(csrfToken.getToken()));
// @formatter:off
this.http.csrf((csrf) -> csrf
.csrfTokenRepository(this.csrfTokenRepository)
.csrfTokenRequestHandler(requestHandler)
);
// @formatter:on
WebTestClient client = buildClient();
client.post().uri("/").exchange().expectStatus().isOk();
verify(this.csrfTokenRepository, times(2)).loadToken(any(ServerWebExchange.class));
verify(this.csrfTokenRepository).generateToken(any(ServerWebExchange.class));
verify(requestHandler).handle(any(ServerWebExchange.class), any());
verify(requestHandler).resolveCsrfTokenValue(any(ServerWebExchange.class), any());
}
@Test
public void postWhenServerXorCsrfTokenRequestAttributeHandlerThenOk() {
CsrfToken csrfToken = new DefaultCsrfToken("X-CSRF-TOKEN", "_csrf", "token");
given(this.csrfTokenRepository.loadToken(any(ServerWebExchange.class))).willReturn(Mono.just(csrfToken));
given(this.csrfTokenRepository.generateToken(any(ServerWebExchange.class))).willReturn(Mono.empty());
ServerCsrfTokenRequestHandler requestHandler = new XorServerCsrfTokenRequestAttributeHandler();
// @formatter:off
this.http.csrf((csrf) -> csrf
.csrfTokenRepository(this.csrfTokenRepository)
.csrfTokenRequestHandler(requestHandler)
);
// @formatter:on
// Generate masked CSRF token value
ServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/").build());
requestHandler.handle(exchange, Mono.just(csrfToken));
Mono<CsrfToken> csrfTokenAttribute = exchange.getAttribute(CsrfToken.class.getName());
String actualTokenValue = csrfTokenAttribute.map(CsrfToken::getToken).block();
assertThat(actualTokenValue).isNotEqualTo(csrfToken.getToken());
WebTestClient client = buildClient();
// @formatter:off
client.post()
.uri("/")
.header(csrfToken.getHeaderName(), actualTokenValue)
.exchange()
.expectStatus().isOk();
// @formatter:on
verify(this.csrfTokenRepository, times(2)).loadToken(any(ServerWebExchange.class));
verify(this.csrfTokenRepository).generateToken(any(ServerWebExchange.class));
}
@Test
@SuppressWarnings("unchecked")
public void shouldConfigureRequestCacheForOAuth2LoginAuthenticationEntryPointAndSuccessHandler() {
ServerRequestCache requestCache = spy(new WebSessionServerRequestCache());
ReactiveClientRegistrationRepository clientRegistrationRepository = mock(
ReactiveClientRegistrationRepository.class);
SecurityWebFilterChain securityFilterChain = this.http
.oauth2Login((login) -> login.clientRegistrationRepository(clientRegistrationRepository))
.authorizeExchange((authorize) -> authorize.anyExchange().authenticated())
.requestCache((c) -> c.requestCache(requestCache))
.build();
WebTestClient client = WebTestClientBuilder.bindToWebFilters(securityFilterChain).build();
client.get().uri("/test").exchange();
ArgumentCaptor<ServerWebExchange> captor = ArgumentCaptor.forClass(ServerWebExchange.class);
verify(requestCache).saveRequest(captor.capture());
assertThat(captor.getValue().getRequest().getURI().toString()).isEqualTo("/test");
OAuth2LoginAuthenticationWebFilter authenticationWebFilter = getWebFilter(securityFilterChain,
OAuth2LoginAuthenticationWebFilter.class)
.get();
DelegatingServerAuthenticationSuccessHandler handler = (DelegatingServerAuthenticationSuccessHandler) ReflectionTestUtils
.getField(authenticationWebFilter, "authenticationSuccessHandler");
List<ServerAuthenticationSuccessHandler> delegates = (List<ServerAuthenticationSuccessHandler>) ReflectionTestUtils
.getField(handler, "delegates");
assertThat(ReflectionTestUtils.getField(delegates.get(0), "requestCache")).isSameAs(requestCache);
}
@Test
public void shouldConfigureAuthorizationRequestRepositoryForOAuth2Login() {
ServerAuthorizationRequestRepository<OAuth2AuthorizationRequest> authorizationRequestRepository = mock(
ServerAuthorizationRequestRepository.class);
ReactiveClientRegistrationRepository clientRegistrationRepository = mock(
ReactiveClientRegistrationRepository.class);
OAuth2AuthorizationRequest authorizationRequest = TestOAuth2AuthorizationRequests.request().build();
given(authorizationRequestRepository.removeAuthorizationRequest(any()))
.willReturn(Mono.just(authorizationRequest));
SecurityWebFilterChain securityFilterChain = this.http
.oauth2Login((login) -> login.clientRegistrationRepository(clientRegistrationRepository)
.authorizationRequestRepository(authorizationRequestRepository))
.build();
WebTestClient client = WebTestClientBuilder.bindToWebFilters(securityFilterChain).build();
client.get().uri("/login/oauth2/code/registration-id").exchange();
verify(authorizationRequestRepository).removeAuthorizationRequest(any());
}
@Test
public void shouldUseDefaultAuthorizationRedirectStrategyForOAuth2Login() {
ReactiveClientRegistrationRepository clientRegistrationRepository = mock(
ReactiveClientRegistrationRepository.class);
given(clientRegistrationRepository.findByRegistrationId(anyString()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration().build()));
SecurityWebFilterChain securityFilterChain = this.http
.oauth2Login((login) -> login.clientRegistrationRepository(clientRegistrationRepository))
.build();
WebTestClient client = WebTestClientBuilder.bindToWebFilters(securityFilterChain).build();
client.get().uri("/oauth2/authorization/registration-id").exchange().expectStatus().is3xxRedirection();
OAuth2AuthorizationRequestRedirectWebFilter filter = getWebFilter(securityFilterChain,
OAuth2AuthorizationRequestRedirectWebFilter.class)
.get();
assertThat(ReflectionTestUtils.getField(filter, "authorizationRedirectStrategy"))
.isInstanceOf(DefaultServerRedirectStrategy.class);
}
@Test
public void shouldConfigureAuthorizationRedirectStrategyForOAuth2Login() {
ServerRedirectStrategy authorizationRedirectStrategy = mock(ServerRedirectStrategy.class);
ReactiveClientRegistrationRepository clientRegistrationRepository = mock(
ReactiveClientRegistrationRepository.class);
given(clientRegistrationRepository.findByRegistrationId(anyString()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration().build()));
given(authorizationRedirectStrategy.sendRedirect(any(), any())).willReturn(Mono.empty());
SecurityWebFilterChain securityFilterChain = this.http
.oauth2Login((login) -> login.clientRegistrationRepository(clientRegistrationRepository)
.authorizationRedirectStrategy(authorizationRedirectStrategy))
.build();
WebTestClient client = WebTestClientBuilder.bindToWebFilters(securityFilterChain).build();
client.get().uri("/oauth2/authorization/registration-id").exchange();
verify(authorizationRedirectStrategy).sendRedirect(any(), any());
OAuth2AuthorizationRequestRedirectWebFilter filter = getWebFilter(securityFilterChain,
OAuth2AuthorizationRequestRedirectWebFilter.class)
.get();
assertThat(ReflectionTestUtils.getField(filter, "authorizationRedirectStrategy"))
.isSameAs(authorizationRedirectStrategy);
}
@Test
public void shouldUseDefaultAuthorizationRedirectStrategyForOAuth2Client() {
ReactiveClientRegistrationRepository clientRegistrationRepository = mock(
ReactiveClientRegistrationRepository.class);
given(clientRegistrationRepository.findByRegistrationId(anyString()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration().build()));
SecurityWebFilterChain securityFilterChain = this.http
.oauth2Client((client) -> client.clientRegistrationRepository(clientRegistrationRepository))
.build();
WebTestClient client = WebTestClientBuilder.bindToWebFilters(securityFilterChain).build();
client.get().uri("/oauth2/authorization/registration-id").exchange().expectStatus().is3xxRedirection();
OAuth2AuthorizationRequestRedirectWebFilter filter = getWebFilter(securityFilterChain,
OAuth2AuthorizationRequestRedirectWebFilter.class)
.get();
assertThat(ReflectionTestUtils.getField(filter, "authorizationRedirectStrategy"))
.isInstanceOf(DefaultServerRedirectStrategy.class);
}
@Test
public void shouldConfigureAuthorizationRedirectStrategyForOAuth2Client() {
ServerRedirectStrategy authorizationRedirectStrategy = mock(ServerRedirectStrategy.class);
ReactiveClientRegistrationRepository clientRegistrationRepository = mock(
ReactiveClientRegistrationRepository.class);
given(clientRegistrationRepository.findByRegistrationId(anyString()))
.willReturn(Mono.just(TestClientRegistrations.clientRegistration().build()));
given(authorizationRedirectStrategy.sendRedirect(any(), any())).willReturn(Mono.empty());
SecurityWebFilterChain securityFilterChain = this.http
.oauth2Client((client) -> client.clientRegistrationRepository(clientRegistrationRepository)
.authorizationRedirectStrategy(authorizationRedirectStrategy))
.build();
WebTestClient client = WebTestClientBuilder.bindToWebFilters(securityFilterChain).build();
client.get().uri("/oauth2/authorization/registration-id").exchange();
verify(authorizationRedirectStrategy).sendRedirect(any(), any());
OAuth2AuthorizationRequestRedirectWebFilter filter = getWebFilter(securityFilterChain,
OAuth2AuthorizationRequestRedirectWebFilter.class)
.get();
assertThat(ReflectionTestUtils.getField(filter, "authorizationRedirectStrategy"))
.isSameAs(authorizationRedirectStrategy);
}
@Test
void resourcesWhenLoginPageConfiguredThenServesCss() {
this.http.formLogin(withDefaults());
this.http.authenticationManager(this.authenticationManager);
WebTestClient client = WebTestClientBuilder
.bindToControllerAndWebFilters(NotFoundController.class, this.http.build())
.build();
client.get()
.uri("/default-ui.css")
.exchange()
.expectStatus()
.isOk()
.expectBody(String.class)
.value(Matchers.containsString("body {"));
}
@Test
void resourcesWhenLoginPageNotConfiguredThenDoesNotServeCss() {
this.http.httpBasic(withDefaults());
this.http.authenticationManager(this.authenticationManager);
WebTestClient client = WebTestClientBuilder
.bindToControllerAndWebFilters(NotFoundController.class, this.http.build())
.build();
client.get()
.uri("/default-ui.css")
.exchange()
.expectStatus()
.isNotFound()
.expectBody(String.class)
.isEqualTo(null);
}
private boolean isX509Filter(WebFilter filter) {
try {
Object converter = ReflectionTestUtils.getField(filter, "authenticationConverter");
return converter.getClass().isAssignableFrom(ServerX509AuthenticationConverter.class);
}
catch (IllegalArgumentException ex) {
// field doesn't exist
return false;
}
}
private boolean matchesX509Converter(WebFilter filter, ServerAuthenticationConverter expectedConverter) {
try {
Object converter = ReflectionTestUtils.getField(filter, "authenticationConverter");
return converter.equals(expectedConverter);
}
catch (IllegalArgumentException ex) {
// field doesn't exist
return false;
}
}
private <T extends WebFilter> Optional<T> getWebFilter(SecurityWebFilterChain filterChain, Class<T> filterClass) {
return (Optional<T>) filterChain.getWebFilters()
.filter(Objects::nonNull)
.filter((filter) -> filterClass.isAssignableFrom(filter.getClass()))
.singleOrEmpty()
.blockOptional();
}
private WebTestClient buildClient() {
WebFilterChainProxy springSecurityFilterChain = new WebFilterChainProxy(this.http.build());
return WebTestClientBuilder.bindToWebFilters(springSecurityFilterChain).build();
}
@RestController
private static | ServerHttpSecurityTests |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/BulkTypeConverters.java | {
"start": 1464,
"end": 6246
} | interface ____ extends Ordered, TypeConverter {
/**
* Performs a lookup for a given type converter.
*
* @param toType the type to convert to
* @param fromType the type to convert from
* @return the type converter or <tt>null</tt> if not found.
*/
TypeConverter lookup(Class<?> toType, Class<?> fromType);
/**
* Converts the value to the specified type in the context of an exchange
* <p/>
* Used when conversion requires extra information from the current exchange (such as encoding).
*
* @param from the from type
* @param to the to type
* @param exchange the current exchange
* @param value the value to be converted
* @return the converted value, <tt>null</tt> if no converter can covert this, or
* <tt>Void.class</tt> if a converter converted the value to null and was allowed to
* return null.
* @throws TypeConversionException is thrown if error during type conversion
*/
<T> T convertTo(Class<?> from, Class<T> to, Exchange exchange, Object value) throws TypeConversionException;
/**
* Tries to convert the value to the specified type, returning <tt>null</tt> if not possible to convert.
* <p/>
* This method will <b>not</b> throw an exception if an exception occurred during conversion.
*
* @param from the from type
* @param to the to type
* @param value the value to be converted
* @return the converted value, or <tt>null</tt> if not possible to convert
*/
default <T> T tryConvertTo(Class<?> from, Class<T> to, Exchange exchange, Object value) throws TypeConversionException {
try {
Object t = convertTo(from, to, exchange, value);
if (t == Void.class) {
return null;
}
return (T) t;
} catch (Exception e) {
// ignore
}
return null;
}
/**
* Converts the value to the specified type in the context of an exchange
* <p/>
* Used when conversion requires extra information from the current exchange (such as encoding).
*
* @param from the from type
* @param to the to type
* @param exchange the current exchange
* @param value the value to be converted
* @return the converted value, is never <tt>null</tt>
* @throws TypeConversionException is thrown if error during type conversion
* @throws NoTypeConversionAvailableException if no type converters exists to convert to the given type
*/
default <T> T mandatoryConvertTo(Class<?> from, Class<T> to, Exchange exchange, Object value)
throws TypeConversionException, NoTypeConversionAvailableException {
Object t = convertTo(from, to, exchange, value);
if (t == Void.class) {
return null;
} else if (t == null) {
throw new NoTypeConversionAvailableException(value, to);
} else {
return (T) t;
}
}
/**
* Number of type converters included
*/
int size();
@Override
default int getOrder() {
return 0;
}
@Override
default boolean allowNull() {
return false;
}
@Override
default <T> T convertTo(Class<T> type, Object value) throws TypeConversionException {
return convertTo(value.getClass(), type, null, value);
}
@Override
default <T> T convertTo(Class<T> type, Exchange exchange, Object value) throws TypeConversionException {
return convertTo(value.getClass(), type, exchange, value);
}
@Override
default <T> T mandatoryConvertTo(Class<T> type, Object value)
throws TypeConversionException, NoTypeConversionAvailableException {
return mandatoryConvertTo(value.getClass(), type, null, value);
}
@Override
default <T> T mandatoryConvertTo(Class<T> type, Exchange exchange, Object value)
throws TypeConversionException, NoTypeConversionAvailableException {
return mandatoryConvertTo(value.getClass(), type, exchange, value);
}
@Override
default <T> T tryConvertTo(Class<T> type, Object value) {
return tryConvertTo(value.getClass(), type, null, value);
}
@Override
default <T> T tryConvertTo(Class<T> type, Exchange exchange, Object value) {
return tryConvertTo(value.getClass(), type, exchange, value);
}
}
| BulkTypeConverters |
java | google__dagger | javatests/dagger/internal/codegen/ComponentCreatorTest.java | {
"start": 15438,
"end": 15522
} | class ____ {",
" @Component.Builder",
" static | SimpleComponent |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/batch_keys/BatchKeysTest.java | {
"start": 1359,
"end": 6021
} | class ____ {
private SqlSessionFactory sqlSessionFactory;
@BeforeEach
void setUp() throws Exception {
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/batch_keys/Config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/batch_keys/CreateDB.sql");
}
public void testJdbc3Support() throws Exception {
try (Connection conn = sqlSessionFactory.getConfiguration().getEnvironment().getDataSource().getConnection();
PreparedStatement stmt = conn.prepareStatement("insert into users2 values(null, 'Pocoyo')",
Statement.RETURN_GENERATED_KEYS)) {
stmt.addBatch();
stmt.executeBatch();
try (ResultSet rs = stmt.getGeneratedKeys()) {
if (rs.next()) {
ResultSetMetaData rsmd = rs.getMetaData();
int colCount = rsmd.getColumnCount();
do {
for (int i = 1; i <= colCount; i++) {
String key = rs.getString(i);
System.out.println("key " + i + " is " + key);
}
} while (rs.next());
} else {
System.out.println("There are no generated keys.");
}
}
}
}
@Test
void insert() {
try (SqlSession sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH)) {
User user1 = new User(null, "Pocoyo");
sqlSession.insert("insert", user1);
User user2 = new User(null, "Valentina");
sqlSession.insert("insert", user2);
sqlSession.flushStatements();
assertEquals(Integer.valueOf(50), user1.getId());
assertEquals(Integer.valueOf(50), user2.getId());
sqlSession.commit();
}
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
List<User> users = sqlSession.selectList("select");
Assertions.assertEquals(2, users.size());
}
}
@Test
void insertJdbc3() {
try (SqlSession sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH)) {
User user1 = new User(null, "Pocoyo");
sqlSession.insert("insertIdentity", user1);
User user2 = new User(null, "Valentina");
sqlSession.insert("insertIdentity", user2);
sqlSession.flushStatements();
assertEquals(Integer.valueOf(0), user1.getId());
assertEquals(Integer.valueOf(1), user2.getId());
sqlSession.commit();
}
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
List<User> users = sqlSession.selectList("selectIdentity");
Assertions.assertEquals(2, users.size());
}
}
@Test
void insertWithMapper() {
try (SqlSession sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH)) {
Mapper userMapper = sqlSession.getMapper(Mapper.class);
User user1 = new User(null, "Pocoyo");
userMapper.insert(user1);
User user2 = new User(null, "Valentina");
userMapper.insert(user2);
sqlSession.flushStatements();
assertEquals(Integer.valueOf(50), user1.getId());
assertEquals(Integer.valueOf(50), user2.getId());
sqlSession.commit();
}
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
List<User> users = sqlSession.selectList("select");
Assertions.assertEquals(2, users.size());
}
}
@Test
void insertMapperJdbc3() {
try (SqlSession sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH)) {
Mapper userMapper = sqlSession.getMapper(Mapper.class);
User user1 = new User(null, "Pocoyo");
userMapper.insertIdentity(user1);
User user2 = new User(null, "Valentina");
userMapper.insertIdentity(user2);
sqlSession.flushStatements();
assertEquals(Integer.valueOf(0), user1.getId());
assertEquals(Integer.valueOf(1), user2.getId());
sqlSession.commit();
}
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
List<User> users = sqlSession.selectList("selectIdentity");
Assertions.assertEquals(2, users.size());
}
}
@Test
void insertMapperNoBatchJdbc3() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper userMapper = sqlSession.getMapper(Mapper.class);
User user1 = new User(null, "Pocoyo");
userMapper.insertIdentity(user1);
assertEquals(Integer.valueOf(0), user1.getId());
sqlSession.commit();
}
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
List<User> users = sqlSession.selectList("selectIdentity");
Assertions.assertEquals(1, users.size());
}
}
}
| BatchKeysTest |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertHaveAtLeast_Test.java | {
"start": 1125,
"end": 2132
} | class ____ extends ObjectArraysWithConditionBaseTest {
@Test
void should_pass_if_satisfies_at_least_times_condition() {
arrays.assertHaveAtLeast(INFO, array("Yoda", "Luke", "Leia"), 2, jediPower);
}
@Test
void should_pass_if_all_satisfies_condition_() {
arrays.assertHaveAtLeast(INFO, array("Yoda", "Luke", "Obiwan"), 2, jediPower);
}
@Test
void should_throw_error_if_condition_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertHaveAtLeast(INFO, array("Yoda", "Luke"), 2, null))
.withMessage("The condition to evaluate should not be null");
}
@Test
void should_fail_if_condition_is_not_met_enough() {
// GIVEN
var actual = array("Yoda", "Solo", "Leia");
// WHEN
var error = expectAssertionError(() -> arrays.assertHaveAtLeast(INFO, actual, 2, jediPower));
// THEN
then(error).hasMessage(elementsShouldHaveAtLeast(actual, 2, jediPower).create());
}
}
| ObjectArrays_assertHaveAtLeast_Test |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1338/Source.java | {
"start": 284,
"end": 697
} | class ____ {
private List<String> properties;
public void addProperty(String property) {
if ( properties == null ) {
properties = new ArrayList<>();
}
properties.add( property );
}
public List<String> getProperties() {
return properties;
}
public void setProperties(List<String> properties) {
this.properties = properties;
}
}
| Source |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/target/LuggageImpl.java | {
"start": 396,
"end": 1023
} | class ____ implements Luggage {
private Long id;
private double height;
private double width;
private Owner owner;
@Embedded
@TargetEmbeddable(OwnerImpl.class)
public Owner getOwner() {
return owner;
}
public void setOwner(Owner owner) {
this.owner = owner;
}
@Id
@GeneratedValue
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public double getHeight() {
return height;
}
public void setHeight(double height) {
this.height = height;
}
public double getWidth() {
return width;
}
public void setWidth(double width) {
this.width = width;
}
}
| LuggageImpl |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/create/OracleCreateTypeTest10.java | {
"start": 1021,
"end": 6397
} | class ____ extends OracleTest {
public void test_types() throws Exception {
String sql = "CREATE OR REPLACE type body connstrBycomma is\n" +
" static function ODCIAggregateInitialize(sctx IN OUT connstrBycomma)\t\n" +
" return number is\n" +
" begin\n" +
" sctx := connstrBycomma('',',');\n" +
" return ODCIConst.Success;\n" +
" end;\n" +
" member function ODCIAggregateIterate(self IN OUT connstrBycomma, value IN VARCHAR2) return number is\n" +
" begin\n" +
" if self.currentstr is null then\n" +
" self.currentstr := value;\n" +
" else\n" +
" self.currentstr := self.currentstr ||currentseprator || value;\n" +
" end if;\n" +
" return ODCIConst.Success;\n" +
" end;\n" +
" member function ODCIAggregateTerminate(self IN connstrBycomma, returnValue OUT VARCHAR2, flags IN number) return number is\n" +
" begin\n" +
" returnValue := self.currentstr;\n" +
" return ODCIConst.Success;\n" +
" end;\n" +
" member function ODCIAggregateMerge(self IN OUT connstrBycomma, ctx2 IN connstrBycomma) return number is\n" +
" begin\n" +
" if ctx2.currentstr is null then\n" +
" self.currentstr := self.currentstr;\n" +
" elsif self.currentstr is null then\n" +
" self.currentstr := ctx2.currentstr;\n" +
" else\n" +
" self.currentstr := self.currentstr || currentseprator || ctx2.currentstr;\n" +
" end if;\n" +
" return ODCIConst.Success;\n" +
" end;\n" +
" end;";
System.out.println(sql);
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("CREATE OR REPLACE TYPE BODY connstrBycomma IS\n" +
"\tSTATIC FUNCTION ODCIAggregateInitialize (sctx IN OUT connstrBycomma) RETURN number\n" +
"\tIS\n" +
"\tBEGIN\n" +
"\t\tsctx := connstrBycomma(NULL, ',');\n" +
"\t\tRETURN ODCIConst.Success;\n" +
"\tEND;\n" +
"\tMEMBER FUNCTION ODCIAggregateIterate (self IN OUT connstrBycomma, value IN VARCHAR2) RETURN number\n" +
"\tIS\n" +
"\tBEGIN\n" +
"\t\tIF self.currentstr IS NULL THEN\n" +
"\t\t\tself.currentstr := value;\n" +
"\t\tELSE\n" +
"\t\t\tself.currentstr := self.currentstr || currentseprator || value;\n" +
"\t\tEND IF;\n" +
"\t\tRETURN ODCIConst.Success;\n" +
"\tEND;\n" +
"\tMEMBER FUNCTION ODCIAggregateTerminate (self IN connstrBycomma, returnValue OUT VARCHAR2, flags IN number) RETURN number\n" +
"\tIS\n" +
"\tBEGIN\n" +
"\t\treturnValue := self.currentstr;\n" +
"\t\tRETURN ODCIConst.Success;\n" +
"\tEND;\n" +
"\tMEMBER FUNCTION ODCIAggregateMerge (self IN OUT connstrBycomma, ctx2 IN connstrBycomma) RETURN number\n" +
"\tIS\n" +
"\tBEGIN\n" +
"\t\tIF ctx2.currentstr IS NULL THEN\n" +
"\t\t\tself.currentstr := self.currentstr;\n" +
"\t\tELSIF self.currentstr IS NULL THEN\n" +
"\t\t\tself.currentstr := ctx2.currentstr;\n" +
"\t\tELSE\n" +
"\t\t\tself.currentstr := self.currentstr || currentseprator || ctx2.currentstr;\n" +
"\t\tEND IF;\n" +
"\t\tRETURN ODCIConst.Success;\n" +
"\tEND;\n" +
"END",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(0, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("orders", "order_total")));
}
}
| OracleCreateTypeTest10 |
java | netty__netty | codec-compression/src/test/java/io/netty/handler/codec/compression/LzfDecoderTest.java | {
"start": 1089,
"end": 2424
} | class ____ extends AbstractDecoderTest {
public LzfDecoderTest() throws Exception {
}
@Override
protected EmbeddedChannel createChannel() {
return new EmbeddedChannel(new LzfDecoder());
}
@Test
public void testUnexpectedBlockIdentifier() {
final ByteBuf in = Unpooled.buffer();
in.writeShort(0x1234); //random value
in.writeByte(BLOCK_TYPE_NON_COMPRESSED);
in.writeShort(0);
assertThrows(DecompressionException.class, new Executable() {
@Override
public void execute() {
channel.writeInbound(in);
}
}, "unexpected block identifier");
}
@Test
public void testUnknownTypeOfChunk() {
final ByteBuf in = Unpooled.buffer();
in.writeByte(BYTE_Z);
in.writeByte(BYTE_V);
in.writeByte(0xFF); //random value
in.writeInt(0);
assertThrows(DecompressionException.class, new Executable() {
@Override
public void execute() {
channel.writeInbound(in);
}
}, "unknown type of chunk");
}
@Override
protected byte[] compress(byte[] data) throws Exception {
return PlatformDependent.hasUnsafe() ? LZFEncoder.encode(data) : LZFEncoder.safeEncode(data);
}
}
| LzfDecoderTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/SlowSqlMillisTest.java | {
"start": 790,
"end": 1561
} | class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setTestOnBorrow(false);
dataSource.setTestOnReturn(false);
dataSource.setTestWhileIdle(false);
dataSource.setConnectionProperties("druid.stat.slowSqlMillis=500");
dataSource.setFilters("stat");
dataSource.init();
}
protected void tearDown() throws Exception {
dataSource.close();
}
public void test_connect() throws Exception {
StatFilter filter = (StatFilter) dataSource.getProxyFilters().get(0);
assertEquals(500, filter.getSlowSqlMillis());
}
}
| SlowSqlMillisTest |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/testFixtures/java/org/springframework/boot/web/server/servlet/AbstractServletWebServerFactoryTests.java | {
"start": 76579,
"end": 76873
} | class ____ implements ServletContextListener {
private ClassLoader contextClassLoader;
@Override
public void contextInitialized(ServletContextEvent sce) {
this.contextClassLoader = Thread.currentThread().getContextClassLoader();
}
}
static | ThreadContextClassLoaderCapturingListener |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/TreatDisjunctionTest.java | {
"start": 3578,
"end": 3824
} | class ____ extends PAccountDirectory {
@Column(nullable = true)
private boolean openldap;
public boolean isOpenldap() {
return openldap;
}
public void setOpenldap(boolean openldap) {
this.openldap = openldap;
}
}
}
| PLDAPDirectory |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/search/NestedHelper.java | {
"start": 1219,
"end": 1312
} | class ____ filter parent and children clauses when building nested
* queries. */
public final | to |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/DefaultOOMHandler.java | {
"start": 2390,
"end": 10743
} | class ____ implements Runnable {
protected static final Logger LOG = LoggerFactory
.getLogger(DefaultOOMHandler.class);
private final Context context;
private final String memoryStatFile;
private final CGroupsHandler cgroups;
/**
* Create an OOM handler.
* This has to be public to be able to construct through reflection.
* @param context node manager context to work with
* @param enforceVirtualMemory true if virtual memory needs to be checked,
* false if physical memory needs to be checked instead
*/
public DefaultOOMHandler(Context context, boolean enforceVirtualMemory) {
this.context = context;
this.memoryStatFile = enforceVirtualMemory ?
CGROUP_PARAM_MEMORY_MEMSW_USAGE_BYTES :
CGROUP_PARAM_MEMORY_USAGE_BYTES;
this.cgroups = getCGroupsHandler();
}
@VisibleForTesting
protected CGroupsHandler getCGroupsHandler() {
return ResourceHandlerModule.getCGroupsHandler();
}
/**
* Check if a given container exceeds its limits.
*/
private boolean isContainerOutOfLimit(Container container) {
boolean outOfLimit = false;
String value = null;
try {
value = cgroups.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
container.getContainerId().toString(), memoryStatFile);
long usage = Long.parseLong(value);
long request = container.getResource().getMemorySize() * 1024 * 1024;
// Check if the container has exceeded its limits.
if (usage > request) {
outOfLimit = true;
String message = String.format(
"Container %s is out of its limits, using %d " +
"when requested only %d",
container.getContainerId(), usage, request);
LOG.warn(message);
}
} catch (ResourceHandlerException ex) {
LOG.warn(String.format("Could not access memory resource for %s",
container.getContainerId()), ex);
} catch (NumberFormatException ex) {
LOG.warn(String.format("Could not parse %s in %s", value,
container.getContainerId()));
}
return outOfLimit;
}
/**
* SIGKILL the specified container. We do this not using the standard
* container logic. The reason is that the processes are frozen by
* the cgroups OOM handler, so they cannot respond to SIGTERM.
* On the other hand we have to be as fast as possible.
* We walk through the list of active processes in the container.
* This is needed because frozen parents cannot signal their children.
* We kill each process and then try again until the whole cgroup
* is cleaned up. This logic avoids leaking processes in a cgroup.
* Currently the killing only succeeds for PGIDS.
*
* @param container Container to clean up
* @return true if the container is killed successfully, false otherwise
*/
private boolean sigKill(Container container) {
boolean containerKilled = false;
boolean finished = false;
try {
while (!finished) {
String[] pids =
cgroups.getCGroupParam(
CGroupsHandler.CGroupController.MEMORY,
container.getContainerId().toString(),
CGROUP_PROCS_FILE)
.split("\n");
finished = true;
for (String pid : pids) {
// Note: this kills only PGIDs currently
if (pid != null && !pid.isEmpty()) {
LOG.debug(String.format(
"Terminating container %s Sending SIGKILL to -%s",
container.getContainerId().toString(),
pid));
finished = false;
try {
context.getContainerExecutor().signalContainer(
new ContainerSignalContext.Builder().setContainer(container)
.setUser(container.getUser())
.setPid(pid).setSignal(ContainerExecutor.Signal.KILL)
.build());
} catch (IOException ex) {
LOG.warn(String.format("Cannot kill container %s pid -%s.",
container.getContainerId(), pid), ex);
}
}
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
LOG.debug("Interrupted while waiting for processes to disappear");
}
}
containerKilled = true;
} catch (ResourceHandlerException ex) {
// the tasks file of the container may not be available because the
// container may not have been launched at this point when the root
// cgroup is under oom
LOG.warn(String.format(
"Cannot list more tasks in container %s to kill.",
container.getContainerId()));
}
return containerKilled;
}
/**
* It is called when the node is under an OOM condition. All processes in
* all sub-cgroups are suspended. We need to act fast, so that we do not
* affect the overall system utilization. In general we try to find a
* newly launched container that exceeded its limits. The justification is
* cost, since probably this is the one that has accumulated the least
* amount of uncommitted data so far. OPPORTUNISTIC containers are always
* killed before any GUARANTEED containers are considered. We continue the
* process until the OOM is resolved.
*/
@Override
public void run() {
try {
// We kill containers until the kernel reports the OOM situation resolved
// Note: If the kernel has a delay this may kill more than necessary
while (true) {
String status = cgroups.getCGroupParam(
CGroupsHandler.CGroupController.MEMORY,
"",
CGROUP_PARAM_MEMORY_OOM_CONTROL);
if (!status.contains(CGroupsHandler.UNDER_OOM)) {
break;
}
boolean containerKilled = killContainer();
if (!containerKilled) {
// This can happen, if SIGKILL did not clean up
// non-PGID or containers or containers launched by other users
// or if a process was put to the root YARN cgroup.
throw new YarnRuntimeException(
"Could not find any containers but CGroups " +
"reserved for containers ran out of memory. " +
"I am giving up");
}
}
} catch (ResourceHandlerException ex) {
LOG.warn("Could not fetch OOM status. " +
"This is expected at shutdown. Exiting.", ex);
}
}
/**
* Choose and kill a container in case of OOM. We try to find the most
* recently launched OPPORTUNISTIC container that exceeds its limit
* and fall back to the most recently launched OPPORTUNISTIC container
* If there is no such container found, we choose to kill a GUARANTEED
* container in the same way.
* @return true if a container is killed, false otherwise
*/
protected boolean killContainer() {
boolean containerKilled = false;
ArrayList<ContainerCandidate> candidates = new ArrayList<>(0);
for (Container container : context.getContainers().values()) {
if (!container.isRunning()) {
// skip containers that are not running yet because killing them
// won't release any memory to get us out of OOM.
continue;
// note even if it is indicated that the container is running from
// container.isRunning(), the container process might not have been
// running yet. From NM's perspective, a container is running as
// soon as the container launch is handed over the container executor
}
candidates.add(
new ContainerCandidate(container, isContainerOutOfLimit(container)));
}
Collections.sort(candidates);
if (candidates.isEmpty()) {
LOG.warn(
"Found no running containers to kill in order to release memory");
}
// make sure one container is killed successfully to release memory
for(int i = 0; !containerKilled && i < candidates.size(); i++) {
ContainerCandidate candidate = candidates.get(i);
if (sigKill(candidate.container)) {
String message = String.format(
"container %s killed by elastic cgroups OOM handler.",
candidate.container.getContainerId());
LOG.warn(message);
containerKilled = true;
}
}
return containerKilled;
}
/**
* Note: this | DefaultOOMHandler |
java | alibaba__nacos | config/src/test/java/com/alibaba/nacos/config/server/constant/ConfigModuleStateBuilderTest.java | {
"start": 1390,
"end": 3490
} | class ____ {
private ConfigurableEnvironment environment;
@BeforeEach
void setUp() {
environment = new MockEnvironment().withProperty(PersistenceConstant.DATASOURCE_PLATFORM_PROPERTY, PersistenceConstant.DERBY)
.withProperty(CommonConstant.NACOS_PLUGIN_DATASOURCE_LOG, "true");
EnvUtil.setEnvironment(environment);
}
@Test
void testBuild() {
ModuleState actual = new ConfigModuleStateBuilder().build();
Map<String, Object> states = actual.getStates();
assertEquals(PersistenceConstant.DERBY, states.get(Constants.DATASOURCE_PLATFORM_PROPERTY_STATE));
assertTrue((Boolean) states.get(Constants.NACOS_PLUGIN_DATASOURCE_LOG_STATE));
assertEquals(PropertyUtil.getNotifyConnectTimeout(), states.get(PropertiesConstant.NOTIFY_CONNECT_TIMEOUT));
assertEquals(PropertyUtil.getNotifySocketTimeout(), states.get(PropertiesConstant.NOTIFY_SOCKET_TIMEOUT));
assertEquals(PropertyUtil.isHealthCheck(), states.get(PropertiesConstant.IS_HEALTH_CHECK));
assertEquals(PropertyUtil.getMaxHealthCheckFailCount(), states.get(PropertiesConstant.MAX_HEALTH_CHECK_FAIL_COUNT));
assertEquals(PropertyUtil.getMaxContent(), states.get(PropertiesConstant.MAX_CONTENT));
assertEquals(PropertyUtil.isManageCapacity(), states.get(PropertiesConstant.IS_MANAGE_CAPACITY));
assertEquals(PropertyUtil.isCapacityLimitCheck(), states.get(PropertiesConstant.IS_CAPACITY_LIMIT_CHECK));
assertEquals(PropertyUtil.getDefaultClusterQuota(), states.get(PropertiesConstant.DEFAULT_CLUSTER_QUOTA));
assertEquals(PropertyUtil.getDefaultGroupQuota(), states.get(PropertiesConstant.DEFAULT_GROUP_QUOTA));
assertEquals(PropertyUtil.getDefaultMaxSize(), states.get(PropertiesConstant.DEFAULT_MAX_SIZE));
assertEquals(PropertyUtil.getDefaultMaxAggrCount(), states.get(PropertiesConstant.DEFAULT_MAX_AGGR_COUNT));
assertEquals(PropertyUtil.getDefaultMaxAggrSize(), states.get(PropertiesConstant.DEFAULT_MAX_AGGR_SIZE));
}
}
| ConfigModuleStateBuilderTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/scheduler/ResourceUtilizationTracker.java | {
"start": 1357,
"end": 2398
} | interface ____ {
/**
* Get the current total utilization of all the Containers running on
* the node.
* @return ResourceUtilization Resource Utilization.
*/
ResourceUtilization getCurrentUtilization();
/**
* Add Container's resources to Node Utilization.
* @param container Container.
*/
void addContainerResources(Container container);
/**
* Subtract Container's resources to Node Utilization.
* @param container Container.
*/
void subtractContainerResource(Container container);
/**
* Check if NM has resources available currently to run the container.
* @param container Container.
* @return True, if NM has resources available currently to run the container.
*/
boolean hasResourcesAvailable(Container container);
/**
* Check if NM has resources available currently to run requested resources.
* @param resource the resources.
* @return True, if NM has enough available resources.
*/
boolean hasResourcesAvailable(Resource resource);
}
| ResourceUtilizationTracker |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/ContinuousPartitionFetcher.java | {
"start": 1394,
"end": 2007
} | interface ____<P, T extends Comparable<T>>
extends PartitionFetcher<P> {
/** Fetch partitions by previous partition offset (Including). */
List<Tuple2<P, T>> fetchPartitions(Context<P, T> context, T previousOffset) throws Exception;
/**
* Context for fetch partitions, partition information is stored in hive meta store.
*
* @param <P> The type of partition.
* @param <T> The type of partition offset, the type could be Long when fetches in
* partition-time or create-time order, be String when fetches in partition-name order.
*/
| ContinuousPartitionFetcher |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestProtosLegacy.java | {
"start": 1244,
"end": 1431
} | interface ____
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.common.EmptyRequestProto}
*/
public static final | EmptyRequestProtoOrBuilder |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java | {
"start": 1582,
"end": 2284
} | class ____ extends ESSingleNodeTestCase {
@Override
protected Settings nodeSettings() {
return Settings.builder()
.put(super.nodeSettings())
// small indexing buffer so that
// 1. We can trigger refresh after buffering 100 deletes
// 2. Indexing memory Controller writes indexing buffers in sync with indexing on the indexing thread
.put("indices.memory.index_buffer_size", "1kb")
.build();
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return CollectionUtils.appendToCopy(super.getPlugins(), TestEnginePlugin.class);
}
public static | IndexingMemoryControllerIT |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/support/AutowireCandidateQualifier.java | {
"start": 1771,
"end": 2555
} | class ____ (without the package).
* @param typeName the name of the annotation type
*/
public AutowireCandidateQualifier(String typeName) {
Assert.notNull(typeName, "Type name must not be null");
this.typeName = typeName;
}
/**
* Construct a qualifier to match against an annotation of the
* given type whose {@code value} attribute also matches
* the specified value.
* @param type the annotation type
* @param value the annotation value to match
*/
public AutowireCandidateQualifier(Class<?> type, Object value) {
this(type.getName(), value);
}
/**
* Construct a qualifier to match against an annotation of the
* given type name whose {@code value} attribute also matches
* the specified value.
* <p>The type name may match the fully-qualified | name |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/TotalRequestsThrottler.java | {
"start": 15633,
"end": 15692
} | interface ____ by DelayQueue.
*/
private static | needed |
java | apache__flink | flink-connectors/flink-connector-base/src/main/java/org/apache/flink/connector/base/sink/writer/ElementConverter.java | {
"start": 1497,
"end": 1739
} | interface ____<InputT, RequestEntryT> extends Serializable {
RequestEntryT apply(InputT element, SinkWriter.Context context);
default void open(WriterInitContext context) {
// No-op default implementation
}
}
| ElementConverter |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/data/sql/jooq/dslcontext/Tables.java | {
"start": 945,
"end": 1136
} | class ____ extends TableImpl<TAuthorRecord> {
TAuthor(Name name) {
super(name);
}
public final TableField<TAuthorRecord, GregorianCalendar> DATE_OF_BIRTH = null;
}
abstract | TAuthor |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/rest/MethodHandlersTests.java | {
"start": 772,
"end": 3507
} | class ____ extends ESTestCase {
private final RestApiVersion current = RestApiVersion.current();
private final RestApiVersion previous = RestApiVersion.previous();
public void testLookupForDifferentMethodsSameVersion() {
RestHandler putHandler = (request, channel, client) -> {};
RestHandler postHandler = (request, channel, client) -> {};
MethodHandlers methodHandlers = new MethodHandlers("path").addMethod(PUT, current, putHandler)
.addMethod(POST, current, postHandler);
RestHandler found = methodHandlers.getHandler(PUT, current);
assertThat(found, sameInstance(putHandler));
}
public void testLookupForHandlerUnderMultipleMethods() {
RestHandler handler = (request, channel, client) -> {};
MethodHandlers methodHandlers = new MethodHandlers("path").addMethod(PUT, current, handler).addMethod(POST, current, handler);
RestHandler found = methodHandlers.getHandler(PUT, current);
assertThat(found, sameInstance(handler));
found = methodHandlers.getHandler(POST, current);
assertThat(found, sameInstance(handler));
}
public void testLookupForHandlersUnderDifferentVersions() {
RestHandler handler = (request, channel, client) -> {};
MethodHandlers methodHandlers = new MethodHandlers("path").addMethod(PUT, current, handler).addMethod(PUT, previous, handler);
RestHandler found = methodHandlers.getHandler(PUT, current);
assertThat(found, sameInstance(handler));
found = methodHandlers.getHandler(PUT, previous);
assertThat(found, sameInstance(handler));
}
public void testExceptionOnOverride() {
RestHandler handler = (request, channel, client) -> {};
MethodHandlers methodHandlers = new MethodHandlers("path").addMethod(PUT, current, handler);
expectThrows(IllegalArgumentException.class, () -> methodHandlers.addMethod(PUT, current, handler));
}
public void testMissingCurrentHandler() {
RestHandler handler = (request, channel, client) -> {};
MethodHandlers methodHandlers = new MethodHandlers("path").addMethod(PUT, previous, handler).addMethod(POST, previous, handler);
RestHandler found = methodHandlers.getHandler(PUT, current);
assertNull(found);
}
public void testMissingPriorHandlerReturnsCurrentHandler() {
RestHandler handler = (request, channel, client) -> {};
MethodHandlers methodHandlers = new MethodHandlers("path").addMethod(PUT, current, handler).addMethod(POST, current, handler);
RestHandler found = methodHandlers.getHandler(PUT, previous);
assertThat(found, sameInstance(handler));
}
}
| MethodHandlersTests |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java | {
"start": 1098,
"end": 7967
} | enum ____ {
count {
Object getFieldValue(InternalStringStats stats) {
return stats.getCount();
}
},
min_length {
Object getFieldValue(InternalStringStats stats) {
return stats.getMinLength();
}
},
max_length {
Object getFieldValue(InternalStringStats stats) {
return stats.getMaxLength();
}
},
avg_length {
Object getFieldValue(InternalStringStats stats) {
return stats.getAvgLength();
}
},
entropy {
Object getFieldValue(InternalStringStats stats) {
return stats.getEntropy();
}
};
abstract Object getFieldValue(InternalStringStats stats);
}
private final DocValueFormat format;
private final boolean showDistribution;
private final long count;
private final long totalLength;
private final int minLength;
private final int maxLength;
private final Map<String, Long> charOccurrences;
public InternalStringStats(
String name,
long count,
long totalLength,
int minLength,
int maxLength,
Map<String, Long> charOccurences,
boolean showDistribution,
DocValueFormat formatter,
Map<String, Object> metadata
) {
super(name, metadata);
this.format = formatter;
this.showDistribution = showDistribution;
this.count = count;
this.totalLength = totalLength;
this.minLength = minLength;
this.maxLength = maxLength;
this.charOccurrences = charOccurences;
}
/** Read from a stream. */
public InternalStringStats(StreamInput in) throws IOException {
super(in);
format = in.readNamedWriteable(DocValueFormat.class);
showDistribution = in.readBoolean();
count = in.readVLong();
totalLength = in.readVLong();
minLength = in.readVInt();
maxLength = in.readVInt();
charOccurrences = in.readMap(StreamInput::readLong);
}
@Override
protected final void doWriteTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(format);
out.writeBoolean(showDistribution);
out.writeVLong(count);
out.writeVLong(totalLength);
out.writeVInt(minLength);
out.writeVInt(maxLength);
out.writeMap(charOccurrences, StreamOutput::writeLong);
}
public String getWriteableName() {
return StringStatsAggregationBuilder.NAME;
}
public long getCount() {
return count;
}
long getTotalLength() {
return totalLength;
}
public int getMinLength() {
return minLength;
}
public int getMaxLength() {
return maxLength;
}
public double getAvgLength() {
return (double) totalLength / count;
}
public double getEntropy() {
// Compute the sum of double values with Kahan summation algorithm which is more
// accurate than naive summation.
CompensatedSum kahanSummation = new CompensatedSum(0, 0);
for (double p : getDistribution().values()) {
if (p > 0) {
double value = p * log2(p);
kahanSummation.add(value);
}
}
return -kahanSummation.value();
}
/**
* Convert the character occurrences map to character frequencies.
*
* @return A map with the character as key and the probability of
* this character to occur as value. The map is ordered by frequency descending.
*/
Map<String, Double> getDistribution() {
return charOccurrences.entrySet()
.stream()
.sorted((e1, e2) -> e2.getValue().compareTo(e1.getValue()))
.collect(Collectors.toMap(e -> e.getKey(), e -> (double) e.getValue() / totalLength, (e1, e2) -> e2, LinkedHashMap::new));
}
/** Calculate base 2 logarithm */
static double log2(double d) {
return Math.log(d) / Math.log(2.0);
}
Map<String, Long> getCharOccurrences() {
return charOccurrences;
}
boolean getShowDistribution() {
return showDistribution;
}
public String getCountAsString() {
return format.format(getCount()).toString();
}
public String getMinLengthAsString() {
return format.format(getMinLength()).toString();
}
public String getMaxLengthAsString() {
return format.format(getMaxLength()).toString();
}
public String getAvgLengthAsString() {
return format.format(getAvgLength()).toString();
}
public String getEntropyAsString() {
return format.format(getEntropy()).toString();
}
public Object value(String name) {
try {
return Metrics.valueOf(name).getFieldValue(this);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Unknown value [" + name + "] in string stats aggregation");
}
}
@Override
protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) {
return new AggregatorReducer() {
long count = 0;
long totalLength = 0;
int minLength = Integer.MAX_VALUE;
int maxLength = Integer.MIN_VALUE;
final Map<String, Long> occurs = new HashMap<>();
@Override
public void accept(InternalAggregation aggregation) {
InternalStringStats stats = (InternalStringStats) aggregation;
count += stats.getCount();
minLength = Math.min(minLength, stats.getMinLength());
maxLength = Math.max(maxLength, stats.getMaxLength());
totalLength += stats.totalLength;
stats.charOccurrences.forEach((k, v) -> occurs.merge(k, v, Long::sum));
}
@Override
public InternalAggregation get() {
return new InternalStringStats(
name,
count,
totalLength,
minLength,
maxLength,
occurs,
showDistribution,
format,
getMetadata()
);
}
};
}
@Override
protected boolean mustReduceOnSingleInternalAgg() {
return false;
}
@Override
public Object getProperty(List<String> path) {
if (path.isEmpty()) {
return this;
} else if (path.size() == 1) {
return value(path.get(0));
} else {
throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path);
}
}
static | Metrics |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/annotation/AutowiredAnnotationBeanPostProcessorTests.java | {
"start": 148356,
"end": 148597
} | class ____ {
@Autowired
@Qualifier("testBean")
private ObjectFactory<?> testBeanFactory;
public TestBean getTestBean() {
return (TestBean) this.testBeanFactory.getObject();
}
}
public static | ObjectFactoryQualifierInjectionBean |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/model/RequestKind.java | {
"start": 1175,
"end": 2659
} | enum ____ {
/** A default request for an instance. E.g.: {@code FooType} */
INSTANCE,
/** A request for a {@code Provider}. E.g.: {@code Provider<FooType>} */
PROVIDER,
/** A request for a {@code Lazy}. E.g.: {@code Lazy<FooType>} */
LAZY,
/** A request for a {@code Provider} of a {@code Lazy}. E.g.: {@code Provider<Lazy<FooType>>}. */
PROVIDER_OF_LAZY,
/**
* A request for a members injection. E.g. {@code void injectMembers(FooType);}. Can only be
* requested by component interfaces.
*/
MEMBERS_INJECTION,
/** A request for a {@code Producer}. E.g.: {@code Producer<FooType>} */
PRODUCER,
/** A request for a {@code Produced}. E.g.: {@code Produced<FooType>} */
PRODUCED,
/**
* A request for a {@link com.google.common.util.concurrent.ListenableFuture}. E.g.: {@code
* ListenableFuture<FooType>}. These can only be requested by component interfaces.
*/
FUTURE,
;
/** Returns a string that represents requests of this kind for a key. */
public String format(Key key) {
switch (this) {
case INSTANCE:
return key.toString();
case PROVIDER_OF_LAZY:
return String.format("Provider<Lazy<%s>>", key);
case MEMBERS_INJECTION:
return String.format("injectMembers(%s)", key);
case FUTURE:
return String.format("ListenableFuture<%s>", key);
default:
return String.format("%s<%s>", UPPER_UNDERSCORE.to(UPPER_CAMEL, name()), key);
}
}
}
| RequestKind |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/StateWithoutExecutionGraph.java | {
"start": 1280,
"end": 2438
} | class ____ implements State {
private final Context context;
private final Logger logger;
StateWithoutExecutionGraph(Context context, Logger logger) {
this.context = context;
this.logger = logger;
}
@Override
public void cancel() {
context.goToFinished(context.getArchivedExecutionGraph(JobStatus.CANCELED, null));
}
@Override
public void suspend(Throwable cause) {
context.goToFinished(context.getArchivedExecutionGraph(JobStatus.SUSPENDED, cause));
}
@Override
public JobID getJobId() {
return context.getJobId();
}
@Override
public ArchivedExecutionGraph getJob() {
return context.getArchivedExecutionGraph(getJobStatus(), null);
}
@Override
public void handleGlobalFailure(
Throwable cause, CompletableFuture<Map<String, String>> failureLabels) {
context.goToFinished(context.getArchivedExecutionGraph(JobStatus.FAILED, cause));
}
@Override
public Logger getLogger() {
return logger;
}
/** Context of the {@link StateWithoutExecutionGraph} state. */
| StateWithoutExecutionGraph |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/errorhandler/DeadLetterChannelDefinition.java | {
"start": 1407,
"end": 6161
} | class ____ extends DefaultErrorHandlerDefinition {
@XmlAttribute(required = true)
private String deadLetterUri;
@XmlAttribute
@Metadata(label = "advanced", defaultValue = "true", javaType = "java.lang.Boolean")
private String deadLetterHandleNewException;
public DeadLetterChannelDefinition() {
}
public DeadLetterChannelDefinition(DeadLetterChannelDefinition source) {
super(source);
this.deadLetterUri = source.deadLetterUri;
this.deadLetterHandleNewException = source.deadLetterHandleNewException;
}
public DeadLetterChannelDefinition(String deadLetterUri) {
this.deadLetterUri = deadLetterUri;
}
public DeadLetterChannelDefinition(Endpoint deadLetterUri) {
this.deadLetterUri = deadLetterUri.getEndpointUri();
}
@Override
public DeadLetterChannelDefinition copyDefinition() {
return new DeadLetterChannelDefinition(this);
}
@Override
protected RedeliveryPolicyDefinition createRedeliveryPolicy() {
RedeliveryPolicyDefinition answer = super.createRedeliveryPolicy();
// DLC do not log exhausted by default
answer.setLogExhausted("false");
return answer;
}
public String getDeadLetterUri() {
return deadLetterUri;
}
/**
* The dead letter endpoint uri for the Dead Letter error handler.
*/
public void setDeadLetterUri(String deadLetterUri) {
this.deadLetterUri = deadLetterUri;
}
public String getDeadLetterHandleNewException() {
return deadLetterHandleNewException;
}
/**
* Whether the dead letter channel should handle (and ignore) any new exception that may been thrown during sending
* the message to the dead letter endpoint.
* <p/>
* The default value is <tt>true</tt> which means any such kind of exception is handled and ignored. Set this to
* <tt>false</tt> to let the exception be propagated back on the {@link org.apache.camel.Exchange}. This can be used
* in situations where you use transactions, and want to use Camel's dead letter channel to deal with exceptions
* during routing, but if the dead letter channel itself fails because of a new exception being thrown, then by
* setting this to <tt>false</tt> the new exceptions is propagated back and set on the
* {@link org.apache.camel.Exchange}, which allows the transaction to detect the exception, and rollback.
*/
public void setDeadLetterHandleNewException(String deadLetterHandleNewException) {
this.deadLetterHandleNewException = deadLetterHandleNewException;
}
@Override
public boolean supportTransacted() {
return false;
}
@Override
public ErrorHandlerFactory cloneBuilder() {
DeadLetterChannelDefinition answer = new DeadLetterChannelDefinition();
cloneBuilder(answer);
return answer;
}
protected void cloneBuilder(DeadLetterChannelDefinition other) {
other.setDeadLetterUri(getDeadLetterUri());
other.setDeadLetterHandleNewException(getDeadLetterHandleNewException());
super.cloneBuilder(other);
}
/**
* The dead letter endpoint uri for the Dead Letter error handler.
*/
public DeadLetterChannelDefinition deadLetterUri(String deadLetterUri) {
setDeadLetterUri(deadLetterUri);
return this;
}
/**
* Whether the dead letter channel should handle (and ignore) any new exception that may been thrown during sending
* the message to the dead letter endpoint.
* <p/>
* The default value is <tt>true</tt> which means any such kind of exception is handled and ignored. Set this to
* <tt>false</tt> to let the exception be propagated back on the {@link org.apache.camel.Exchange}. This can be used
* in situations where you use transactions, and want to use Camel's dead letter channel to deal with exceptions
* during routing, but if the dead letter channel itself fails because of a new exception being thrown, then by
* setting this to <tt>false</tt> the new exceptions is propagated back and set on the
* {@link org.apache.camel.Exchange}, which allows the transaction to detect the exception, and rollback.
*
* @param handleNewException <tt>true</tt> to handle (and ignore), <tt>false</tt> to catch and propagated the
* exception on the {@link org.apache.camel.Exchange}
* @return the builder
*/
public DefaultErrorHandlerDefinition deadLetterHandleNewException(boolean handleNewException) {
setDeadLetterHandleNewException(handleNewException ? "true" : "false");
return this;
}
}
| DeadLetterChannelDefinition |
java | apache__hadoop | hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSFileReaderTask.java | {
"start": 1403,
"end": 3781
} | class ____ implements Runnable {
public static final Logger LOG =
LoggerFactory.getLogger(AliyunOSSFileReaderTask.class);
private String key;
private AliyunOSSFileSystemStore store;
private ReadBuffer readBuffer;
private static final int MAX_RETRIES = 3;
private RetryPolicy retryPolicy;
public AliyunOSSFileReaderTask(String key, AliyunOSSFileSystemStore store,
ReadBuffer readBuffer) {
this.key = key;
this.store = store;
this.readBuffer = readBuffer;
RetryPolicy defaultPolicy =
RetryPolicies.retryUpToMaximumCountWithFixedSleep(
MAX_RETRIES, 3, TimeUnit.SECONDS);
Map<Class<? extends Exception>, RetryPolicy> policies = new HashMap<>();
policies.put(IOException.class, defaultPolicy);
policies.put(IndexOutOfBoundsException.class,
RetryPolicies.TRY_ONCE_THEN_FAIL);
policies.put(NullPointerException.class,
RetryPolicies.TRY_ONCE_THEN_FAIL);
this.retryPolicy = RetryPolicies.retryByException(defaultPolicy, policies);
}
@Override
public void run() {
int retries = 0;
readBuffer.lock();
try {
while (true) {
try (InputStream in = store.retrieve(
key, readBuffer.getByteStart(), readBuffer.getByteEnd())) {
IOUtils.readFully(in, readBuffer.getBuffer(),
0, readBuffer.getBuffer().length);
readBuffer.setStatus(ReadBuffer.STATUS.SUCCESS);
break;
} catch (Exception e) {
LOG.warn("Exception thrown when retrieve key: "
+ this.key + ", exception: " + e);
try {
RetryPolicy.RetryAction rc = retryPolicy.shouldRetry(
e, retries++, 0, true);
if (rc.action == RetryPolicy.RetryAction.RetryDecision.RETRY) {
Thread.sleep(rc.delayMillis);
} else {
//should not retry
break;
}
} catch (Exception ex) {
//FAIL
LOG.warn("Exception thrown when call shouldRetry, exception " + ex);
break;
}
}
}
if (readBuffer.getStatus() != ReadBuffer.STATUS.SUCCESS) {
readBuffer.setStatus(ReadBuffer.STATUS.ERROR);
}
//notify main thread which wait for this buffer
readBuffer.signalAll();
} finally {
readBuffer.unlock();
}
}
}
| AliyunOSSFileReaderTask |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/errorhandler/ExceptionPolicyKey.java | {
"start": 1172,
"end": 3215
} | class ____ {
private final String routeId;
private final Class<? extends Throwable> exceptionClass;
private final Predicate when;
/**
* Key for exception clause
*
* @param routeId the route, or use <tt>null</tt> for a global scoped
* @param exceptionClass the exception class
* @param when optional predicate when the exception clause should trigger
*/
public ExceptionPolicyKey(String routeId, Class<? extends Throwable> exceptionClass, Predicate when) {
this.routeId = routeId;
this.exceptionClass = exceptionClass;
this.when = when;
}
public Class<?> getExceptionClass() {
return exceptionClass;
}
public Predicate getWhen() {
return when;
}
public String getRouteId() {
return routeId;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ExceptionPolicyKey that = (ExceptionPolicyKey) o;
if (exceptionClass != null ? !exceptionClass.equals(that.exceptionClass) : that.exceptionClass != null) {
return false;
}
if (routeId != null ? !routeId.equals(that.routeId) : that.routeId != null) {
return false;
}
if (when != null ? !when.equals(that.when) : that.when != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = routeId != null ? routeId.hashCode() : 0;
result = 31 * result + (exceptionClass != null ? exceptionClass.hashCode() : 0);
result = 31 * result + (when != null ? when.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "ExceptionPolicyKey[route: " + (routeId != null ? routeId : "<global>") + ", " + exceptionClass
+ (when != null ? " " + when : "") + "]";
}
}
| ExceptionPolicyKey |
java | dropwizard__dropwizard | dropwizard-jetty/src/test/java/io/dropwizard/jetty/RoutingHandlerTest.java | {
"start": 863,
"end": 3982
} | class ____ {
private final Connector connector1 = mock(Connector.class);
private final Connector connector2 = mock(Connector.class);
private final Handler handler1 = spy(new ContextHandler());
private final Handler handler2 = spy(new ContextHandler());
private final RoutingHandler handler = new RoutingHandler(Map.of(connector1,
handler1,
connector2,
handler2));
@Test
void startsAndStopsAllHandlers() throws Exception {
handler1.setServer(mock(Server.class));
handler2.setServer(mock(Server.class));
handler.start();
try {
assertThat(handler1.isStarted())
.isTrue();
assertThat(handler2.isStarted())
.isTrue();
} finally {
handler.stop();
}
assertThat(handler1.isStopped())
.isTrue();
assertThat(handler2.isStopped())
.isTrue();
}
@Test
void routesRequestsToTheConnectorSpecificHandler() throws Exception {
final ConnectionMetaData connectionMetaData = mock(ConnectionMetaData.class);
when(connectionMetaData.getConnector()).thenReturn(connector1);
final Request request = mock(Request.class);
when(request.getConnectionMetaData()).thenReturn(connectionMetaData);
final Response response = mock(Response.class);
final Callback callback = mock(Callback.class);
handler.handle(request, response, callback);
verify(handler1).handle(request, response, callback);
}
@Test
void withSessionHandler() throws Exception {
final ContextHandler handler1 = new ContextHandler();
final ServletContextHandler handler2 = new ServletContextHandler();
final SessionHandler childHandler1 = new SessionHandler();
handler2.setSessionHandler(childHandler1);
final RoutingHandler handler = new RoutingHandler(Map.of(connector1, handler1, connector2, handler2));
Server server = new Server();
server.setHandler(handler);
server.start();
handler.start();
try {
assertThat(getSessionHandlers(handler)).containsOnly(childHandler1);
} finally {
handler.stop();
server.stop();
}
}
@Test
void withoutSessionHandler() throws Exception {
new Server().setHandler(handler);
handler.start();
try {
assertThat(getSessionHandlers(handler)).isEmpty();
} finally {
handler.stop();
}
}
private Set<SessionHandler> getSessionHandlers(final RoutingHandler routingHandler) {
return routingHandler.getServer().getDescendants(ContextHandler.class).stream()
.map(handler -> handler.getDescendant(SessionHandler.class))
.filter(Objects::nonNull).collect(Collectors.toSet());
}
}
| RoutingHandlerTest |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/amazon/AwsSecretSettings.java | {
"start": 4436,
"end": 6164
} | class ____ {
public static Map<String, SettingsConfiguration> get() {
return configuration.getOrCompute();
}
private static final LazyInitializable<Map<String, SettingsConfiguration>, RuntimeException> configuration =
new LazyInitializable<>(
() -> configuration(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.COMPLETION)).collect(
Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)
)
);
}
public static Stream<Map.Entry<String, SettingsConfiguration>> configuration(EnumSet<TaskType> supportedTaskTypes) {
return Stream.of(
Map.entry(
ACCESS_KEY_FIELD,
new SettingsConfiguration.Builder(supportedTaskTypes).setDescription(
"A valid AWS access key that has permissions to use Amazon Bedrock."
)
.setLabel("Access Key")
.setRequired(true)
.setSensitive(true)
.setUpdatable(true)
.setType(SettingsConfigurationFieldType.STRING)
.build()
),
Map.entry(
SECRET_KEY_FIELD,
new SettingsConfiguration.Builder(supportedTaskTypes).setDescription(
"A valid AWS secret key that is paired with the access_key."
)
.setLabel("Secret Key")
.setRequired(true)
.setSensitive(true)
.setUpdatable(true)
.setType(SettingsConfigurationFieldType.STRING)
.build()
)
);
}
}
| Configuration |
java | apache__flink | flink-python/src/main/java/org/apache/flink/streaming/api/utils/ClassLeakCleaner.java | {
"start": 1926,
"end": 2200
} | class ____ even after job finished.
// so, trigger garbage collection explicitly to:
// 1) trigger the execution of the `Finalizer`s of objects created by the finished jobs
// of this TaskManager
// 2) the references to the | loader |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/OptionalOfRedundantMethodTest.java | {
"start": 15335,
"end": 15709
} | class ____ {
String f() {
return "test";
}
}
""")
.setFixChooser(FixChoosers.SECOND)
.doTest();
}
@Test
public void negative_ifPresent() {
compilationTestHelper
.addSourceLines(
"Test.java",
"""
import java.util.Optional;
| Test |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/reactive/ServerHttpSecurityConfigurationBuilder.java | {
"start": 1183,
"end": 1802
} | class ____ {
private ServerHttpSecurityConfigurationBuilder() {
}
public static ServerHttpSecurity http() {
return new ServerHttpSecurityConfiguration().httpSecurity();
}
public static ServerHttpSecurity httpWithDefaultAuthentication() {
ReactiveUserDetailsService reactiveUserDetailsService = ReactiveAuthenticationTestConfiguration
.userDetailsService();
ReactiveAuthenticationManager authenticationManager = new UserDetailsRepositoryReactiveAuthenticationManager(
reactiveUserDetailsService);
return http().authenticationManager(authenticationManager);
}
}
| ServerHttpSecurityConfigurationBuilder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/http/HttpRequest.java | {
"start": 1032,
"end": 1872
} | enum ____ {
HTTP_1_0,
HTTP_1_1
}
HttpBody body();
void setBody(HttpBody body);
List<String> strictCookies();
HttpVersion protocolVersion();
HttpRequest removeHeader(String header);
boolean hasContent();
/**
* Create an http response from this request and the supplied status and content.
*/
HttpResponse createResponse(RestStatus status, BytesReference content);
HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPart firstBodyPart);
@Nullable
Exception getInboundException();
/**
* Release any resources associated with this request. Implementations should be idempotent. The behavior of {@link #body()}
* after this method has been invoked is undefined and implementation specific.
*/
void release();
}
| HttpVersion |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.