language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
integration-tests/reactive-messaging-rabbitmq/src/main/java/io/quarkus/it/rabbitmq/PeopleManager.java
|
{
"start": 321,
"end": 724
}
|
class ____ {
private final Logger log = Logger.getLogger(PeopleManager.class);
private final List<Person> list = new CopyOnWriteArrayList<>();
@Incoming("people-in")
public void consume(JsonObject message) {
list.add(message.mapTo(Person.class));
}
public List<Person> getPeople() {
log.info("Returning people " + list);
return list;
}
}
|
PeopleManager
|
java
|
apache__flink
|
flink-connectors/flink-connector-files/src/test/java/org/apache/flink/connector/file/sink/utils/FileSinkTestUtils.java
|
{
"start": 1647,
"end": 3174
}
|
class ____ extends StringValue
implements InProgressFileWriter.PendingFileRecoverable {
private Path path;
private Path uncommittedPath;
private long size;
public TestPendingFileRecoverable() {
this.path = null;
this.uncommittedPath = null;
this.size = -1L;
}
public TestPendingFileRecoverable(Path path, long size) {
this.path = path;
this.uncommittedPath = new Path(path.getParent(), "." + path.getName());
this.size = size;
}
@Override
public Path getPath() {
return path;
}
public Path getUncommittedPath() {
return uncommittedPath;
}
@Override
public long getSize() {
return size;
}
@Override
public String getValue() {
return size + "," + (path == null ? "" : path.toUri().toString());
}
@Override
public void setValue(CharSequence value, int offset, int len) {
String[] arr = value.subSequence(offset, len).toString().split(",");
size = Integer.parseInt(arr[0]);
path = arr.length == 1 ? null : new Path(arr[1]);
if (path != null) {
uncommittedPath = new Path(path.getParent(), "." + path.getName());
}
}
}
/** A type of testing {@link InProgressFileWriter.InProgressFileRecoverable}. */
public static
|
TestPendingFileRecoverable
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/main/java/io/quarkus/it/arc/UnusedBean.java
|
{
"start": 1322,
"end": 1570
}
|
class ____ {
private final List<Integer> nums;
public NestedDummyInput(List<Integer> nums) {
this.nums = nums;
}
public List<Integer> getNums() {
return nums;
}
}
}
|
NestedDummyInput
|
java
|
apache__camel
|
components/camel-sql/src/test/java/org/apache/camel/processor/aggregate/jdbc/JdbcExchangeSerializationTest.java
|
{
"start": 1247,
"end": 3007
}
|
class ____ extends AbstractJdbcAggregationTestSupport {
@Test
public void testExchangeSerialization() {
final String key = "foo";
Exchange exchange = new DefaultExchange(context);
exchange.getIn().setBody("Hello World");
exchange.getIn().setHeader("name", "Olivier");
exchange.getIn().setHeader("number", 123);
exchange.setProperty("quote", "Camel rocks");
Date now = new Date();
exchange.getIn().setHeader("date", now);
exchange = repoAddAndGet(key, exchange);
Exchange actual = repo.get(context, key);
assertEquals("Hello World", actual.getIn().getBody());
assertEquals("Olivier", actual.getIn().getHeader("name"));
assertEquals(123, actual.getIn().getHeader("number"));
Date date = actual.getIn().getHeader("date", Date.class);
assertNotNull(date);
assertEquals(now.getTime(), date.getTime());
// we do not serialize properties to avoid storing all kind of not needed information
assertNull(actual.getProperty("quote"));
assertSame(context, actual.getContext());
// change something
exchange.getIn().setBody("Bye World");
exchange.getIn().setHeader("name", "Thomas");
exchange.getIn().removeHeader("date");
exchange = repoAddAndGet(key, exchange);
actual = repo.get(context, key);
assertEquals("Bye World", actual.getIn().getBody());
assertEquals("Thomas", actual.getIn().getHeader("name"));
assertEquals(123, actual.getIn().getHeader("number"));
date = actual.getIn().getHeader("date", Date.class);
assertNull(date);
assertSame(context, actual.getContext());
}
}
|
JdbcExchangeSerializationTest
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/ForOverrideChecker.java
|
{
"start": 8117,
"end": 8418
}
|
enum ____ an element, or null if none. */
private static @Nullable Type getOutermostClass(VisitorState state) {
return findLast(
stream(state.getPath())
.filter(t -> t instanceof ClassTree)
.map(t -> ASTHelpers.getType(t)))
.orElse(null);
}
}
|
of
|
java
|
quarkusio__quarkus
|
integration-tests/jpa-h2-embedded/src/main/java/io/quarkus/it/jpa/h2/DialectEndpoint.java
|
{
"start": 492,
"end": 1112
}
|
class ____ {
@Inject
SessionFactory sessionFactory;
@Inject
DataSource dataSource;
@GET
@Path("version")
public String version() throws IOException {
var version = sessionFactory.unwrap(SessionFactoryImplementor.class).getJdbcServices().getDialect().getVersion();
return DialectVersions.toString(version);
}
@GET
@Path("actual-db-version")
public String actualDbVersion() throws IOException, SQLException {
try (var conn = dataSource.getConnection()) {
return conn.getMetaData().getDatabaseProductVersion();
}
}
}
|
DialectEndpoint
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/method/configuration/Gh4020GlobalMethodSecurityConfigurationTests.java
|
{
"start": 1780,
"end": 2109
}
|
class ____ {
@Autowired
DenyAllService denyAll;
// gh-4020
@Test
public void denyAll() {
assertThatExceptionOfType(AuthenticationCredentialsNotFoundException.class).isThrownBy(this.denyAll::denyAll);
}
@Configuration
@EnableGlobalMethodSecurity(prePostEnabled = true)
static
|
Gh4020GlobalMethodSecurityConfigurationTests
|
java
|
google__gson
|
gson/src/main/java/com/google/gson/internal/NonNullElementWrapperList.java
|
{
"start": 1064,
"end": 3259
}
|
class ____<E> extends AbstractList<E> implements RandomAccess {
// Explicitly specify ArrayList as type to guarantee that delegate implements RandomAccess
private final ArrayList<E> delegate;
@SuppressWarnings("NonApiType")
public NonNullElementWrapperList(ArrayList<E> delegate) {
this.delegate = Objects.requireNonNull(delegate);
}
@Override
public E get(int index) {
return delegate.get(index);
}
@Override
public int size() {
return delegate.size();
}
private E nonNull(E element) {
if (element == null) {
throw new NullPointerException("Element must be non-null");
}
return element;
}
@Override
public E set(int index, E element) {
return delegate.set(index, nonNull(element));
}
@Override
public void add(int index, E element) {
delegate.add(index, nonNull(element));
}
@Override
public E remove(int index) {
return delegate.remove(index);
}
/* The following methods are overridden because their default implementation is inefficient */
@Override
public void clear() {
delegate.clear();
}
@SuppressWarnings("UngroupedOverloads") // this is intentionally ungrouped, see comment above
@Override
public boolean remove(Object o) {
return delegate.remove(o);
}
@Override
public boolean removeAll(Collection<?> c) {
return delegate.removeAll(c);
}
@Override
public boolean retainAll(Collection<?> c) {
return delegate.retainAll(c);
}
@Override
public boolean contains(Object o) {
return delegate.contains(o);
}
@Override
public int indexOf(Object o) {
return delegate.indexOf(o);
}
@Override
public int lastIndexOf(Object o) {
return delegate.lastIndexOf(o);
}
@Override
public Object[] toArray() {
return delegate.toArray();
}
@Override
public <T> T[] toArray(T[] a) {
return delegate.toArray(a);
}
@Override
public boolean equals(Object o) {
return delegate.equals(o);
}
@Override
public int hashCode() {
return delegate.hashCode();
}
// Maybe also delegate List#sort and List#spliterator in the future, but that
// requires Android API level 24
}
|
NonNullElementWrapperList
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/exc/InvalidDefinitionException.java
|
{
"start": 516,
"end": 685
}
|
class ____ its properties.
* This is in contrast to {@link MismatchedInputException} which
* signals a problem with input to map.
*/
@SuppressWarnings("serial")
public
|
or
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/DoNotCallCheckerTest.java
|
{
"start": 21176,
"end": 21574
}
|
class ____ {
void f(Field f) {
// BUG: Diagnostic contains: getDeclaringClass
f.getClass();
}
}
""")
.doTest();
}
@Test
public void positive_methodGetClass() {
testHelper
.addSourceLines(
"Test.java",
"""
import java.lang.reflect.Method;
|
Test
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/ReadFromImpl.java
|
{
"start": 11578,
"end": 12619
}
|
class ____ extends ReadFrom {
private final ReadFrom delegate;
private final boolean orderSensitive;
public ReadFromRegex(Pattern pattern, boolean orderSensitive) {
LettuceAssert.notNull(pattern, "Pattern must not be null");
this.orderSensitive = orderSensitive;
delegate = new UnorderedPredicateReadFromAdapter(redisNodeDescription -> {
String host = redisNodeDescription.getUri().getHost();
if (LettuceStrings.isEmpty(host)) {
return false;
}
return pattern.matcher(host).matches();
});
}
@Override
public List<RedisNodeDescription> select(Nodes nodes) {
return delegate.select(nodes);
}
@Override
protected boolean isOrderSensitive() {
return orderSensitive;
}
}
/**
* {@link Predicate}-based {@link ReadFrom} implementation.
*
* @since 5.2
*/
static
|
ReadFromRegex
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/mapping/meta/MethodMeta.java
|
{
"start": 1361,
"end": 6009
}
|
class ____ extends AnnotationSupport {
private final List<Method> hierarchy;
private final Method method;
private MethodDescriptor methodDescriptor;
private ParameterMeta[] parameters;
private ParameterMeta returnParameter;
private final ServiceMeta serviceMeta;
public MethodMeta(List<Method> hierarchy, MethodDescriptor methodDescriptor, ServiceMeta serviceMeta) {
super(serviceMeta.getToolKit());
this.hierarchy = hierarchy;
method = initMethod(hierarchy, methodDescriptor);
this.methodDescriptor = methodDescriptor;
this.serviceMeta = serviceMeta;
}
private Method initMethod(List<Method> hierarchy, MethodDescriptor methodDescriptor) {
Method method = null;
if (methodDescriptor != null) {
method = methodDescriptor.getMethod();
}
return method == null ? hierarchy.get(hierarchy.size() - 1) : method;
}
public void initParameters() {
RpcType rpcType = methodDescriptor.getRpcType();
if (rpcType == RpcType.CLIENT_STREAM || rpcType == RpcType.BI_STREAM) {
Type genericType = TypeUtils.getNestedGenericType(method.getGenericReturnType(), 0);
parameters = new ParameterMeta[] {new StreamParameterMeta(getToolKit(), genericType, method, hierarchy)};
return;
}
int count = rpcType == RpcType.SERVER_STREAM ? 1 : method.getParameterCount();
List<List<Parameter>> parameterHierarchies = new ArrayList<>(count);
for (int i = 0, size = hierarchy.size(); i < size; i++) {
Method m = hierarchy.get(i);
Parameter[] mps = m.getParameters();
for (int j = 0; j < count; j++) {
List<Parameter> parameterHierarchy;
if (parameterHierarchies.size() <= j) {
parameterHierarchy = new ArrayList<>(size);
parameterHierarchies.add(parameterHierarchy);
} else {
parameterHierarchy = parameterHierarchies.get(j);
}
parameterHierarchy.add(mps[j]);
}
}
String[] parameterNames = getToolKit().getParameterNames(method);
ParameterMeta[] parameters = new ParameterMeta[count];
for (int i = 0; i < count; i++) {
String parameterName = parameterNames == null ? null : parameterNames[i];
parameters[i] = new MethodParameterMeta(parameterHierarchies.get(i), parameterName, i, this);
}
this.parameters = parameters;
}
public List<Method> getHierarchy() {
return hierarchy;
}
public Method getMethod() {
return method;
}
public MethodDescriptor getMethodDescriptor() {
return methodDescriptor;
}
public void setMethodDescriptor(MethodDescriptor methodDescriptor) {
this.methodDescriptor = methodDescriptor;
}
public ParameterMeta[] getParameters() {
return parameters;
}
public ParameterMeta getReturnParameter() {
ParameterMeta returnParameter = this.returnParameter;
if (returnParameter == null) {
this.returnParameter = returnParameter = new ReturnParameterMeta(getToolKit(), hierarchy, method);
}
return returnParameter;
}
public ServiceMeta getServiceMeta() {
return serviceMeta;
}
public Class<?> getReturnType() {
return method.getReturnType();
}
public Class<?> getActualReturnType() {
return getReturnParameter().getActualType();
}
public Type getGenericReturnType() {
return method.getGenericReturnType();
}
public Type getActualGenericReturnType() {
return getReturnParameter().getActualGenericType();
}
@Override
public List<? extends AnnotatedElement> getAnnotatedElements() {
return hierarchy;
}
@Override
protected AnnotatedElement getAnnotatedElement() {
return method;
}
@Override
public int hashCode() {
return method.hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != MethodMeta.class) {
return false;
}
return method.equals(((MethodMeta) obj).method);
}
@Override
public String toString() {
return "MethodMeta{method=" + toShortString() + ", service=" + serviceMeta.toShortString() + '}';
}
public String toShortString() {
return MethodUtils.toShortString(method);
}
public static final
|
MethodMeta
|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/test/java/io/quarkus/gradle/TestWithAppJunitPropertiesFileTest.java
|
{
"start": 165,
"end": 573
}
|
class ____ extends QuarkusGradleWrapperTestBase {
@Test
public void shouldRunTestsSuccessfully() throws Exception {
final File projectDir = getProjectDir("with-junit-properties-file");
BuildResult buildResult = runGradleWrapper(projectDir, "test");
assertThat(BuildResult.isSuccessful(buildResult.getTasks().get(":test"))).isTrue();
}
}
|
TestWithAppJunitPropertiesFileTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/StGeohashNoLicenseIT.java
|
{
"start": 403,
"end": 768
}
|
class ____ extends StGeohashLicenseIT {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(SpatialNoLicenseTestCase.TestSpatialPlugin.class, SpatialNoLicenseTestCase.TestEsqlPlugin.class);
}
public void testGeoGridWithShapes() {
assertGeoGridFailsWith("index_geo_shape");
}
}
|
StGeohashNoLicenseIT
|
java
|
quarkusio__quarkus
|
integration-tests/observability-lgtm/src/test/java/io/quarkus/observability/test/support/ReloadEndpoint.java
|
{
"start": 594,
"end": 1365
}
|
class ____ {
private static final Logger log = Logger.getLogger(ReloadEndpoint.class);
@Inject
MeterRegistry registry;
Random random = new SecureRandom();
double[] arr = new double[1];
@PostConstruct
public void start() {
String key = System.getProperty("tag-key", "test");
Gauge.builder("xvalue", arr, a -> arr[0])
.baseUnit("X")
.description("Some random x")
.tag(key, "x")
.register(registry);
}
@GET
@Produces(MediaType.TEXT_PLAIN)
@Path("/poke")
public String poke(@QueryParam("f") int f) {
log.infof("Poke %s", f);
double x = random.nextDouble() * f;
arr[0] = x;
return "poke:" + x;
}
}
|
ReloadEndpoint
|
java
|
netty__netty
|
codec-haproxy/src/test/java/io/netty/handler/codec/haproxy/HAProxyMessageDecoderTest.java
|
{
"start": 1901,
"end": 47894
}
|
class ____ {
private EmbeddedChannel ch;
@BeforeEach
public void setUp() {
ch = new EmbeddedChannel(new HAProxyMessageDecoder());
}
@Test
public void testIPV4Decode() {
int startChannels = ch.pipeline().names().size();
String header = "PROXY TCP4 192.168.0.1 192.168.0.11 56324 443\r\n";
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V1, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.TCP4, msg.proxiedProtocol());
assertEquals("192.168.0.1", msg.sourceAddress());
assertEquals("192.168.0.11", msg.destinationAddress());
assertEquals(56324, msg.sourcePort());
assertEquals(443, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testIPV6Decode() {
int startChannels = ch.pipeline().names().size();
String header = "PROXY TCP6 2001:0db8:85a3:0000:0000:8a2e:0370:7334 1050:0:0:0:5:600:300c:326b 56324 443\r\n";
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V1, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.TCP6, msg.proxiedProtocol());
assertEquals("2001:0db8:85a3:0000:0000:8a2e:0370:7334", msg.sourceAddress());
assertEquals("1050:0:0:0:5:600:300c:326b", msg.destinationAddress());
assertEquals(56324, msg.sourcePort());
assertEquals(443, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testUnknownProtocolDecode() {
int startChannels = ch.pipeline().names().size();
String header = "PROXY UNKNOWN 192.168.0.1 192.168.0.11 56324 443\r\n";
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V1, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.UNKNOWN, msg.proxiedProtocol());
assertNull(msg.sourceAddress());
assertNull(msg.destinationAddress());
assertEquals(0, msg.sourcePort());
assertEquals(0, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testV1NoUDP() {
final String header = "PROXY UDP4 192.168.0.1 192.168.0.11 56324 443\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testInvalidPort() {
final String header = "PROXY TCP4 192.168.0.1 192.168.0.11 80000 443\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testInvalidIPV4Address() {
final String header = "PROXY TCP4 299.168.0.1 192.168.0.11 56324 443\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testInvalidIPV6Address() {
final String header =
"PROXY TCP6 r001:0db8:85a3:0000:0000:8a2e:0370:7334 1050:0:0:0:5:600:300c:326b 56324 443\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testInvalidProtocol() {
final String header = "PROXY TCP7 192.168.0.1 192.168.0.11 56324 443\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testMissingParams() {
final String header = "PROXY TCP4 192.168.0.1 192.168.0.11 56324\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testTooManyParams() {
final String header = "PROXY TCP4 192.168.0.1 192.168.0.11 56324 443 123\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testInvalidCommand() {
final String header = "PING TCP4 192.168.0.1 192.168.0.11 56324 443\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testInvalidEOL() {
final String header = "PROXY TCP4 192.168.0.1 192.168.0.11 56324 443\nGET / HTTP/1.1\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testHeaderTooLong() {
final String header = "PROXY TCP4 192.168.0.1 192.168.0.11 56324 " +
"00000000000000000000000000000000000000000000000000000000000000000443\r\n";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
}
});
}
@Test
public void testFailSlowHeaderTooLong() {
final EmbeddedChannel slowFailCh = new EmbeddedChannel(new HAProxyMessageDecoder(false));
try {
String headerPart1 = "PROXY TCP4 192.168.0.1 192.168.0.11 56324 " +
"000000000000000000000000000000000000000000000000000000000000000000000443";
// Should not throw exception
assertFalse(slowFailCh.writeInbound(copiedBuffer(headerPart1, CharsetUtil.US_ASCII)));
String headerPart2 = "more header data";
// Should not throw exception
assertFalse(slowFailCh.writeInbound(copiedBuffer(headerPart2, CharsetUtil.US_ASCII)));
final String headerPart3 = "end of header\r\n";
int discarded = headerPart1.length() + headerPart2.length() + headerPart3.length() - 2;
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
slowFailCh.writeInbound(copiedBuffer(headerPart3, CharsetUtil.US_ASCII));
}
}, "over " + discarded);
} finally {
assertFalse(slowFailCh.finishAndReleaseAll());
}
}
@Test
public void testFailFastHeaderTooLong() {
final EmbeddedChannel fastFailCh = new EmbeddedChannel(new HAProxyMessageDecoder(true));
try {
final String headerPart1 = "PROXY TCP4 192.168.0.1 192.168.0.11 56324 " +
"000000000000000000000000000000000000000000000000000000000000000000000443";
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
fastFailCh.writeInbound(copiedBuffer(headerPart1, CharsetUtil.US_ASCII));
}
}, "over " + headerPart1.length());
} finally {
assertFalse(fastFailCh.finishAndReleaseAll());
}
}
@Test
public void testIncompleteHeader() {
String header = "PROXY TCP4 192.168.0.1 192.168.0.11 56324";
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
assertNull(ch.readInbound());
assertFalse(ch.finish());
}
@Test
public void testCloseOnInvalid() {
ChannelFuture closeFuture = ch.closeFuture();
String header = "GET / HTTP/1.1\r\n";
try {
ch.writeInbound(copiedBuffer(header, CharsetUtil.US_ASCII));
} catch (HAProxyProtocolException ppex) {
// swallow this exception since we're just testing to be sure the channel was closed
}
boolean isComplete = closeFuture.awaitUninterruptibly(5000);
if (!isComplete || !closeFuture.isDone() || !closeFuture.isSuccess()) {
fail("Expected channel close");
}
}
@Test
public void testTransportProtocolAndAddressFamily() {
final byte unknown = HAProxyProxiedProtocol.UNKNOWN.byteValue();
final byte tcp4 = HAProxyProxiedProtocol.TCP4.byteValue();
final byte tcp6 = HAProxyProxiedProtocol.TCP6.byteValue();
final byte udp4 = HAProxyProxiedProtocol.UDP4.byteValue();
final byte udp6 = HAProxyProxiedProtocol.UDP6.byteValue();
final byte unix_stream = HAProxyProxiedProtocol.UNIX_STREAM.byteValue();
final byte unix_dgram = HAProxyProxiedProtocol.UNIX_DGRAM.byteValue();
assertEquals(TransportProtocol.UNSPEC, TransportProtocol.valueOf(unknown));
assertEquals(TransportProtocol.STREAM, TransportProtocol.valueOf(tcp4));
assertEquals(TransportProtocol.STREAM, TransportProtocol.valueOf(tcp6));
assertEquals(TransportProtocol.STREAM, TransportProtocol.valueOf(unix_stream));
assertEquals(TransportProtocol.DGRAM, TransportProtocol.valueOf(udp4));
assertEquals(TransportProtocol.DGRAM, TransportProtocol.valueOf(udp6));
assertEquals(TransportProtocol.DGRAM, TransportProtocol.valueOf(unix_dgram));
assertEquals(AddressFamily.AF_UNSPEC, AddressFamily.valueOf(unknown));
assertEquals(AddressFamily.AF_IPv4, AddressFamily.valueOf(tcp4));
assertEquals(AddressFamily.AF_IPv4, AddressFamily.valueOf(udp4));
assertEquals(AddressFamily.AF_IPv6, AddressFamily.valueOf(tcp6));
assertEquals(AddressFamily.AF_IPv6, AddressFamily.valueOf(udp6));
assertEquals(AddressFamily.AF_UNIX, AddressFamily.valueOf(unix_stream));
assertEquals(AddressFamily.AF_UNIX, AddressFamily.valueOf(unix_dgram));
}
@Test
public void testV2IPV4Decode() {
byte[] header = new byte[28];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x11; // TCP over IPv4
header[14] = 0x00; // Remaining Bytes
header[15] = 0x0c; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
header[26] = 0x01; // Destination Port
header[27] = (byte) 0xbb; // -----
int startChannels = ch.pipeline().names().size();
ch.writeInbound(copiedBuffer(header));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V2, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.TCP4, msg.proxiedProtocol());
assertEquals("192.168.0.1", msg.sourceAddress());
assertEquals("192.168.0.11", msg.destinationAddress());
assertEquals(56324, msg.sourcePort());
assertEquals(443, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testV2UDPDecode() {
byte[] header = new byte[28];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x12; // UDP over IPv4
header[14] = 0x00; // Remaining Bytes
header[15] = 0x0c; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
header[26] = 0x01; // Destination Port
header[27] = (byte) 0xbb; // -----
int startChannels = ch.pipeline().names().size();
ch.writeInbound(copiedBuffer(header));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V2, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.UDP4, msg.proxiedProtocol());
assertEquals("192.168.0.1", msg.sourceAddress());
assertEquals("192.168.0.11", msg.destinationAddress());
assertEquals(56324, msg.sourcePort());
assertEquals(443, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testv2IPV6Decode() {
byte[] header = new byte[52];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x21; // TCP over IPv6
header[14] = 0x00; // Remaining Bytes
header[15] = 0x24; // -----
header[16] = 0x20; // Source Address
header[17] = 0x01; // -----
header[18] = 0x0d; // -----
header[19] = (byte) 0xb8; // -----
header[20] = (byte) 0x85; // -----
header[21] = (byte) 0xa3; // -----
header[22] = 0x00; // -----
header[23] = 0x00; // -----
header[24] = 0x00; // -----
header[25] = 0x00; // -----
header[26] = (byte) 0x8a; // -----
header[27] = 0x2e; // -----
header[28] = 0x03; // -----
header[29] = 0x70; // -----
header[30] = 0x73; // -----
header[31] = 0x34; // -----
header[32] = 0x10; // Destination Address
header[33] = 0x50; // -----
header[34] = 0x00; // -----
header[35] = 0x00; // -----
header[36] = 0x00; // -----
header[37] = 0x00; // -----
header[38] = 0x00; // -----
header[39] = 0x00; // -----
header[40] = 0x00; // -----
header[41] = 0x05; // -----
header[42] = 0x06; // -----
header[43] = 0x00; // -----
header[44] = 0x30; // -----
header[45] = 0x0c; // -----
header[46] = 0x32; // -----
header[47] = 0x6b; // -----
header[48] = (byte) 0xdc; // Source Port
header[49] = 0x04; // -----
header[50] = 0x01; // Destination Port
header[51] = (byte) 0xbb; // -----
int startChannels = ch.pipeline().names().size();
ch.writeInbound(copiedBuffer(header));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V2, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.TCP6, msg.proxiedProtocol());
assertEquals("2001:db8:85a3:0:0:8a2e:370:7334", msg.sourceAddress());
assertEquals("1050:0:0:0:5:600:300c:326b", msg.destinationAddress());
assertEquals(56324, msg.sourcePort());
assertEquals(443, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testv2UnixDecode() {
byte[] header = new byte[232];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x31; // UNIX_STREAM
header[14] = 0x00; // Remaining Bytes
header[15] = (byte) 0xd8; // -----
header[16] = 0x2f; // Source Address
header[17] = 0x76; // -----
header[18] = 0x61; // -----
header[19] = 0x72; // -----
header[20] = 0x2f; // -----
header[21] = 0x72; // -----
header[22] = 0x75; // -----
header[23] = 0x6e; // -----
header[24] = 0x2f; // -----
header[25] = 0x73; // -----
header[26] = 0x72; // -----
header[27] = 0x63; // -----
header[28] = 0x2e; // -----
header[29] = 0x73; // -----
header[30] = 0x6f; // -----
header[31] = 0x63; // -----
header[32] = 0x6b; // -----
header[33] = 0x00; // -----
header[124] = 0x2f; // Destination Address
header[125] = 0x76; // -----
header[126] = 0x61; // -----
header[127] = 0x72; // -----
header[128] = 0x2f; // -----
header[129] = 0x72; // -----
header[130] = 0x75; // -----
header[131] = 0x6e; // -----
header[132] = 0x2f; // -----
header[133] = 0x64; // -----
header[134] = 0x65; // -----
header[135] = 0x73; // -----
header[136] = 0x74; // -----
header[137] = 0x2e; // -----
header[138] = 0x73; // -----
header[139] = 0x6f; // -----
header[140] = 0x63; // -----
header[141] = 0x6b; // -----
header[142] = 0x00; // -----
int startChannels = ch.pipeline().names().size();
ch.writeInbound(copiedBuffer(header));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V2, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.UNIX_STREAM, msg.proxiedProtocol());
assertEquals("/var/run/src.sock", msg.sourceAddress());
assertEquals("/var/run/dest.sock", msg.destinationAddress());
assertEquals(0, msg.sourcePort());
assertEquals(0, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testV2LocalProtocolDecode() {
byte[] header = new byte[28];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x20; // v2, cmd=LOCAL
header[13] = 0x00; // Unspecified transport protocol and address family
header[14] = 0x00; // Remaining Bytes
header[15] = 0x0c; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
header[26] = 0x01; // Destination Port
header[27] = (byte) 0xbb; // -----
int startChannels = ch.pipeline().names().size();
ch.writeInbound(copiedBuffer(header));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V2, msg.protocolVersion());
assertEquals(HAProxyCommand.LOCAL, msg.command());
assertEquals(HAProxyProxiedProtocol.UNKNOWN, msg.proxiedProtocol());
assertNull(msg.sourceAddress());
assertNull(msg.destinationAddress());
assertEquals(0, msg.sourcePort());
assertEquals(0, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testV2UnknownProtocolDecode() {
byte[] header = new byte[28];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x00; // Unspecified transport protocol and address family
header[14] = 0x00; // Remaining Bytes
header[15] = 0x0c; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
header[26] = 0x01; // Destination Port
header[27] = (byte) 0xbb; // -----
int startChannels = ch.pipeline().names().size();
ch.writeInbound(copiedBuffer(header));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V2, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.UNKNOWN, msg.proxiedProtocol());
assertNull(msg.sourceAddress());
assertNull(msg.destinationAddress());
assertEquals(0, msg.sourcePort());
assertEquals(0, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testV2WithSslTLVs() {
ch = new EmbeddedChannel(new HAProxyMessageDecoder());
final byte[] bytes = {
13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, 33, 17, 0, 35, 127, 0, 0, 1, 127, 0, 0, 1,
-55, -90, 7, 89, 32, 0, 20, 5, 0, 0, 0, 0, 33, 0, 5, 84, 76, 83, 118, 49, 34, 0, 4, 76, 69, 65, 70
};
int startChannels = ch.pipeline().names().size();
assertTrue(ch.writeInbound(copiedBuffer(bytes)));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V2, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.TCP4, msg.proxiedProtocol());
assertEquals("127.0.0.1", msg.sourceAddress());
assertEquals("127.0.0.1", msg.destinationAddress());
assertEquals(51622, msg.sourcePort());
assertEquals(1881, msg.destinationPort());
final List<HAProxyTLV> tlvs = msg.tlvs();
assertEquals(3, tlvs.size());
final HAProxyTLV firstTlv = tlvs.get(0);
assertEquals(HAProxyTLV.Type.PP2_TYPE_SSL, firstTlv.type());
final HAProxySSLTLV sslTlv = (HAProxySSLTLV) firstTlv;
assertEquals(0, sslTlv.verify());
assertTrue(sslTlv.isPP2ClientSSL());
assertTrue(sslTlv.isPP2ClientCertSess());
assertFalse(sslTlv.isPP2ClientCertConn());
final HAProxyTLV secondTlv = tlvs.get(1);
assertEquals(HAProxyTLV.Type.PP2_TYPE_SSL_VERSION, secondTlv.type());
ByteBuf secondContentBuf = secondTlv.content();
byte[] secondContent = new byte[secondContentBuf.readableBytes()];
secondContentBuf.readBytes(secondContent);
assertArrayEquals("TLSv1".getBytes(CharsetUtil.US_ASCII), secondContent);
final HAProxyTLV thirdTLV = tlvs.get(2);
assertEquals(HAProxyTLV.Type.PP2_TYPE_SSL_CN, thirdTLV.type());
ByteBuf thirdContentBuf = thirdTLV.content();
byte[] thirdContent = new byte[thirdContentBuf.readableBytes()];
thirdContentBuf.readBytes(thirdContent);
assertArrayEquals("LEAF".getBytes(CharsetUtil.US_ASCII), thirdContent);
assertTrue(sslTlv.encapsulatedTLVs().contains(secondTlv));
assertTrue(sslTlv.encapsulatedTLVs().contains(thirdTLV));
assertTrue(0 < firstTlv.refCnt());
assertTrue(0 < secondTlv.refCnt());
assertTrue(0 < thirdTLV.refCnt());
assertTrue(msg.release());
assertEquals(0, firstTlv.refCnt());
assertEquals(0, secondTlv.refCnt());
assertEquals(0, thirdTLV.refCnt());
assertNull(ch.readInbound());
assertFalse(ch.finish());
}
@Test
public void testReleaseHAProxyMessage() {
ch = new EmbeddedChannel(new HAProxyMessageDecoder());
final byte[] bytes = {
13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, 33, 17, 0, 35, 127, 0, 0, 1, 127, 0, 0, 1,
-55, -90, 7, 89, 32, 0, 20, 5, 0, 0, 0, 0, 33, 0, 5, 84, 76, 83, 118, 49, 34, 0, 4, 76, 69, 65, 70
};
int startChannels = ch.pipeline().names().size();
assertTrue(ch.writeInbound(copiedBuffer(bytes)));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
HAProxyMessage msg = (HAProxyMessage) msgObj;
final List<HAProxyTLV> tlvs = msg.tlvs();
assertEquals(3, tlvs.size());
assertEquals(1, msg.refCnt());
for (HAProxyTLV tlv : tlvs) {
assertEquals(3, tlv.refCnt());
}
// Retain the haproxy message
msg.retain();
assertEquals(2, msg.refCnt());
for (HAProxyTLV tlv : tlvs) {
assertEquals(3, tlv.refCnt());
}
// Decrease the haproxy message refCnt
msg.release();
assertEquals(1, msg.refCnt());
for (HAProxyTLV tlv : tlvs) {
assertEquals(3, tlv.refCnt());
}
// Release haproxy message, TLVs will be released with it
msg.release();
assertEquals(0, msg.refCnt());
for (HAProxyTLV tlv : tlvs) {
assertEquals(0, tlv.refCnt());
}
}
@Test
public void testV2WithTLV() {
ch = new EmbeddedChannel(new HAProxyMessageDecoder(4));
byte[] header = new byte[236];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x31; // UNIX_STREAM
header[14] = 0x00; // Remaining Bytes
header[15] = (byte) 0xdc; // -----
header[16] = 0x2f; // Source Address
header[17] = 0x76; // -----
header[18] = 0x61; // -----
header[19] = 0x72; // -----
header[20] = 0x2f; // -----
header[21] = 0x72; // -----
header[22] = 0x75; // -----
header[23] = 0x6e; // -----
header[24] = 0x2f; // -----
header[25] = 0x73; // -----
header[26] = 0x72; // -----
header[27] = 0x63; // -----
header[28] = 0x2e; // -----
header[29] = 0x73; // -----
header[30] = 0x6f; // -----
header[31] = 0x63; // -----
header[32] = 0x6b; // -----
header[33] = 0x00; // -----
header[124] = 0x2f; // Destination Address
header[125] = 0x76; // -----
header[126] = 0x61; // -----
header[127] = 0x72; // -----
header[128] = 0x2f; // -----
header[129] = 0x72; // -----
header[130] = 0x75; // -----
header[131] = 0x6e; // -----
header[132] = 0x2f; // -----
header[133] = 0x64; // -----
header[134] = 0x65; // -----
header[135] = 0x73; // -----
header[136] = 0x74; // -----
header[137] = 0x2e; // -----
header[138] = 0x73; // -----
header[139] = 0x6f; // -----
header[140] = 0x63; // -----
header[141] = 0x6b; // -----
header[142] = 0x00; // -----
// ---- Additional data (TLV) ---- \\
header[232] = 0x01; // Type
header[233] = 0x00; // Remaining bytes
header[234] = 0x01; // -----
header[235] = 0x01; // Payload
int startChannels = ch.pipeline().names().size();
ch.writeInbound(copiedBuffer(header));
Object msgObj = ch.readInbound();
assertEquals(startChannels - 1, ch.pipeline().names().size());
assertThat(msgObj).isInstanceOf(HAProxyMessage.class);
HAProxyMessage msg = (HAProxyMessage) msgObj;
assertEquals(HAProxyProtocolVersion.V2, msg.protocolVersion());
assertEquals(HAProxyCommand.PROXY, msg.command());
assertEquals(HAProxyProxiedProtocol.UNIX_STREAM, msg.proxiedProtocol());
assertEquals("/var/run/src.sock", msg.sourceAddress());
assertEquals("/var/run/dest.sock", msg.destinationAddress());
assertEquals(0, msg.sourcePort());
assertEquals(0, msg.destinationPort());
assertNull(ch.readInbound());
assertFalse(ch.finish());
assertTrue(msg.release());
}
@Test
public void testV2InvalidProtocol() {
final byte[] header = new byte[28];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x41; // Bogus transport protocol
header[14] = 0x00; // Remaining Bytes
header[15] = 0x0c; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
header[26] = 0x01; // Destination Port
header[27] = (byte) 0xbb; // -----
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header));
}
});
}
@Test
public void testV2MissingParams() {
final byte[] header = new byte[26];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x11; // TCP over IPv4
header[14] = 0x00; // Remaining Bytes
header[15] = 0x0a; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header));
}
});
}
@Test
public void testV2InvalidCommand() {
final byte[] header = new byte[28];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x22; // v2, Bogus command
header[13] = 0x11; // TCP over IPv4
header[14] = 0x00; // Remaining Bytes
header[15] = 0x0c; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
header[26] = 0x01; // Destination Port
header[27] = (byte) 0xbb; // -----
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header));
}
});
}
@Test
public void testV2InvalidVersion() {
final byte[] header = new byte[28];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x31; // Bogus version, cmd=PROXY
header[13] = 0x11; // TCP over IPv4
header[14] = 0x00; // Remaining Bytes
header[15] = 0x0c; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
header[26] = 0x01; // Destination Port
header[27] = (byte) 0xbb; // -----
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header));
}
});
}
@Test
public void testV2HeaderTooLong() {
ch = new EmbeddedChannel(new HAProxyMessageDecoder(0));
final byte[] header = new byte[248];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
header[13] = 0x11; // TCP over IPv4
header[14] = 0x00; // Remaining Bytes
header[15] = (byte) 0xe8; // -----
header[16] = (byte) 0xc0; // Source Address
header[17] = (byte) 0xa8; // -----
header[18] = 0x00; // -----
header[19] = 0x01; // -----
header[20] = (byte) 0xc0; // Destination Address
header[21] = (byte) 0xa8; // -----
header[22] = 0x00; // -----
header[23] = 0x0b; // -----
header[24] = (byte) 0xdc; // Source Port
header[25] = 0x04; // -----
header[26] = 0x01; // Destination Port
header[27] = (byte) 0xbb; // -----
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(copiedBuffer(header));
}
});
}
@Test
public void testV2IncompleteHeader() {
byte[] header = new byte[13];
header[0] = 0x0D; // Binary Prefix
header[1] = 0x0A; // -----
header[2] = 0x0D; // -----
header[3] = 0x0A; // -----
header[4] = 0x00; // -----
header[5] = 0x0D; // -----
header[6] = 0x0A; // -----
header[7] = 0x51; // -----
header[8] = 0x55; // -----
header[9] = 0x49; // -----
header[10] = 0x54; // -----
header[11] = 0x0A; // -----
header[12] = 0x21; // v2, cmd=PROXY
ch.writeInbound(copiedBuffer(header));
assertNull(ch.readInbound());
assertFalse(ch.finish());
}
@Test
public void testDetectProtocol() {
final ByteBuf validHeaderV1 = copiedBuffer("PROXY TCP4 192.168.0.1 192.168.0.11 56324 443\r\n",
CharsetUtil.US_ASCII);
ProtocolDetectionResult<HAProxyProtocolVersion> result = HAProxyMessageDecoder.detectProtocol(validHeaderV1);
assertEquals(ProtocolDetectionState.DETECTED, result.state());
assertEquals(HAProxyProtocolVersion.V1, result.detectedProtocol());
validHeaderV1.release();
final ByteBuf invalidHeader = copiedBuffer("Invalid header", CharsetUtil.US_ASCII);
result = HAProxyMessageDecoder.detectProtocol(invalidHeader);
assertEquals(ProtocolDetectionState.INVALID, result.state());
assertNull(result.detectedProtocol());
invalidHeader.release();
final ByteBuf validHeaderV2 = buffer();
validHeaderV2.writeByte(0x0D);
validHeaderV2.writeByte(0x0A);
validHeaderV2.writeByte(0x0D);
validHeaderV2.writeByte(0x0A);
validHeaderV2.writeByte(0x00);
validHeaderV2.writeByte(0x0D);
validHeaderV2.writeByte(0x0A);
validHeaderV2.writeByte(0x51);
validHeaderV2.writeByte(0x55);
validHeaderV2.writeByte(0x49);
validHeaderV2.writeByte(0x54);
validHeaderV2.writeByte(0x0A);
result = HAProxyMessageDecoder.detectProtocol(validHeaderV2);
assertEquals(ProtocolDetectionState.DETECTED, result.state());
assertEquals(HAProxyProtocolVersion.V2, result.detectedProtocol());
validHeaderV2.release();
final ByteBuf incompleteHeader = buffer();
incompleteHeader.writeByte(0x0D);
incompleteHeader.writeByte(0x0A);
incompleteHeader.writeByte(0x0D);
incompleteHeader.writeByte(0x0A);
incompleteHeader.writeByte(0x00);
incompleteHeader.writeByte(0x0D);
incompleteHeader.writeByte(0x0A);
result = HAProxyMessageDecoder.detectProtocol(incompleteHeader);
assertEquals(ProtocolDetectionState.NEEDS_MORE_DATA, result.state());
assertNull(result.detectedProtocol());
incompleteHeader.release();
}
@Test
public void testNestedTLV() throws Exception {
ByteArrayOutputStream headerWriter = new ByteArrayOutputStream();
//src_ip = "AAAA", dst_ip = "BBBB", src_port = "CC", dst_port = "DD"
headerWriter.write(new byte[] {'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'C', 'C', 'D', 'D'});
//write TLVs
int countOfTLVs = 8100;
ByteBuffer tlvLengthBuf = ByteBuffer.allocate(2);
tlvLengthBuf.order(ByteOrder.BIG_ENDIAN);
short totalLength = (short) (countOfTLVs * (1 + 2 + 1 + 4));
for (int i = 0; i < countOfTLVs; i++) {
//write PP2_TYPE_SSL TLV
headerWriter.write(0x20); //PP2_TYPE_SSL
//notice that the TLV length cannot be bigger than 0xffff
totalLength -= 1 + 2; //exclude type and length themselves
tlvLengthBuf.clear();
tlvLengthBuf.putShort(totalLength);
//add to the header
headerWriter.write(tlvLengthBuf.array());
//write client field
headerWriter.write(1);
//write verify field
headerWriter.write(new byte[] {'V', 'V', 'V', 'V'});
//subtract the client and verify fields
totalLength -= 1 + 4;
}
byte[] header = headerWriter.toByteArray();
ByteBuffer numsWrite = ByteBuffer.allocate(2);
numsWrite.order(ByteOrder.BIG_ENDIAN);
numsWrite.putShort((short) header.length);
final ByteBuf data = Unpooled.buffer();
data.writeBytes(new byte[] {
(byte) 0x0D,
(byte) 0x0A,
(byte) 0x0D,
(byte) 0x0A,
(byte) 0x00,
(byte) 0x0D,
(byte) 0x0A,
(byte) 0x51,
(byte) 0x55,
(byte) 0x49,
(byte) 0x54,
(byte) 0x0A
});
//verCmd = 32
byte versionCmd = 0x20 | 1; //V2 | ProxyCmd
data.writeByte(versionCmd);
data.writeByte(17); //TPAF_TCP4_BYTE
data.writeBytes(numsWrite.array());
data.writeBytes(header);
assertThrows(HAProxyProtocolException.class, new Executable() {
@Override
public void execute() {
ch.writeInbound(data);
}
});
}
}
|
HAProxyMessageDecoderTest
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/core/src/test/java/io/quarkus/bootstrap/resolver/test/DirectDependencyOverridesManagedDependencyTestCase.java
|
{
"start": 175,
"end": 522
}
|
class ____ extends CollectDependenciesBase {
@Override
protected void setupDependencies() {
final TsArtifact x12 = new TsArtifact("x", "2");
final TsArtifact x13 = new TsArtifact("x", "3");
installAsDep(x12);
install(x13);
addManagedDep(x13);
}
}
|
DirectDependencyOverridesManagedDependencyTestCase
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/junit/jupiter/BDDSoftAssertionsExtensionIntegrationTest.java
|
{
"start": 3591,
"end": 3763
}
|
class ____ extends AbstractSoftAssertionsExample {
}
@TestInstance(PER_CLASS)
@Disabled("Executed via the JUnit Platform Test Kit")
static
|
TestInstancePerMethodExample
|
java
|
grpc__grpc-java
|
cronet/src/test/java/io/grpc/cronet/CronetClientStreamTest.java
|
{
"start": 2622,
"end": 3434
}
|
class ____ {
@Rule public final MockitoRule mocks = MockitoJUnit.rule();
@Mock private CronetClientTransport transport;
private Metadata metadata = new Metadata();
@Mock private StreamBuilderFactory factory;
@Mock private BidirectionalStream cronetStream;
@Mock private ClientStreamListener clientListener;
@Mock private BidirectionalStream.Builder builder;
private final Object lock = new Object();
private final TransportTracer transportTracer = TransportTracer.getDefaultFactory().create();
private final Executor executor = Runnable::run;
CronetClientStream clientStream;
private MethodDescriptor.Marshaller<Void> marshaller = TestMethodDescriptors.voidMarshaller();
private MethodDescriptor<?, ?> method = TestMethodDescriptors.voidMethod();
private static
|
CronetClientStreamTest
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/resource/transcode/TranscoderRegistry.java
|
{
"start": 363,
"end": 676
}
|
class ____ {
private final List<Entry<?, ?>> transcoders = new ArrayList<>();
/**
* Registers the given {@link com.bumptech.glide.load.resource.transcode.ResourceTranscoder} using
* the given classes so it can later be retrieved using the given classes.
*
* @param decodedClass The
|
TranscoderRegistry
|
java
|
netty__netty
|
codec-memcache/src/main/java/io/netty/handler/codec/memcache/MemcacheMessage.java
|
{
"start": 845,
"end": 1274
}
|
interface ____ extends MemcacheObject, ReferenceCounted {
/**
* Increases the reference count by {@code 1}.
*/
@Override
MemcacheMessage retain();
/**
* Increases the reference count by the specified {@code increment}.
*/
@Override
MemcacheMessage retain(int increment);
@Override
MemcacheMessage touch();
@Override
MemcacheMessage touch(Object hint);
}
|
MemcacheMessage
|
java
|
apache__camel
|
components/camel-jsonpath/src/test/java/org/apache/camel/jsonpath/JsonPathTransformONielEscapedTest.java
|
{
"start": 1142,
"end": 2027
}
|
class ____ extends CamelTestSupport {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.transform().jsonpath("$.store.book[?(@.author == 'John O\\'Niel')].title")
.to("mock:authors");
}
};
}
@Test
public void testAuthors() throws Exception {
getMockEndpoint("mock:authors").expectedMessageCount(1);
template.sendBody("direct:start", new File("src/test/resources/books.json"));
MockEndpoint.assertIsSatisfied(context);
List<?> titles = getMockEndpoint("mock:authors").getReceivedExchanges().get(0).getIn().getBody(List.class);
assertEquals("Camels in Space", titles.get(0));
}
}
|
JsonPathTransformONielEscapedTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/FilterManagerTest.java
|
{
"start": 273,
"end": 1351
}
|
class ____ extends TestCase {
static {
ClassLoader current = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(null);
assertNotNull(FilterManager.getFilter("stat"));
} finally {
Thread.currentThread().setContextClassLoader(current);
}
}
public void test_instance() throws Exception {
new FilterManager();
}
public void test_loadFilter() throws Exception {
Exception error = null;
try {
FilterManager.loadFilter(new ArrayList<Filter>(), ErrorFilter.class.getName());
} catch (SQLException e) {
error = e;
}
assertNotNull(error);
}
public void test_loadFilter_2() throws Exception {
Exception error = null;
try {
FilterManager.loadFilter(new ArrayList<Filter>(), ErrorFilter.class.getName());
} catch (SQLException e) {
error = e;
}
assertNotNull(error);
}
public static
|
FilterManagerTest
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/http/client/handler/BeanResponseHandler.java
|
{
"start": 1044,
"end": 1470
}
|
class ____<T> extends AbstractResponseHandler<T> {
@Override
public HttpRestResult<T> convertResult(HttpClientResponse response, Type responseType) throws Exception {
final Header headers = response.getHeaders();
T extractBody = JacksonUtils.toObj(response.getBody(), responseType);
return new HttpRestResult<>(headers, response.getStatusCode(), extractBody, null);
}
}
|
BeanResponseHandler
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/query/KvStateRegistryTest.java
|
{
"start": 13181,
"end": 14792
}
|
class ____ extends TypeSerializer<String> {
private static final long serialVersionUID = -3744051158625555607L;
@Override
public boolean isImmutableType() {
return false;
}
@Override
public TypeSerializer<String> duplicate() {
return new DeepCopyingStringSerializer();
}
@Override
public String createInstance() {
return null;
}
@Override
public String copy(String from) {
return null;
}
@Override
public String copy(String from, String reuse) {
return null;
}
@Override
public int getLength() {
return 0;
}
@Override
public void serialize(String record, DataOutputView target) throws IOException {}
@Override
public String deserialize(DataInputView source) throws IOException {
return null;
}
@Override
public String deserialize(String reuse, DataInputView source) throws IOException {
return null;
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {}
@Override
public boolean equals(Object obj) {
return obj instanceof DeepCopyingStringSerializer;
}
@Override
public int hashCode() {
return 0;
}
@Override
public TypeSerializerSnapshot<String> snapshotConfiguration() {
return null;
}
}
}
|
DeepCopyingStringSerializer
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/RedissonListMultimap.java
|
{
"start": 14517,
"end": 18024
}
|
class ____ extends RedissonList<V> {
private final String setName;
private final K key;
public InnerList(String setName, K key) {
super(RedissonListMultimap.this.codec, RedissonListMultimap.this.commandExecutor, setName, null);
this.setName = setName;
this.key = key;
}
@Override
public RFuture<Boolean> addAsync(V value) {
return RedissonListMultimap.this.putAsync(key, value);
}
@Override
public RFuture<Boolean> addAllAsync(Collection<? extends V> c) {
return RedissonListMultimap.this.putAllAsync(key, c);
}
@Override
public RFuture<Boolean> removeAsync(Object value) {
return RedissonListMultimap.this.removeAsync(key, value);
}
@Override
public RFuture<Boolean> removeAllAsync(Collection<?> c) {
if (c.isEmpty()) {
return new CompletableFutureWrapper<>(false);
}
List<Object> args = new ArrayList<>(c.size() + 1);
args.add(encodeMapKey(key));
encode(args, c);
return commandExecutor.evalWriteAsync(RedissonListMultimap.this.getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
"local v = 0 " +
"for i = 2, #ARGV, 1 do "
+ "if redis.call('lrem', KEYS[2], 0, ARGV[i]) == 1 then "
+ "v = 1; "
+ "end "
+"end "
+ "if v == 1 and redis.call('exists', KEYS[2]) == 0 then "
+ "redis.call('hdel', KEYS[1], ARGV[1]); "
+"end "
+ "return v",
Arrays.asList(RedissonListMultimap.this.getRawName(), setName),
args.toArray());
}
@Override
public RFuture<Boolean> deleteAsync() {
ByteBuf keyState = encodeMapKey(key);
return RedissonListMultimap.this.fastRemoveAsync(Arrays.asList(keyState),
Arrays.asList(RedissonListMultimap.this.getRawName(), setName), RedisCommands.EVAL_BOOLEAN_AMOUNT);
}
@Override
public RFuture<Boolean> clearExpireAsync() {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Long> remainTimeToLiveAsync() {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Void> renameAsync(String newName) {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Boolean> renamenxAsync(String newName) {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
}
}
|
InnerList
|
java
|
quarkusio__quarkus
|
integration-tests/quartz-deferred-datasource/src/main/java/io/quarkus/it/quartz/ManualScheduledCounter.java
|
{
"start": 2084,
"end": 2592
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 7523966565034938905L;
public Date date;
public CounterFireTime(Date date) {
this.date = date;
}
public CounterFireTime() {
this.date = Date.from(Instant.now());
}
}
@RegisterForReflection(serialization = true, targets = { CounterFireTime.class, Number.class, Date.class, Long.class,
Integer.class })
public static
|
CounterFireTime
|
java
|
quarkusio__quarkus
|
extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/query/QuerySetReader.java
|
{
"start": 187,
"end": 439
}
|
class ____ extends BaseCollectionReader {
public QuerySetReader(Type genericType, QueryObjectMapper mapper) {
super(genericType, mapper);
}
@Override
public Object create() {
return new HashSet<>();
}
}
|
QuerySetReader
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java
|
{
"start": 646,
"end": 2478
}
|
class ____ {
/**
* Check that the task does not update git tags if the current version is a snapshot of the first patch release.
*/
@Test
public void needsGitTags_withFirstSnapshot_returnsFalse() {
assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.0-SNAPSHOT"), is(false));
}
/**
* Check that the task does update git tags if the current version is a snapshot after the first patch release.
*/
@Test
public void needsGitTags_withLaterSnapshot_returnsTrue() {
assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.1-SNAPSHOT"), is(true));
}
/**
* Check that the task does not update git tags if the current version is the first patch release in a minor series.
*/
@Test
public void needsGitTags_withFirstPatchRelease_returnsFalse() {
assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.0"), is(false));
}
/**
* Check that the task does update git tags if the current version is later than the first patch release in a minor series.
*/
@Test
public void needsGitTags_withLaterPatchRelease_returnsTrue() {
assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.1"), is(true));
}
/**
* Check that the task does not update git tags if the current version is a first alpha prerelease.
*/
@Test
public void needsGitTags_withFirsAlphaRelease_returnsFalse() {
assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.0-alpha1"), is(false));
}
/**
* Check that the task does update git tags if the current version is a prerelease after the first alpha.
*/
@Test
public void needsGitTags_withLaterAlphaRelease_returnsFalse() {
assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.0-alpha2"), is(true));
}
}
|
GenerateReleaseNotesTaskTest
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-multiple/src/test/java/org/apache/dubbo/registry/multiple/MultipleRegistryTestUtil.java
|
{
"start": 2056,
"end": 4324
}
|
class ____ {
public static ZookeeperRegistry getZookeeperRegistry(Collection<Registry> registryCollection) {
for (Registry registry : registryCollection) {
if (registry instanceof ListenerRegistryWrapper) {
registry = ((ListenerRegistryWrapper) registry).getRegistry();
}
if (registry instanceof ZookeeperRegistry) {
return (ZookeeperRegistry) registry;
}
}
return null;
}
/**
* copy from @org.apache.dubbo.registry.integration.RegistryDirectory#notify(java.util.List)
*
* @param urls
* @return
*/
public static List<URL> getProviderURLsFromNotifyURLS(List<URL> urls) {
Map<String, List<URL>> categoryUrls = urls.stream()
.filter(Objects::nonNull)
.filter(MultipleRegistryTestUtil::isValidCategory)
.filter(MultipleRegistryTestUtil::isNotCompatibleFor26x)
.collect(Collectors.groupingBy(url -> {
if (UrlUtils.isConfigurator(url)) {
return CONFIGURATORS_CATEGORY;
} else if (UrlUtils.isRoute(url)) {
return ROUTERS_CATEGORY;
} else if (UrlUtils.isProvider(url)) {
return PROVIDERS_CATEGORY;
}
return "";
}));
// providers
List<URL> providerURLs = categoryUrls.getOrDefault(PROVIDERS_CATEGORY, Collections.emptyList());
return providerURLs;
}
private static boolean isValidCategory(URL url) {
String category = url.getCategory(DEFAULT_CATEGORY);
if ((ROUTERS_CATEGORY.equals(category) || ROUTE_PROTOCOL.equals(url.getProtocol()))
|| PROVIDERS_CATEGORY.equals(category)
|| CONFIGURATORS_CATEGORY.equals(category)
|| DYNAMIC_CONFIGURATORS_CATEGORY.equals(category)
|| APP_DYNAMIC_CONFIGURATORS_CATEGORY.equals(category)) {
return true;
}
return false;
}
private static boolean isNotCompatibleFor26x(URL url) {
return StringUtils.isEmpty(url.getParameter(COMPATIBLE_CONFIG_KEY));
}
}
|
MultipleRegistryTestUtil
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/ttl/TtlMapStatePerElementTestContext.java
|
{
"start": 918,
"end": 2006
}
|
class ____ extends TtlMapStateTestContext<String, String> {
private static final int TEST_KEY = 1;
private static final String TEST_VAL1 = "test value1";
private static final String TEST_VAL2 = "test value2";
private static final String TEST_VAL3 = "test value3";
@Override
void initTestValues() {
updateEmpty = TEST_VAL1;
updateUnexpired = TEST_VAL2;
updateExpired = TEST_VAL3;
getUpdateEmpty = TEST_VAL1;
getUnexpired = TEST_VAL2;
getUpdateExpired = TEST_VAL3;
}
@Override
public void update(String value) throws Exception {
ttlState.put(TEST_KEY, value);
}
@Override
public String get() throws Exception {
String value = ttlState.get(TEST_KEY);
assert (getOriginal() == null && !ttlState.contains(TEST_KEY))
|| (getOriginal() != null && ttlState.contains(TEST_KEY));
return value;
}
@Override
public Object getOriginal() throws Exception {
return ttlState.original.get(TEST_KEY);
}
}
|
TtlMapStatePerElementTestContext
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/iomanager/BufferFileChannelReader.java
|
{
"start": 1156,
"end": 2752
}
|
class ____ {
private final ByteBuffer header = ByteBuffer.allocateDirect(8);
private final FileChannel fileChannel;
BufferFileChannelReader(FileChannel fileChannel) {
this.fileChannel = fileChannel;
}
/**
* Reads data from the object's file channel into the given buffer.
*
* @param buffer the buffer to read into
* @return whether the end of the file has been reached (<tt>true</tt>) or not (<tt>false</tt>)
*/
public boolean readBufferFromFileChannel(Buffer buffer) throws IOException {
checkArgument(fileChannel.size() - fileChannel.position() > 0);
// Read header
header.clear();
fileChannel.read(header);
header.flip();
final boolean isBuffer = header.getInt() == 1;
final int size = header.getInt();
if (size > buffer.getMaxCapacity()) {
throw new IllegalStateException(
"Buffer is too small for data: "
+ buffer.getMaxCapacity()
+ " bytes available, but "
+ size
+ " needed. This is most likely due to an serialized event, which is larger than the buffer size.");
}
checkArgument(buffer.getSize() == 0, "Buffer not empty");
fileChannel.read(buffer.getNioBuffer(0, size));
buffer.setSize(size);
buffer.setDataType(isBuffer ? Buffer.DataType.DATA_BUFFER : Buffer.DataType.EVENT_BUFFER);
return fileChannel.size() - fileChannel.position() == 0;
}
}
|
BufferFileChannelReader
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/inheritance/joined/Company.java
|
{
"start": 423,
"end": 885
}
|
class ____ extends Customer {
private String companyName;
private String companyAddress;
@Column
public String getCompanyName() {
return companyName;
}
public void setCompanyName(String companyName) {
this.companyName = companyName;
}
@Column(table = "CompanyAddress")
public String getCompanyAddress() {
return companyAddress;
}
public void setCompanyAddress(String companyAddress) {
this.companyAddress = companyAddress;
}
}
|
Company
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/LambdaRouteBuilderTest.java
|
{
"start": 1140,
"end": 3004
}
|
class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testLambda() throws Exception {
assertEquals(0, context.getRoutesSize());
LambdaRouteBuilder builder = rb -> rb.from("direct:start").to("mock:result");
context.addRoutes(new RouteBuilder(context) {
@Override
public void configure() throws Exception {
builder.accept(this);
}
});
context.start();
assertEquals(1, context.getRoutesSize());
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testLambdaTwo() throws Exception {
assertEquals(0, context.getRoutesSize());
RouteBuilder.addRoutes(context, rb -> rb.from("direct:start").to("mock:result"));
context.start();
assertEquals(1, context.getRoutesSize());
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testLambdaSimple() throws Exception {
assertEquals(0, context.getRoutesSize());
RouteBuilder.addRoutes(context, rb -> rb.from("direct:start").transform(rb.simple("Hello ${body}")).to("mock:result"));
context.start();
assertEquals(1, context.getRoutesSize());
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "World");
assertMockEndpointsSatisfied();
}
}
|
LambdaRouteBuilderTest
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/dynamicfiltering/DynamicFilteringDataCollectorOperatorCoordinator.java
|
{
"start": 2220,
"end": 7892
}
|
class ____
implements OperatorCoordinator, CoordinationRequestHandler {
private static final Logger LOG =
LoggerFactory.getLogger(DynamicFilteringDataCollectorOperatorCoordinator.class);
private final CoordinatorStore coordinatorStore;
private final List<String> dynamicFilteringDataListenerIDs;
private DynamicFilteringData receivedFilteringData;
public DynamicFilteringDataCollectorOperatorCoordinator(
Context context, List<String> dynamicFilteringDataListenerIDs) {
this.coordinatorStore = checkNotNull(context.getCoordinatorStore());
this.dynamicFilteringDataListenerIDs = checkNotNull(dynamicFilteringDataListenerIDs);
}
@Override
public void start() throws Exception {}
@Override
public void close() throws Exception {}
@Override
public void handleEventFromOperator(int subtask, int attemptNumber, OperatorEvent event) {
DynamicFilteringData currentData =
((DynamicFilteringEvent) ((SourceEventWrapper) event).getSourceEvent()).getData();
if (receivedFilteringData == null) {
receivedFilteringData = currentData;
} else {
// Since there might be speculative execution or failover, we may receive multiple
// notifications, and we can't tell for sure which one is valid for further processing.
if (DynamicFilteringData.isEqual(receivedFilteringData, currentData)) {
// If the notifications contain exactly the same data, everything is alright, and
// we don't need to send the event again.
return;
} else {
// In case the mismatching of the source filtering result and the dim data, which
// may leads to incorrect result, trigger global failover for fully recomputing.
throw new IllegalStateException(
"DynamicFilteringData is recomputed but not equal. "
+ "Triggering global failover in case the result is incorrect. "
+ " It's recommended to re-run the job with dynamic filtering disabled.");
}
}
for (String listenerID : dynamicFilteringDataListenerIDs) {
coordinatorStore.compute(
listenerID,
(key, oldValue) -> {
// The value for a listener ID can be a source coordinator listening to an
// event, or an event waiting to be retrieved
if (oldValue == null || oldValue instanceof OperatorEvent) {
// If the listener has not been registered, or after a global failover
// without cleanup the store, we simply update it to the latest value.
// The listener coordinator would retrieve the event once it's started.
LOG.info(
"Updating event {} before the source coordinator with ID {} is registered",
event,
listenerID);
return event;
} else {
checkState(
oldValue instanceof OperatorCoordinator,
"The existing value for "
+ listenerID
+ "is expected to be an operator coordinator, but it is in fact "
+ oldValue);
LOG.info(
"Distributing event {} to source coordinator with ID {}",
event,
listenerID);
try {
// Subtask index and attempt number is not necessary for handling
// DynamicFilteringEvent.
((OperatorCoordinator) oldValue)
.handleEventFromOperator(0, 0, event);
} catch (Exception e) {
ExceptionUtils.rethrow(e);
}
// Dynamic filtering event is expected to be sent only once. So after
// the coordinator is notified, it can be removed from the store.
return null;
}
});
}
}
@Override
public CompletableFuture<CoordinationResponse> handleCoordinationRequest(
CoordinationRequest request) {
throw new UnsupportedOperationException();
}
@Override
public void subtaskReset(int subtask, long checkpointId) {}
@Override
public void executionAttemptFailed(
int subtask, int attemptNumber, @Nullable Throwable reason) {}
@Override
public void executionAttemptReady(int subtask, int attemptNumber, SubtaskGateway gateway) {}
@Override
public void checkpointCoordinator(long checkpointId, CompletableFuture<byte[]> result)
throws Exception {}
@Override
public void notifyCheckpointComplete(long checkpointId) {}
@Override
public void resetToCheckpoint(long checkpointId, @Nullable byte[] checkpointData)
throws Exception {}
/** Provider for {@link DynamicFilteringDataCollectorOperatorCoordinator}. */
public static
|
DynamicFilteringDataCollectorOperatorCoordinator
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/comparable/AbstractUniversalComparableAssert_usingComparator_Test.java
|
{
"start": 946,
"end": 1592
}
|
class ____ extends AbstractUniversalComparableAssertBaseTest {
private final Comparator<Comparable<String>> comparator = alwaysEqual();
@Override
protected UniversalComparableAssert<String> invoke_api_method() {
// in that, we don't care of the comparator, the point to check is that we switch correctly of comparator
return assertions.usingComparator(comparator);
}
@Override
protected void verify_internal_effects() {
then(getObjects(assertions).getComparator()).isSameAs(comparator);
then(getComparables(assertions).getComparator()).isSameAs(comparator);
}
}
|
AbstractUniversalComparableAssert_usingComparator_Test
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsMessageHeaderContentBasedRouterTest.java
|
{
"start": 1571,
"end": 3314
}
|
class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testCBR() throws Exception {
getMockEndpoint("mock:a").expectedMessageCount(0);
getMockEndpoint("mock:b").expectedMessageCount(1);
template.sendBody("activemq:queue:JmsInOutRoutingSlipTest.start", "Hello World");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
Predicate isA = header("route").isEqualTo("a");
Predicate isB = header("route").isEqualTo("b");
from("activemq:queue:JmsInOutRoutingSlipTest.start")
.bean(MyPreProcessorBean.class, "determineRouting")
.choice()
.when(isA).to("mock:a")
.when(isB).to("mock:b")
.end();
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
public static
|
JmsMessageHeaderContentBasedRouterTest
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/spi/ExtendedLogger.java
|
{
"start": 12095,
"end": 12575
}
|
class ____
* method when location information needs to be logged.
* @param level The logging Level to check.
* @param marker A Marker or null.
* @param message The message.
* @param t the exception to log, including its stack trace.
*/
void logIfEnabled(String fqcn, Level level, Marker marker, String message, Throwable t);
/**
* Logs a message if the specified level is active.
*
* @param fqcn The fully qualified
|
and
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/snapshots/Snapshot.java
|
{
"start": 996,
"end": 4081
}
|
class ____ implements Writeable {
private static final TransportVersion PROJECT_ID_IN_SNAPSHOT = TransportVersion.fromName("project_id_in_snapshot");
private final ProjectId projectId;
private final String repository;
private final SnapshotId snapshotId;
private final int hashCode;
/**
* Constructs a snapshot.
*/
@FixForMultiProject
@Deprecated(forRemoval = true)
public Snapshot(final String repository, final SnapshotId snapshotId) {
this(ProjectId.DEFAULT, repository, snapshotId);
}
/**
* Constructs a snapshot.
*/
public Snapshot(ProjectId projectId, final String repository, final SnapshotId snapshotId) {
this.projectId = projectId;
this.repository = Objects.requireNonNull(repository);
this.snapshotId = Objects.requireNonNull(snapshotId);
this.hashCode = computeHashCode();
}
/**
* Constructs a snapshot from the stream input.
*/
public Snapshot(final StreamInput in) throws IOException {
if (in.getTransportVersion().supports(PROJECT_ID_IN_SNAPSHOT) == false) {
projectId = ProjectId.DEFAULT;
} else {
projectId = ProjectId.readFrom(in);
}
repository = in.readString();
snapshotId = new SnapshotId(in);
hashCode = computeHashCode();
}
public ProjectId getProjectId() {
return projectId;
}
/**
* Gets the repository name for the snapshot.
*/
public String getRepository() {
return repository;
}
/**
* Gets the snapshot id for the snapshot.
*/
public SnapshotId getSnapshotId() {
return snapshotId;
}
@Override
public String toString() {
return projectId + ":" + repository + ":" + snapshotId.toString();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Snapshot that = (Snapshot) o;
return projectId.equals(that.projectId) && repository.equals(that.repository) && snapshotId.equals(that.snapshotId);
}
@Override
public int hashCode() {
return hashCode;
}
private int computeHashCode() {
return Objects.hash(projectId, repository, snapshotId);
}
@Override
public void writeTo(final StreamOutput out) throws IOException {
if (out.getTransportVersion().supports(PROJECT_ID_IN_SNAPSHOT) == false) {
if (ProjectId.DEFAULT.equals(projectId) == false) {
final var message = "Cannot write instance with non-default project id "
+ projectId
+ " to version before "
+ PROJECT_ID_IN_SNAPSHOT;
assert false : message;
throw new IllegalArgumentException(message);
}
} else {
projectId.writeTo(out);
}
out.writeString(repository);
snapshotId.writeTo(out);
}
}
|
Snapshot
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/web/reactive/webflux/MyRestController.java
|
{
"start": 1118,
"end": 1916
}
|
class ____ {
private final UserRepository userRepository;
private final CustomerRepository customerRepository;
public MyRestController(UserRepository userRepository, CustomerRepository customerRepository) {
this.userRepository = userRepository;
this.customerRepository = customerRepository;
}
@GetMapping("/{userId}")
public Mono<User> getUser(@PathVariable Long userId) {
return this.userRepository.findById(userId);
}
@GetMapping("/{userId}/customers")
public Flux<Customer> getUserCustomers(@PathVariable Long userId) {
return this.userRepository.findById(userId).flatMapMany(this.customerRepository::findByUser);
}
@DeleteMapping("/{userId}")
public Mono<Void> deleteUser(@PathVariable Long userId) {
return this.userRepository.deleteById(userId);
}
}
|
MyRestController
|
java
|
spring-projects__spring-security
|
webauthn/src/main/java/org/springframework/security/web/webauthn/management/ImmutableRelyingPartyRegistrationRequest.java
|
{
"start": 1076,
"end": 2117
}
|
class ____ implements RelyingPartyRegistrationRequest {
private final PublicKeyCredentialCreationOptions options;
private final RelyingPartyPublicKey publicKey;
/**
* Creates a new instance.
* @param options the {@link PublicKeyCredentialCreationOptions} that were saved when
* {@link WebAuthnRelyingPartyOperations#createCredentialRequestOptions(PublicKeyCredentialRequestOptionsRequest)}
* was called.
* @param publicKey this is submitted by the client and if validated stored.
*/
public ImmutableRelyingPartyRegistrationRequest(PublicKeyCredentialCreationOptions options,
@Nullable RelyingPartyPublicKey publicKey) {
Assert.notNull(options, "options cannot be null");
Assert.notNull(publicKey, "publicKey cannot be null");
this.options = options;
this.publicKey = publicKey;
}
@Override
public PublicKeyCredentialCreationOptions getCreationOptions() {
return this.options;
}
@Override
public RelyingPartyPublicKey getPublicKey() {
return this.publicKey;
}
}
|
ImmutableRelyingPartyRegistrationRequest
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ModuleValidationTest.java
|
{
"start": 10441,
"end": 11010
}
|
class ____ {}");
CompilerTests.daggerCompiler(module)
.compile(
subject -> {
subject.hasErrorCount(1);
switch (CompilerTests.backend(subject)) {
case JAVAC:
subject.hasErrorContaining("int is not a valid subcomponent type")
.onSource(module)
.onLine(5);
break;
case KSP:
// TODO(b/245954367): Remove this pathway once this bug is fixed.
// KSP interprets the int.
|
TestModule
|
java
|
apache__rocketmq
|
proxy/src/test/java/org/apache/rocketmq/proxy/grpc/v2/consumer/AckMessageActivityTest.java
|
{
"start": 2151,
"end": 12819
}
|
class ____ extends BaseActivityTest {
private AckMessageActivity ackMessageActivity;
private static final String TOPIC = "topic";
private static final String GROUP = "group";
@Before
public void before() throws Throwable {
super.before();
this.ackMessageActivity = new AckMessageActivity(messagingProcessor, grpcClientSettingsManager, grpcChannelManager);
}
@Test
public void testAckMessage() throws Throwable {
ConfigurationManager.getProxyConfig().setEnableBatchAck(false);
String msg1 = "msg1";
String msg2 = "msg2";
String msg3 = "msg3";
when(this.messagingProcessor.ackMessage(any(), any(), eq(msg1), anyString(), anyString()))
.thenThrow(new ProxyException(ProxyExceptionCode.INVALID_RECEIPT_HANDLE, "receipt handle is expired"));
AckResult msg2AckResult = new AckResult();
msg2AckResult.setStatus(AckStatus.OK);
when(this.messagingProcessor.ackMessage(any(), any(), eq(msg2), anyString(), anyString()))
.thenReturn(CompletableFuture.completedFuture(msg2AckResult));
AckResult msg3AckResult = new AckResult();
msg3AckResult.setStatus(AckStatus.NO_EXIST);
when(this.messagingProcessor.ackMessage(any(), any(), eq(msg3), anyString(), anyString()))
.thenReturn(CompletableFuture.completedFuture(msg3AckResult));
{
AckMessageResponse response = this.ackMessageActivity.ackMessage(
createContext(),
AckMessageRequest.newBuilder()
.setTopic(Resource.newBuilder().setName(TOPIC).build())
.setGroup(Resource.newBuilder().setName(GROUP).build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(msg1)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis() - 10000, 1000))
.build())
.build()
).get();
assertEquals(Code.INVALID_RECEIPT_HANDLE, response.getStatus().getCode());
}
{
AckMessageResponse response = this.ackMessageActivity.ackMessage(
createContext(),
AckMessageRequest.newBuilder()
.setTopic(Resource.newBuilder().setName(TOPIC).build())
.setGroup(Resource.newBuilder().setName(GROUP).build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(msg2)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis() - 10000, 1000))
.build())
.build()
).get();
assertEquals(Code.OK, response.getStatus().getCode());
}
{
AckMessageResponse response = this.ackMessageActivity.ackMessage(
createContext(),
AckMessageRequest.newBuilder()
.setTopic(Resource.newBuilder().setName(TOPIC).build())
.setGroup(Resource.newBuilder().setName(GROUP).build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(msg3)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis() - 10000, 1000))
.build())
.build()
).get();
assertEquals(Code.INTERNAL_SERVER_ERROR, response.getStatus().getCode());
}
{
AckMessageResponse response = this.ackMessageActivity.ackMessage(
createContext(),
AckMessageRequest.newBuilder()
.setTopic(Resource.newBuilder().setName(TOPIC).build())
.setGroup(Resource.newBuilder().setName(GROUP).build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(msg1)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis() - 10000, 1000))
.build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(msg2)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
.build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(msg3)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
.build())
.build()
).get();
assertEquals(Code.MULTIPLE_RESULTS, response.getStatus().getCode());
assertEquals(3, response.getEntriesCount());
assertEquals(Code.INVALID_RECEIPT_HANDLE, response.getEntries(0).getStatus().getCode());
assertEquals(Code.OK, response.getEntries(1).getStatus().getCode());
assertEquals(Code.INTERNAL_SERVER_ERROR, response.getEntries(2).getStatus().getCode());
}
}
@Test
public void testAckMessageInBatch() throws Throwable {
ConfigurationManager.getProxyConfig().setEnableBatchAck(true);
String successMessageId = "msg1";
String notOkMessageId = "msg2";
String exceptionMessageId = "msg3";
doAnswer((Answer<CompletableFuture<List<BatchAckResult>>>) invocation -> {
List<ReceiptHandleMessage> receiptHandleMessageList = invocation.getArgument(1, List.class);
List<BatchAckResult> batchAckResultList = new ArrayList<>();
for (ReceiptHandleMessage receiptHandleMessage : receiptHandleMessageList) {
BatchAckResult batchAckResult;
if (receiptHandleMessage.getMessageId().equals(successMessageId)) {
AckResult ackResult = new AckResult();
ackResult.setStatus(AckStatus.OK);
batchAckResult = new BatchAckResult(receiptHandleMessage, ackResult);
} else if (receiptHandleMessage.getMessageId().equals(notOkMessageId)) {
AckResult ackResult = new AckResult();
ackResult.setStatus(AckStatus.NO_EXIST);
batchAckResult = new BatchAckResult(receiptHandleMessage, ackResult);
} else {
batchAckResult = new BatchAckResult(receiptHandleMessage, new ProxyException(ProxyExceptionCode.INVALID_RECEIPT_HANDLE, ""));
}
batchAckResultList.add(batchAckResult);
}
return CompletableFuture.completedFuture(batchAckResultList);
}).when(this.messagingProcessor).batchAckMessage(any(), anyList(), anyString(), anyString());
{
AckMessageResponse response = this.ackMessageActivity.ackMessage(
createContext(),
AckMessageRequest.newBuilder()
.setTopic(Resource.newBuilder().setName(TOPIC).build())
.setGroup(Resource.newBuilder().setName(GROUP).build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(successMessageId)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
.build())
.build()
).get();
assertEquals(Code.OK, response.getStatus().getCode());
}
{
AckMessageResponse response = this.ackMessageActivity.ackMessage(
createContext(),
AckMessageRequest.newBuilder()
.setTopic(Resource.newBuilder().setName(TOPIC).build())
.setGroup(Resource.newBuilder().setName(GROUP).build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(notOkMessageId)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
.build())
.build()
).get();
assertEquals(Code.INTERNAL_SERVER_ERROR, response.getStatus().getCode());
}
{
AckMessageResponse response = this.ackMessageActivity.ackMessage(
createContext(),
AckMessageRequest.newBuilder()
.setTopic(Resource.newBuilder().setName(TOPIC).build())
.setGroup(Resource.newBuilder().setName(GROUP).build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(exceptionMessageId)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
.build())
.build()
).get();
assertEquals(Code.INVALID_RECEIPT_HANDLE, response.getStatus().getCode());
}
{
AckMessageResponse response = this.ackMessageActivity.ackMessage(
createContext(),
AckMessageRequest.newBuilder()
.setTopic(Resource.newBuilder().setName(TOPIC).build())
.setGroup(Resource.newBuilder().setName(GROUP).build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(successMessageId)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
.build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(notOkMessageId)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
.build())
.addEntries(AckMessageEntry.newBuilder()
.setMessageId(exceptionMessageId)
.setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
.build())
.build()
).get();
assertEquals(Code.MULTIPLE_RESULTS, response.getStatus().getCode());
assertEquals(3, response.getEntriesCount());
Map<String, Code> msgCode = new HashMap<>();
for (AckMessageResultEntry entry : response.getEntriesList()) {
msgCode.put(entry.getMessageId(), entry.getStatus().getCode());
}
assertEquals(Code.OK, msgCode.get(successMessageId));
assertEquals(Code.INTERNAL_SERVER_ERROR, msgCode.get(notOkMessageId));
assertEquals(Code.INVALID_RECEIPT_HANDLE, msgCode.get(exceptionMessageId));
}
}
}
|
AckMessageActivityTest
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/SystemUtils.java
|
{
"start": 48357,
"end": 48848
}
|
class ____ loaded.
* </p>
*
* @since 3.15.0
*/
public static final boolean IS_OS_ANDROID = Strings.CS.contains(SystemProperties.getJavaVendor(), "Android");
/**
* The constant {@code true} if this is HP-UX.
* <p>
* The result depends on the value of the {@link #OS_NAME} constant.
* </p>
* <p>
* The field will return {@code false} if {@link #OS_NAME} is {@code null}.
* </p>
* <p>
* This value is initialized when the
|
is
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-search-standalone-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/standalone/elasticsearch/management/MyDatastore.java
|
{
"start": 280,
"end": 662
}
|
class ____ {
private Map<UUID, ManagementTestEntity> content = new ConcurrentHashMap<>();
public void clear() {
content.clear();
}
public void put(ManagementTestEntity entity) {
content.put(entity.getId(), entity);
}
public Map<UUID, ManagementTestEntity> getContent() {
return Collections.unmodifiableMap(content);
}
}
|
MyDatastore
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/sql/exec/onetoone/OneToOneWithDerivedIdentityTest.java
|
{
"start": 1387,
"end": 1960
}
|
class ____ {
@Id
private Integer id;
@Basic
private String name;
@OneToOne(mappedBy = "id")
private PersonInfo personInfo;
public Integer getId() {
return this.id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public PersonInfo getPersonInfo() {
return this.personInfo;
}
public void setPersonInfo(PersonInfo personInfo) {
this.personInfo = personInfo;
}
}
@Entity(name = "PersonInfo")
public static
|
Person
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
|
{
"start": 86678,
"end": 87381
}
|
class ____ implements
SingleArcTransition<ContainerImpl, ContainerEvent> {
@Override
public void transition(ContainerImpl container, ContainerEvent event) {
// Pause the process/process-grp if it is supported by the container
container.dispatcher.getEventHandler().handle(
new ContainersLauncherEvent(container,
ContainersLauncherEventType.PAUSE_CONTAINER));
ContainerPauseEvent pauseEvent = (ContainerPauseEvent) event;
container.addDiagnostics(pauseEvent.getDiagnostic() + "\n");
}
}
/**
* Transitions upon receiving PAUSED_CONTAINER.
*/
@SuppressWarnings("unchecked") // dispatcher not typed
static
|
PauseContainerTransition
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/jdk/MapKeySerializationTest.java
|
{
"start": 2662,
"end": 2988
}
|
class ____ extends ValueSerializer<Object>
{
private String _null;
public NullKeySerializer(String s) { _null = s; }
@Override
public void serialize(Object value, JsonGenerator gen, SerializationContext provider) {
gen.writeName(_null);
}
}
static
|
NullKeySerializer
|
java
|
grpc__grpc-java
|
services/src/test/java/io/grpc/protobuf/services/HealthStatusManagerTest.java
|
{
"start": 13506,
"end": 13915
}
|
class ____ implements StreamObserver<HealthCheckResponse> {
final ArrayDeque<Object> responses = new ArrayDeque<>();
@Override
public void onNext(HealthCheckResponse value) {
responses.add(value);
}
@Override
public void onError(Throwable t) {
responses.add(t);
}
@Override
public void onCompleted() {
responses.add("onCompleted");
}
}
}
|
RespObserver
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/annotations/CollectionIdMutability.java
|
{
"start": 729,
"end": 889
}
|
interface ____ {
/**
* The MutabilityPlan implementation
*
* @see Mutability#value
*/
Class<? extends MutabilityPlan<?>> value();
}
|
CollectionIdMutability
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/fastjson/deserializer/issues3796/bean/ObjectJ_A.java
|
{
"start": 95,
"end": 1578
}
|
class ____ {
private long a;
private int b;
private int c;
private int d;
private int e;
private int f;
private int g;
private int h;
private int i;
private List<ObjectJ_B> j;
private List<ObjectC> k;
private List<ObjectJ_C> l;
private List<CommonObject> m;
public long getA() {
return a;
}
public void setA(long a) {
this.a = a;
}
public int getB() {
return b;
}
public void setB(int b) {
this.b = b;
}
public int getC() {
return c;
}
public void setC(int c) {
this.c = c;
}
public int getD() {
return d;
}
public void setD(int d) {
this.d = d;
}
public int getE() {
return e;
}
public void setE(int e) {
this.e = e;
}
public int getF() {
return f;
}
public void setF(int f) {
this.f = f;
}
public int getG() {
return g;
}
public void setG(int g) {
this.g = g;
}
public int getH() {
return h;
}
public void setH(int h) {
this.h = h;
}
public int getI() {
return i;
}
public void setI(int i) {
this.i = i;
}
public List<ObjectJ_B> getJ() {
return j;
}
public void setJ(List<ObjectJ_B> j) {
this.j = j;
}
public List<ObjectC> getK() {
return k;
}
public void setK(List<ObjectC> k) {
this.k = k;
}
public List<ObjectJ_C> getL() {
return l;
}
public void setL(List<ObjectJ_C> l) {
this.l = l;
}
public List<CommonObject> getM() {
return m;
}
public void setM(List<CommonObject> m) {
this.m = m;
}
}
|
ObjectJ_A
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/util/HalfSerializerSubscriberTest.java
|
{
"start": 1096,
"end": 7869
}
|
class ____ extends RxJavaTest {
@Test
public void utilityClass() {
TestHelper.checkUtilityClass(HalfSerializer.class);
}
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void reentrantOnNextOnNext() {
final AtomicInteger wip = new AtomicInteger();
final AtomicThrowable error = new AtomicThrowable();
final Subscriber[] a = { null };
final TestSubscriber ts = new TestSubscriber();
FlowableSubscriber s = new FlowableSubscriber() {
@Override
public void onSubscribe(Subscription s) {
ts.onSubscribe(s);
}
@Override
public void onNext(Object t) {
if (t.equals(1)) {
HalfSerializer.onNext(a[0], 2, wip, error);
}
ts.onNext(t);
}
@Override
public void onError(Throwable t) {
ts.onError(t);
}
@Override
public void onComplete() {
ts.onComplete();
}
};
a[0] = s;
s.onSubscribe(new BooleanSubscription());
HalfSerializer.onNext(s, 1, wip, error);
ts.assertValue(1).assertNoErrors().assertNotComplete();
}
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void reentrantOnNextOnError() {
final AtomicInteger wip = new AtomicInteger();
final AtomicThrowable error = new AtomicThrowable();
final Subscriber[] a = { null };
final TestSubscriber ts = new TestSubscriber();
FlowableSubscriber s = new FlowableSubscriber() {
@Override
public void onSubscribe(Subscription s) {
ts.onSubscribe(s);
}
@Override
public void onNext(Object t) {
if (t.equals(1)) {
HalfSerializer.onError(a[0], new TestException(), wip, error);
}
ts.onNext(t);
}
@Override
public void onError(Throwable t) {
ts.onError(t);
}
@Override
public void onComplete() {
ts.onComplete();
}
};
a[0] = s;
s.onSubscribe(new BooleanSubscription());
HalfSerializer.onNext(s, 1, wip, error);
ts.assertFailure(TestException.class, 1);
}
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void reentrantOnNextOnComplete() {
final AtomicInteger wip = new AtomicInteger();
final AtomicThrowable error = new AtomicThrowable();
final Subscriber[] a = { null };
final TestSubscriber ts = new TestSubscriber();
FlowableSubscriber s = new FlowableSubscriber() {
@Override
public void onSubscribe(Subscription s) {
ts.onSubscribe(s);
}
@Override
public void onNext(Object t) {
if (t.equals(1)) {
HalfSerializer.onComplete(a[0], wip, error);
}
ts.onNext(t);
}
@Override
public void onError(Throwable t) {
ts.onError(t);
}
@Override
public void onComplete() {
ts.onComplete();
}
};
a[0] = s;
s.onSubscribe(new BooleanSubscription());
HalfSerializer.onNext(s, 1, wip, error);
ts.assertResult(1);
}
@Test
@SuppressUndeliverable
@SuppressWarnings({ "rawtypes", "unchecked" })
public void reentrantErrorOnError() {
final AtomicInteger wip = new AtomicInteger();
final AtomicThrowable error = new AtomicThrowable();
final Subscriber[] a = { null };
final TestSubscriber ts = new TestSubscriber();
FlowableSubscriber s = new FlowableSubscriber() {
@Override
public void onSubscribe(Subscription s) {
ts.onSubscribe(s);
}
@Override
public void onNext(Object t) {
ts.onNext(t);
}
@Override
public void onError(Throwable t) {
ts.onError(t);
HalfSerializer.onError(a[0], new IOException(), wip, error);
}
@Override
public void onComplete() {
ts.onComplete();
}
};
a[0] = s;
s.onSubscribe(new BooleanSubscription());
HalfSerializer.onError(s, new TestException(), wip, error);
ts.assertFailure(TestException.class);
}
@Test
public void onNextOnCompleteRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final AtomicInteger wip = new AtomicInteger();
final AtomicThrowable error = new AtomicThrowable();
final TestSubscriber<Integer> ts = new TestSubscriber<>();
ts.onSubscribe(new BooleanSubscription());
Runnable r1 = new Runnable() {
@Override
public void run() {
HalfSerializer.onNext(ts, 1, wip, error);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
HalfSerializer.onComplete(ts, wip, error);
}
};
TestHelper.race(r1, r2);
ts.assertComplete().assertNoErrors();
assertTrue(ts.values().size() <= 1);
}
}
@Test
@SuppressUndeliverable
public void onErrorOnCompleteRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final AtomicInteger wip = new AtomicInteger();
final AtomicThrowable error = new AtomicThrowable();
final TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
ts.onSubscribe(new BooleanSubscription());
final TestException ex = new TestException();
Runnable r1 = new Runnable() {
@Override
public void run() {
HalfSerializer.onError(ts, ex, wip, error);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
HalfSerializer.onComplete(ts, wip, error);
}
};
TestHelper.race(r1, r2);
if (ts.completions() != 0) {
ts.assertResult();
} else {
ts.assertFailure(TestException.class);
}
}
}
}
|
HalfSerializerSubscriberTest
|
java
|
spring-projects__spring-security
|
core/src/test/java/org/springframework/security/authorization/method/PreFilterAuthorizationMethodInterceptorTests.java
|
{
"start": 11174,
"end": 12261
}
|
class ____ implements InterfaceAnnotationsOne, InterfaceAnnotationsTwo {
@PreFilter(value = "filterObject == 'john'", filterTarget = "filterTargetNotMatch")
public List<String> doSomethingListFilterTargetNotMatch(List<String> list) {
return list;
}
@PreFilter(value = "filterObject == 'john'", filterTarget = "list")
public List<String> doSomethingListFilterTargetMatch(List<String> list) {
return list;
}
@PreFilter("filterObject == 'john'")
public List<String> doSomethingListFilterTargetNotProvided(List<String> list) {
return list;
}
@PreFilter("filterObject == 'john'")
public String[] doSomethingArrayFilterTargetNotProvided(String[] array) {
return array;
}
public List<String> doSomethingTwoArgsFilterTargetNotProvided(String s, List<String> list) {
return list;
}
@PreFilter(value = "filterObject == authentication.name", filterTarget = "list")
public List<String> doSomethingArrayFilterAuthentication(List<String> list) {
return list;
}
@Override
public void inheritedAnnotations() {
}
}
public static
|
TestClass
|
java
|
spring-projects__spring-security
|
webauthn/src/main/java/org/springframework/security/web/webauthn/jackson/PublicKeyCredentialCreationOptionsMixin.java
|
{
"start": 1118,
"end": 1257
}
|
class ____ {
@JsonSerialize(using = DurationSerializer.class)
private @Nullable Duration timeout;
}
|
PublicKeyCredentialCreationOptionsMixin
|
java
|
google__guice
|
core/src/com/google/inject/internal/ProviderMethodsModule.java
|
{
"start": 8402,
"end": 9474
}
|
class ____ {
final Method method;
final Annotation annotation;
MethodAndAnnotation(Method method, Annotation annotation) {
this.method = method;
this.annotation = annotation;
}
}
/** Returns the annotation that is claimed by the scanner, or null if there is none. */
private Annotation getAnnotation(Binder binder, Method method) {
if (method.isBridge() || method.isSynthetic()) {
return null;
}
Annotation annotation = null;
for (Class<? extends Annotation> annotationClass : scanner.annotationClasses()) {
Annotation foundAnnotation = method.getAnnotation(annotationClass);
if (foundAnnotation != null) {
if (annotation != null) {
binder.addError(
"More than one annotation claimed by %s on method %s."
+ " Methods can only have one annotation claimed per scanner.",
scanner, method);
return null;
}
annotation = foundAnnotation;
}
}
return annotation;
}
private static final
|
MethodAndAnnotation
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AbstractClassAssert.java
|
{
"start": 23887,
"end": 23928
}
|
interface ____ { }
* @Force
*
|
Force
|
java
|
google__guava
|
android/guava-testlib/test/com/google/common/testing/ArbitraryInstancesTest.java
|
{
"start": 19961,
"end": 20014
}
|
interface ____ {}
public abstract static
|
SomeInterface
|
java
|
apache__camel
|
components/camel-debezium/camel-debezium-mongodb/src/test/java/org/apache/camel/component/debezium/DebeziumMongodbComponentTest.java
|
{
"start": 1334,
"end": 4870
}
|
class ____ {
@Test
void testIfConnectorEndpointCreatedWithConfig() throws Exception {
final Map<String, Object> params = new HashMap<>();
params.put("offsetStorageFileName", "/offset_test_file");
params.put("mongodbConnectionString", "mongodb://localhost:27017/?replicaSet=rs0");
params.put("mongodbUser", "dbz");
params.put("mongodbPassword", "pwd");
params.put("topicPrefix", "test");
params.put("schemaHistoryInternalFileFilename", "/db_history_file_test");
final String remaining = "test_name";
final String uri = "debezium?name=test_name&offsetStorageFileName=/test&"
+ "databaseHostname=localhost&databaseServerId=1234&databaseUser=dbz&databasePassword=pwd&"
+ "databaseServerName=test&schemaHistoryInternalFileFilename=/test";
try (final DebeziumComponent debeziumComponent = new DebeziumMongodbComponent(new DefaultCamelContext())) {
debeziumComponent.start();
final DebeziumEndpoint debeziumEndpoint = debeziumComponent.createEndpoint(uri, remaining, params);
assertNotNull(debeziumEndpoint);
// test for config
final MongoDbConnectorEmbeddedDebeziumConfiguration configuration
= (MongoDbConnectorEmbeddedDebeziumConfiguration) debeziumEndpoint.getConfiguration();
assertEquals("test_name", configuration.getName());
assertEquals("/offset_test_file", configuration.getOffsetStorageFileName());
assertEquals("mongodb://localhost:27017/?replicaSet=rs0", configuration.getMongodbConnectionString());
assertEquals("dbz", configuration.getMongodbUser());
assertEquals("pwd", configuration.getMongodbPassword());
assertEquals("test", configuration.getTopicPrefix());
assertEquals("/db_history_file_test", configuration.getSchemaHistoryInternalFileFilename());
}
}
@Test
void testIfCreatesComponentWithExternalConfiguration() throws Exception {
final MongoDbConnectorEmbeddedDebeziumConfiguration configuration = new MongoDbConnectorEmbeddedDebeziumConfiguration();
configuration.setName("test_config");
configuration.setMongodbUser("test_db");
configuration.setMongodbPassword("pwd");
configuration.setOffsetStorageFileName("/offset/file");
configuration.setTopicPrefix("test");
final String uri = "debezium:dummy";
try (final DebeziumComponent debeziumComponent = new DebeziumMongodbComponent(new DefaultCamelContext())) {
debeziumComponent.start();
// set configurations
debeziumComponent.setConfiguration(configuration);
final DebeziumEndpoint debeziumEndpoint = debeziumComponent.createEndpoint(uri, null, Collections.emptyMap());
assertNotNull(debeziumEndpoint);
// assert configurations
final MongoDbConnectorEmbeddedDebeziumConfiguration actualConfigurations
= (MongoDbConnectorEmbeddedDebeziumConfiguration) debeziumEndpoint.getConfiguration();
assertNotNull(actualConfigurations);
assertEquals(configuration.getName(), actualConfigurations.getName());
assertEquals(configuration.getMongodbUser(), actualConfigurations.getMongodbUser());
assertEquals(configuration.getConnectorClass(), actualConfigurations.getConnectorClass());
}
}
}
|
DebeziumMongodbComponentTest
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/function/MethodInvokersFailableBiFunctionTest.java
|
{
"start": 1388,
"end": 1556
}
|
class ____ extends MethodFixtures {
@Test
void testApply1Arg() throws Throwable {
// Use a local variable typed to the
|
MethodInvokersFailableBiFunctionTest
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/util/StringIntMap.java
|
{
"start": 827,
"end": 2437
}
|
class ____ {
private final int mask;
private final String[] keys;
private final int[] values;
/**
* Create a new map. The given size <b>must not</b> be exceeded by {@link #put} operations, or
* there may be infinite loops. There is no sanity check for this for performance reasons!
*
* @param size The maximum size of the map
*/
public StringIntMap(int size) {
// min size: at least one slot, aim for 50% load factor
int tableSize = (size * 2) + 1;
// round to next power of two for efficient hash code masking
tableSize = Integer.highestOneBit(tableSize) * 2;
this.mask = tableSize - 1;
this.keys = new String[tableSize];
this.values = new int[keys.length];
}
private int probe(String key) {
int n = keys.length;
int i = key.hashCode() & mask;
while (true) {
String candidate = keys[i];
if (candidate == null) {
return ~i;
} else if (candidate.equals(key)) {
return i;
} else {
i++;
if (i == n) {
i = 0;
}
}
}
}
public int get(String key, int def) {
int i = probe(key);
return i < 0 ? def : values[i];
}
public void put(String key, int value) {
int tableIndex = ~probe(key);
if (tableIndex < 0) {
throw new IllegalArgumentException("Duplicate key");
}
keys[tableIndex] = key;
values[tableIndex] = value;
}
}
|
StringIntMap
|
java
|
apache__flink
|
flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/src/enumerate/BlockSplittingRecursiveEnumerator.java
|
{
"start": 2271,
"end": 6328
}
|
class ____ extends NonSplittingRecursiveEnumerator {
private static final Logger LOG =
LoggerFactory.getLogger(BlockSplittingRecursiveEnumerator.class);
private final String[] nonSplittableFileSuffixes;
/**
* Creates a new enumerator that enumerates all files except hidden files. Hidden files are
* considered files where the filename starts with '.' or with '_'.
*
* <p>The enumerator does not split files that have a suffix corresponding to a known
* compression format (for example '.gzip', '.bz2', '.xy', '.zip', ...). See {@link
* StandardDeCompressors} for details.
*/
public BlockSplittingRecursiveEnumerator() {
this(
new DefaultFileFilter(),
StandardDeCompressors.getCommonSuffixes().toArray(new String[0]));
}
/**
* Creates a new enumerator that uses the given predicate as a filter for file paths, and avoids
* splitting files with the given extension (typically to avoid splitting compressed files).
*/
public BlockSplittingRecursiveEnumerator(
final Predicate<Path> fileFilter, final String[] nonSplittableFileSuffixes) {
super(fileFilter);
this.nonSplittableFileSuffixes = checkNotNull(nonSplittableFileSuffixes);
}
protected void convertToSourceSplits(
final FileStatus file, final FileSystem fs, final List<FileSourceSplit> target)
throws IOException {
if (!isFileSplittable(file.getPath())) {
super.convertToSourceSplits(file, fs, target);
return;
}
final BlockLocation[] blocks = getBlockLocationsForFile(file, fs);
if (blocks == null) {
target.add(
new FileSourceSplit(
getNextId(),
file.getPath(),
0L,
file.getLen(),
file.getModificationTime(),
file.getLen()));
} else {
for (BlockLocation block : blocks) {
target.add(
new FileSourceSplit(
getNextId(),
file.getPath(),
block.getOffset(),
block.getLength(),
file.getModificationTime(),
file.getLen(),
block.getHosts()));
}
}
}
protected boolean isFileSplittable(Path filePath) {
if (nonSplittableFileSuffixes.length == 0) {
return true;
}
final String path = filePath.getPath();
for (String suffix : nonSplittableFileSuffixes) {
if (path.endsWith(suffix)) {
return false;
}
}
return true;
}
@Nullable
private static BlockLocation[] getBlockLocationsForFile(FileStatus file, FileSystem fs)
throws IOException {
final long len = file.getLen();
final BlockLocation[] blocks = fs.getFileBlockLocations(file, 0, len);
if (blocks == null || blocks.length == 0) {
return null;
}
// a cheap check whether we have all blocks. we don't check whether the blocks fully cover
// the
// file (too expensive) but make some sanity checks to catch early the common cases where
// incorrect
// block info is returned by the implementation.
long totalLen = 0L;
for (BlockLocation block : blocks) {
totalLen += block.getLength();
}
if (totalLen != len) {
LOG.warn(
"Block lengths do not match file length for {}. File length is {}, blocks are {}",
file.getPath(),
len,
Arrays.toString(blocks));
return null;
}
return blocks;
}
}
|
BlockSplittingRecursiveEnumerator
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/context/support/HttpRequestHandlerTests.java
|
{
"start": 1493,
"end": 3060
}
|
class ____ {
@Test
void testHttpRequestHandlerServletPassThrough() throws Exception {
MockServletContext servletContext = new MockServletContext();
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
StaticWebApplicationContext wac = new StaticWebApplicationContext();
wac.getBeanFactory().registerSingleton("myHandler", (HttpRequestHandler) (req, res) -> {
assertThat(req).isSameAs(request);
assertThat(res).isSameAs(response);
String exception = request.getParameter("exception");
if ("ServletException".equals(exception)) {
throw new ServletException("test");
}
if ("IOException".equals(exception)) {
throw new IOException("test");
}
res.getWriter().write("myResponse");
});
wac.setServletContext(servletContext);
wac.refresh();
servletContext.setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, wac);
Servlet servlet = new HttpRequestHandlerServlet();
servlet.init(new MockServletConfig(servletContext, "myHandler"));
servlet.service(request, response);
assertThat(response.getContentAsString()).isEqualTo("myResponse");
request.setParameter("exception", "ServletException");
assertThatExceptionOfType(ServletException.class)
.isThrownBy(() -> servlet.service(request, response))
.withMessage("test");
request.setParameter("exception", "IOException");
assertThatIOException()
.isThrownBy(() -> servlet.service(request, response))
.withMessage("test");
}
}
|
HttpRequestHandlerTests
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/TraceDelayerTest.java
|
{
"start": 1026,
"end": 1658
}
|
class ____ extends ContextTestSupport {
@Test
public void testSendingMessageGetsDelayed() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
context.setTracing(true);
from("direct:start").delay(1000).to("mock:foo").to("mock:result");
}
};
}
}
|
TraceDelayerTest
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/cancelling/MapCancelingITCase.java
|
{
"start": 2526,
"end": 2779
}
|
class ____<IN> implements MapFunction<IN, IN> {
private static final long serialVersionUID = 1L;
@Override
public IN map(IN value) throws Exception {
return value;
}
}
private static final
|
IdentityMapper
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/support/RootBeanDefinitionTests.java
|
{
"start": 3468,
"end": 4649
}
|
interface ____ a default AutoCloseable implementation
}
@Test
void resolveDestroyMethodWithMatchingCandidateReplacedForAutoCloseableExecutorService() {
RootBeanDefinition beanDefinition = new RootBeanDefinition(BeanImplementingExecutorServiceAndAutoCloseable.class);
beanDefinition.setDestroyMethodName(AbstractBeanDefinition.INFER_METHOD);
beanDefinition.resolveDestroyMethodIfNecessary();
assertThat(beanDefinition.getDestroyMethodNames()).containsExactly("close");
}
@Test
void resolveDestroyMethodWithNoCandidateSetDestroyMethodNameToNull() {
RootBeanDefinition beanDefinition = new RootBeanDefinition(BeanWithNoDestroyMethod.class);
beanDefinition.setDestroyMethodName(AbstractBeanDefinition.INFER_METHOD);
beanDefinition.resolveDestroyMethodIfNecessary();
assertThat(beanDefinition.getDestroyMethodNames()).isNull();
}
@Test
void resolveDestroyMethodWithNoResolvableType() {
RootBeanDefinition beanDefinition = new RootBeanDefinition();
beanDefinition.setDestroyMethodName(AbstractBeanDefinition.INFER_METHOD);
beanDefinition.resolveDestroyMethodIfNecessary();
assertThat(beanDefinition.getDestroyMethodNames()).isNull();
}
static
|
declares
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/metrics/internals/IntGaugeSuite.java
|
{
"start": 1487,
"end": 2898
}
|
class ____<K> implements AutoCloseable {
/**
* The log4j logger.
*/
private final Logger log;
/**
* The name of this suite.
*/
private final String suiteName;
/**
* The metrics object to use.
*/
private final Metrics metrics;
/**
* A user-supplied callback which translates keys into unique metric names.
*/
private final Function<K, MetricName> metricNameCalculator;
/**
* The maximum number of gauges that we will ever create at once.
*/
private final int maxEntries;
/**
* A map from keys to gauges. Protected by the object monitor.
*/
private final Map<K, StoredIntGauge> gauges;
/**
* The keys of gauges that can be removed, since their value is zero.
* Protected by the object monitor.
*/
private final Set<K> removable;
/**
* A lockless list of pending metrics additions and removals.
*/
private final ConcurrentLinkedDeque<PendingMetricsChange> pending;
/**
* A lock which serializes modifications to metrics. This lock is not
* required to create a new pending operation.
*/
private final Lock modifyMetricsLock;
/**
* True if this suite is closed. Protected by the object monitor.
*/
private boolean closed;
/**
* A pending metrics addition or removal.
*/
private static
|
IntGaugeSuite
|
java
|
google__dagger
|
javatests/dagger/functional/assisted/AssistedFactoryTest.java
|
{
"start": 1062,
"end": 1493
}
|
interface ____ {
// Simple factory using a nested factory.
SimpleFoo.Factory nestedSimpleFooFactory();
Provider<SimpleFoo.Factory> nestedSimpleFooFactoryProvider();
// Simple factory using a non-nested factory.
SimpleFooFactory nonNestedSimpleFooFactory();
// Simple factory using a factory that extends a supertype.
ExtendedSimpleFooFactory extendedSimpleFooFactory();
// Factory as
|
ParentComponent
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java
|
{
"start": 5570,
"end": 6037
}
|
class ____ implements ReservedStateHandler<Map<String, Object>> {
private final String name;
private TestStateHandler(String name) {
this.name = name;
}
@Override
public String name() {
return name;
}
@Override
public Map<String, Object> fromXContent(XContentParser parser) throws IOException {
return parser.map();
}
}
private static
|
TestStateHandler
|
java
|
apache__dubbo
|
dubbo-metrics/dubbo-tracing/src/main/java/org/apache/dubbo/tracing/tracer/brave/BraveProvider.java
|
{
"start": 6780,
"end": 10325
}
|
class ____ {
public static brave.propagation.Propagation.Factory getPropagationFactory(TracingConfig tracingConfig) {
BaggageConfig baggageConfig = tracingConfig.getBaggage();
if (baggageConfig == null || !baggageConfig.getEnabled()) {
return getPropagationFactoryWithoutBaggage(tracingConfig);
}
return getPropagationFactoryWithBaggage(tracingConfig);
}
private static brave.propagation.Propagation.Factory getPropagationFactoryWithoutBaggage(
TracingConfig tracingConfig) {
PropagationType propagationType =
PropagationType.forValue(tracingConfig.getPropagation().getType());
if (PropagationType.W3C == propagationType) {
return new io.micrometer.tracing.brave.bridge.W3CPropagation();
} else {
// Brave default propagation is B3
return brave.propagation.B3Propagation.newFactoryBuilder()
.injectFormat(brave.propagation.B3Propagation.Format.SINGLE_NO_PARENT)
.build();
}
}
private static brave.propagation.Propagation.Factory getPropagationFactoryWithBaggage(
TracingConfig tracingConfig) {
PropagationType propagationType =
PropagationType.forValue(tracingConfig.getPropagation().getType());
brave.propagation.Propagation.Factory delegate;
if (PropagationType.W3C == propagationType) {
delegate = new W3CPropagation(BRAVE_BAGGAGE_MANAGER, Collections.emptyList());
} else {
// Brave default propagation is B3
delegate = brave.propagation.B3Propagation.newFactoryBuilder()
.injectFormat(brave.propagation.B3Propagation.Format.SINGLE_NO_PARENT)
.build();
}
return getBaggageFactoryBuilder(delegate, tracingConfig).build();
}
private static brave.baggage.BaggagePropagation.FactoryBuilder getBaggageFactoryBuilder(
brave.propagation.Propagation.Factory delegate, TracingConfig tracingConfig) {
brave.baggage.BaggagePropagation.FactoryBuilder builder =
brave.baggage.BaggagePropagation.newFactoryBuilder(delegate);
getBaggagePropagationCustomizers(tracingConfig).forEach((customizer) -> customizer.customize(builder));
return builder;
}
private static List<brave.baggage.BaggagePropagationCustomizer> getBaggagePropagationCustomizers(
TracingConfig tracingConfig) {
List<brave.baggage.BaggagePropagationCustomizer> res = new ArrayList<>();
if (tracingConfig.getBaggage().getCorrelation().isEnabled()) {
res.add(remoteFieldsBaggagePropagationCustomizer(tracingConfig));
}
return res;
}
private static brave.baggage.BaggagePropagationCustomizer remoteFieldsBaggagePropagationCustomizer(
TracingConfig tracingConfig) {
return (builder) -> {
List<String> remoteFields = tracingConfig.getBaggage().getRemoteFields();
for (String fieldName : remoteFields) {
builder.add(brave.baggage.BaggagePropagationConfig.SingleBaggageField.remote(
brave.baggage.BaggageField.create(fieldName)));
}
};
}
}
}
|
PropagatorFactory
|
java
|
apache__logging-log4j2
|
log4j-layout-template-json-test/src/test/java/org/apache/logging/log4j/layout/template/json/JsonTemplateLayoutTest.java
|
{
"start": 48487,
"end": 48958
}
|
class ____ implements EventResolver {
private static final AtomicInteger COUNTER = new AtomicInteger(0);
private CustomResolver() {}
@Override
public void resolve(final LogEvent value, final JsonWriter jsonWriter) {
jsonWriter.writeString("CustomValue-" + COUNTER.getAndIncrement());
}
}
@Plugin(name = "CustomResolverFactory", category = TemplateResolverFactory.CATEGORY)
public static final
|
CustomResolver
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/junit4/rules/BaseAppCtxRuleTests.java
|
{
"start": 1938,
"end": 2012
}
|
class ____ {
@Bean
public String foo() {
return "foo";
}
}
}
|
Config
|
java
|
micronaut-projects__micronaut-core
|
http-server/src/test/groovy/io/micronaut/http/server/exceptions/NotAcceptableExceptionTest.java
|
{
"start": 259,
"end": 854
}
|
class ____ {
@Test
void statusIsNotAcceptable() {
NotAcceptableException ex = new NotAcceptableException(List.of(MediaType.TEXT_HTML_TYPE.toString()), List.of(MediaType.APPLICATION_JSON_TYPE.toString()));
assertEquals(HttpStatus.NOT_ACCEPTABLE, ex.getStatus());
assertEquals("Specified Accept Types [text/html] not supported. Supported types: [application/json]", ex.getMessage());
assertEquals(List.of("text/html"), ex.getAcceptedTypes());
assertEquals(List.of("application/json"), ex.getProduceableContentTypes());
}
}
|
NotAcceptableExceptionTest
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/server/header/HttpHeaderWriterWebFilterTests.java
|
{
"start": 1583,
"end": 2479
}
|
class ____ {
@Mock
ServerHttpHeadersWriter writer;
HttpHeaderWriterWebFilter filter;
@BeforeEach
public void setup() {
given(this.writer.writeHttpHeaders(any())).willReturn(Mono.empty());
this.filter = new HttpHeaderWriterWebFilter(this.writer);
}
@Test
public void filterWhenCompleteThenWritten() {
WebTestClient rest = WebTestClientBuilder.bindToWebFilters(this.filter).build();
rest.get().uri("/foo").exchange();
verify(this.writer).writeHttpHeaders(any());
}
@Test
public void filterWhenNotCompleteThenNotWritten() {
WebTestHandler handler = WebTestHandler.bindToWebFilters(this.filter);
WebHandlerResult result = handler.exchange(MockServerHttpRequest.get("/foo"));
verify(this.writer, never()).writeHttpHeaders(any());
result.getExchange().getResponse().setComplete().block();
verify(this.writer).writeHttpHeaders(any());
}
}
|
HttpHeaderWriterWebFilterTests
|
java
|
junit-team__junit5
|
junit-jupiter-params/src/main/java/org/junit/jupiter/params/BeforeParameterizedClassInvocation.java
|
{
"start": 4918,
"end": 5017
}
|
interface ____ be
* executed before {@code @BeforeParameterizedClassInvocation} methods in the
*
|
will
|
java
|
google__auto
|
value/src/it/gwtserializer/src/test/java/com/google/auto/value/client/GwtSerializerTest.java
|
{
"start": 4228,
"end": 4401
}
|
interface ____ {
Builder message(String message);
SimpleWithBuilder build();
}
}
@AutoValue
@GwtCompatible(serializable = true)
abstract static
|
Builder
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/pkg/jar/NativeImageSourceJarBuilder.java
|
{
"start": 1359,
"end": 6642
}
|
class ____ extends AbstractLegacyThinJarBuilder<NativeImageSourceJarBuildItem> {
private static final Logger LOG = Logger.getLogger(NativeImageSourceJarBuilder.class);
public NativeImageSourceJarBuilder(CurateOutcomeBuildItem curateOutcome,
OutputTargetBuildItem outputTarget,
ApplicationInfoBuildItem applicationInfo,
PackageConfig packageConfig,
MainClassBuildItem mainClass,
ApplicationArchivesBuildItem applicationArchives,
TransformedClassesBuildItem transformedClasses,
List<GeneratedClassBuildItem> generatedClasses,
List<GeneratedResourceBuildItem> generatedResources,
List<GeneratedNativeImageClassBuildItem> nativeImageResources,
Set<ArtifactKey> removedArtifactKeys,
ExecutorService executorService) {
super(curateOutcome, outputTarget, applicationInfo, packageConfig, mainClass, applicationArchives, transformedClasses,
augmentGeneratedClasses(generatedClasses, nativeImageResources), generatedResources,
augmentRemovedArtifactKeys(removedArtifactKeys), executorService);
}
public NativeImageSourceJarBuildItem build() throws IOException {
Path targetDirectory = outputTarget.getOutputDirectory()
.resolve(outputTarget.getBaseName() + "-native-image-source-jar");
IoUtils.createOrEmptyDir(targetDirectory);
Path runnerJar = targetDirectory
.resolve(outputTarget.getBaseName() + packageConfig.computedRunnerSuffix() + DOT_JAR);
Path libDir = targetDirectory.resolve(LegacyThinJarFormat.LIB);
Files.createDirectories(libDir);
copyJsonConfigFiles(applicationArchives, targetDirectory);
// complain if graal-sdk is present as a dependency as nativeimage should be preferred
if (curateOutcome.getApplicationModel().getDependencies().stream()
.anyMatch(d -> d.getGroupId().equals("org.graalvm.sdk") && d.getArtifactId().equals("graal-sdk"))) {
LOG.warn("org.graalvm.sdk:graal-sdk is present in the classpath. "
+ "From Quarkus 3.8 and onwards, org.graalvm.sdk:nativeimage should be preferred. "
+ "Make sure you report the issue to the maintainers of the extensions that bring it.");
}
LOG.info("Building native image source jar: " + runnerJar);
doBuild(runnerJar, libDir);
return new NativeImageSourceJarBuildItem(runnerJar, libDir);
}
private static List<GeneratedClassBuildItem> augmentGeneratedClasses(List<GeneratedClassBuildItem> generatedClasses,
List<GeneratedNativeImageClassBuildItem> nativeImageResources) {
List<GeneratedClassBuildItem> allGeneratedClasses = new ArrayList<>(generatedClasses);
allGeneratedClasses.addAll(nativeImageResources.stream()
.map((s) -> new GeneratedClassBuildItem(true, s.getName(), s.getClassData()))
.toList());
return allGeneratedClasses;
}
private static Set<ArtifactKey> augmentRemovedArtifactKeys(Set<ArtifactKey> removedArtifactKeys) {
final Set<ArtifactKey> allRemovedArtifactKeys = new HashSet<>(removedArtifactKeys);
// Remove svm and graal-sdk artifacts as they are provided by GraalVM itself
allRemovedArtifactKeys.add(GACT.fromString("org.graalvm.nativeimage:svm"));
allRemovedArtifactKeys.add(GACT.fromString("org.graalvm.sdk:graal-sdk"));
allRemovedArtifactKeys.add(GACT.fromString("org.graalvm.sdk:nativeimage"));
allRemovedArtifactKeys.add(GACT.fromString("org.graalvm.sdk:word"));
allRemovedArtifactKeys.add(GACT.fromString("org.graalvm.sdk:collections"));
return allRemovedArtifactKeys;
}
/**
* This is done in order to make application specific native image configuration files available to the native-image tool
* without the user needing to know any specific paths.
* The files that are copied don't end up in the native image unless the user specifies they are needed, all this method
* does is copy them to a convenient location
*/
private static void copyJsonConfigFiles(ApplicationArchivesBuildItem applicationArchivesBuildItem, Path thinJarDirectory)
throws IOException {
for (Path root : applicationArchivesBuildItem.getRootArchive().getRootDirectories()) {
try (Stream<Path> stream = Files.find(root, 1, IsJsonFilePredicate.INSTANCE)) {
stream.forEach(new Consumer<Path>() {
@Override
public void accept(Path jsonPath) {
try {
Files.createDirectories(thinJarDirectory);
Files.copy(jsonPath, thinJarDirectory.resolve(jsonPath.getFileName().toString()));
} catch (IOException e) {
throw new UncheckedIOException(
"Unable to copy json config file from " + jsonPath + " to " + thinJarDirectory,
e);
}
}
});
}
}
}
private static
|
NativeImageSourceJarBuilder
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/main/java/io/micronaut/http/server/netty/handler/accesslog/element/LocalIpElementBuilder.java
|
{
"start": 757,
"end": 1026
}
|
class ____ implements LogElementBuilder {
@Override
public LogElement build(String token, String param) {
if (LocalIpElement.LOCAL_IP.equals(token)) {
return LocalIpElement.INSTANCE;
}
return null;
}
}
|
LocalIpElementBuilder
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-ai/camel-spring-ai-chat/src/main/java/org/apache/camel/component/springai/chat/SpringAiChatProducer.java
|
{
"start": 46513,
"end": 52141
}
|
class ____ structured output
if (entityClass != null) {
processEntityRequest(request, exchange, entityClass);
} else if (converter != null) {
processStructuredOutputRequest(request, exchange, converter);
} else {
ChatResponse response = request.call().chatResponse();
populateResponse(response, exchange);
}
}
/**
* Build the list of default advisors based on endpoint configuration.
* <p>
* Advisors are Spring AI components that intercept and modify chat requests and responses. They enable features
* like logging, memory, RAG (Retrieval-Augmented Generation), content filtering, and custom processing. Advisors
* are executed in the order they are added to the list.
* </p>
* <p>
* The following advisors may be added based on configuration:
* </p>
* <ol>
* <li><strong>SimpleLoggerAdvisor</strong> - Always added first. Logs chat requests and responses for debugging and
* monitoring purposes.</li>
* <li><strong>SafeGuardAdvisor</strong> - Added if sensitive words are configured. Blocks requests or responses
* containing configured sensitive words and returns a custom failure response.</li>
* <li><strong>MessageChatMemoryAdvisor</strong> - Added if ChatMemory is configured. Maintains conversation history
* using an in-memory or persistent chat memory store, enabling contextual multi-turn conversations.</li>
* <li><strong>VectorStoreChatMemoryAdvisor</strong> - Added if chatMemoryVectorStore is configured (alternative to
* MessageChatMemoryAdvisor). Stores conversation history in a vector store with automatic conversation isolation by
* ID.</li>
* <li><strong>QuestionAnswerAdvisor</strong> - Added if VectorStore is configured. Implements RAG by automatically
* retrieving relevant context from the vector store and augmenting the user's question with it.</li>
* <li><strong>Custom Advisors</strong> - Any user-provided advisors from the endpoint configuration are added
* last.</li>
* </ol>
* <p>
* <strong>Note:</strong> If both ChatMemory and chatMemoryVectorStore are configured, only ChatMemory will be used
* and a warning will be logged.
* </p>
*
* @return the list of configured advisors to be applied to the ChatClient
*/
private List<Advisor> buildDefaultAdvisors() {
List<Advisor> advisors = new ArrayList<>();
// Always add SimpleLoggerAdvisor first
advisors.add(new SimpleLoggerAdvisor());
// Add SafeGuardAdvisor if configured
SafeGuardAdvisor safeguardAdvisor = buildSafeguardAdvisor(
getEndpoint().getConfiguration().getSafeguardSensitiveWords(),
getEndpoint().getConfiguration().getSafeguardFailureResponse(),
getEndpoint().getConfiguration().getSafeguardOrder(),
false);
if (safeguardAdvisor != null) {
advisors.add(safeguardAdvisor);
}
// Add ChatMemory advisor if configured
ChatMemory chatMemory = getEndpoint().getConfiguration().getChatMemory();
VectorStore chatMemoryVectorStore = getEndpoint().getConfiguration().getChatMemoryVectorStore();
if (chatMemory != null && chatMemoryVectorStore != null) {
LOG.warn("Both chatMemory and chatMemoryVectorStore are configured. Using MessageChatMemoryAdvisor (chatMemory). " +
"Configure only one memory type.");
}
if (chatMemory != null) {
advisors.add(MessageChatMemoryAdvisor.builder(chatMemory).build());
LOG.debug("MessageChatMemoryAdvisor enabled");
} else if (chatMemoryVectorStore != null) {
// Configure VectorStoreChatMemoryAdvisor with conversation isolation
// The conversationId parameter enables automatic filtering by conversation ID
advisors.add(VectorStoreChatMemoryAdvisor.builder(chatMemoryVectorStore)
.conversationId(ChatMemory.CONVERSATION_ID)
.defaultTopK(getEndpoint().getConfiguration().getTopK())
.build());
LOG.debug("VectorStoreChatMemoryAdvisor enabled with conversation isolation and topK={}",
getEndpoint().getConfiguration().getTopK());
}
// Add QuestionAnswerAdvisor if VectorStore is configured
VectorStore vectorStore = getEndpoint().getConfiguration().getVectorStore();
if (vectorStore != null) {
advisors.add(QuestionAnswerAdvisor.builder(vectorStore)
.searchRequest(SearchRequest.builder()
.topK(getEndpoint().getConfiguration().getTopK())
.similarityThreshold(getEndpoint().getConfiguration().getSimilarityThreshold())
.build())
.build());
LOG.debug("QuestionAnswerAdvisor enabled with topK={}, similarityThreshold={}",
getEndpoint().getConfiguration().getTopK(),
getEndpoint().getConfiguration().getSimilarityThreshold());
}
// Add custom advisors if configured
List<Advisor> customAdvisors = getEndpoint().getConfiguration().getAdvisors();
if (customAdvisors != null && !customAdvisors.isEmpty()) {
advisors.addAll(customAdvisors);
LOG.debug("Added {} custom advisors", customAdvisors.size());
}
LOG.debug("Built {} total advisors", advisors.size());
return advisors;
}
}
|
and
|
java
|
quarkusio__quarkus
|
extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/JKSTrustStoreWithOverriddenCredentialsProviderTest.java
|
{
"start": 974,
"end": 2369
}
|
class ____ {
private static final String configuration = """
quarkus.tls.trust-store.jks.path=target/certs/test-credentials-provider-truststore.jks
quarkus.tls.trust-store.jks.password=secret123!
quarkus.tls.trust-store.credentials-provider.name=tls
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addClass(MyCredentialProvider.class)
.add(new StringAsset(configuration), "application.properties"));
@Inject
TlsConfigurationRegistry certificates;
@Test
void test() throws KeyStoreException, CertificateParsingException {
TlsConfiguration def = certificates.getDefault().orElseThrow();
assertThat(def.getTrustStoreOptions()).isNotNull();
assertThat(def.getTrustStore()).isNotNull();
X509Certificate certificate = (X509Certificate) def.getTrustStore().getCertificate("test-credentials-provider");
assertThat(certificate).isNotNull();
assertThat(certificate.getSubjectAlternativeNames()).anySatisfy(l -> {
assertThat(l.get(0)).isEqualTo(2);
assertThat(l.get(1)).isEqualTo("localhost");
});
}
@ApplicationScoped
public static
|
JKSTrustStoreWithOverriddenCredentialsProviderTest
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/multipart/MultipartOutputUsingBlockingEndpointsTest.java
|
{
"start": 715,
"end": 4946
}
|
class ____ extends AbstractMultipartTest {
private static final String EXPECTED_CONTENT_DISPOSITION_PART = "Content-Disposition: form-data; name=\"%s\"";
private static final String EXPECTED_CONTENT_DISPOSITION_FILE_PART = "Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"";
private static final String EXPECTED_CONTENT_TYPE_PART = "Content-Type: %s";
@RegisterExtension
static ResteasyReactiveUnitTest test = new ResteasyReactiveUnitTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClasses(MultipartOutputResource.class, MultipartOutputResponse.class,
MultipartOutputFileResponse.class,
Status.class, FormDataBase.class, OtherPackageFormDataBase.class));
@Test
public void testSimple() {
String response = RestAssured.get("/multipart/output/simple")
.then()
.contentType(ContentType.MULTIPART)
.statusCode(200)
.extract().asString();
assertContainsValue(response, "name", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_NAME);
assertContainsValue(response, "custom-surname", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_SURNAME);
assertContainsValue(response, "custom-status", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_STATUS);
assertContainsValue(response, "active", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_ACTIVE);
assertContainsValue(response, "values", MediaType.TEXT_PLAIN, "[one, two]");
assertContainsValue(response, "num", MediaType.TEXT_PLAIN, "0");
}
@Test
public void testWithFormData() {
ExtractableResponse<?> extractable = RestAssured.get("/multipart/output/with-form-data")
.then()
.contentType(ContentType.MULTIPART)
.statusCode(200)
.extract();
String body = extractable.asString();
assertContainsValue(body, "name", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_NAME);
assertContainsValue(body, "custom-surname", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_SURNAME);
assertContainsValue(body, "custom-status", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_STATUS);
assertContainsValue(body, "active", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_ACTIVE);
assertContainsValue(body, "values", MediaType.TEXT_PLAIN, "[one, two]");
assertThat(extractable.header("Content-Type")).contains("boundary=");
}
@Test
public void testString() {
RestAssured.get("/multipart/output/string")
.then()
.statusCode(200)
.body(equalTo(MultipartOutputResource.RESPONSE_NAME));
}
@Test
public void testWithFiles() {
String response = RestAssured
.given()
.get("/multipart/output/with-file")
.then()
.contentType(ContentType.MULTIPART)
.statusCode(200)
.log().all()
.extract().asString();
assertContainsValue(response, "name", MediaType.TEXT_PLAIN, MultipartOutputResource.RESPONSE_NAME);
assertContainsFile(response, "file", MediaType.APPLICATION_OCTET_STREAM, "lorem.txt");
}
private void assertContainsFile(String response, String name, String contentType, String fileName) {
String[] lines = response.split("--");
assertThat(lines).anyMatch(line -> line.contains(String.format(EXPECTED_CONTENT_DISPOSITION_FILE_PART, name, fileName))
&& line.contains(String.format(EXPECTED_CONTENT_TYPE_PART, contentType)));
}
private void assertContainsValue(String response, String name, String contentType, Object value) {
String[] lines = response.split("--");
assertThat(lines).anyMatch(line -> line.contains(String.format(EXPECTED_CONTENT_DISPOSITION_PART, name))
&& line.contains(String.format(EXPECTED_CONTENT_TYPE_PART, contentType))
&& line.contains(value.toString()));
}
}
|
MultipartOutputUsingBlockingEndpointsTest
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/AnnotatedElementUtilsTests.java
|
{
"start": 50630,
"end": 50904
}
|
interface ____ {
String[] locations() default {};
@AliasFor(annotation = ContextConfig.class, attribute = "locations")
String[] xmlConfigFiles() default {};
}
@ContextConfig
@Retention(RetentionPolicy.RUNTIME)
@
|
HalfConventionBasedAndHalfAliasedComposedContextConfig
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
|
{
"start": 1690,
"end": 3290
}
|
class ____ extends HandledTransportAction<Request, ActionResponse.Empty> {
private final ClusterService clusterService;
private final RepositoriesService repositoriesService;
private final ProjectResolver projectResolver;
@Inject
public TransportAction(
TransportService transportService,
ActionFilters actionFilters,
ThreadPool threadPool,
ClusterService clusterService,
RepositoriesService repositoriesService,
ProjectResolver projectResolver
) {
super(ACTION_NAME, transportService, actionFilters, Request::new, threadPool.executor(ThreadPool.Names.SNAPSHOT));
this.clusterService = clusterService;
this.repositoriesService = repositoriesService;
this.projectResolver = projectResolver;
}
@Override
protected void doExecute(Task task, Request request, ActionListener<ActionResponse.Empty> listener) {
DiscoveryNode localNode = clusterService.state().nodes().getLocalNode();
try {
Repository repository = repositoriesService.repository(projectResolver.getProjectId(), request.repository);
repository.verify(request.verificationToken, localNode);
listener.onResponse(ActionResponse.Empty.INSTANCE);
} catch (Exception e) {
logger.warn(() -> "[" + request.repository + "] failed to verify repository", e);
listener.onFailure(e);
}
}
}
public static
|
TransportAction
|
java
|
apache__camel
|
components/camel-ai/camel-tensorflow-serving/src/main/java/org/apache/camel/component/tensorflow/serving/TensorFlowServingConverter.java
|
{
"start": 1258,
"end": 3244
}
|
class ____ {
@Converter
public static GetModelStatus.GetModelStatusRequest toGetModelStatusRequest(
GetModelStatus.GetModelStatusRequest.Builder builder) {
return builder.build();
}
@Converter
public static GetModelMetadata.GetModelMetadataRequest toGetModelMetadataRequest(
GetModelMetadata.GetModelMetadataRequest.Builder builder) {
return builder.build();
}
@Converter
public static Classification.ClassificationRequest toClassificationRequest(
Classification.ClassificationRequest.Builder builder) {
return builder.build();
}
@Converter
public static Classification.ClassificationRequest toClassificationRequest(
InputOuterClass.Input input) {
return Classification.ClassificationRequest.newBuilder().setInput(input).build();
}
@Converter
public static Classification.ClassificationRequest toClassificationRequest(
InputOuterClass.Input.Builder builder) {
return Classification.ClassificationRequest.newBuilder().setInput(builder).build();
}
@Converter
public static RegressionOuterClass.RegressionRequest toRegressionRequest(
RegressionOuterClass.RegressionRequest.Builder builder) {
return builder.build();
}
@Converter
public static RegressionOuterClass.RegressionRequest toRegressionRequest(
InputOuterClass.Input input) {
return RegressionOuterClass.RegressionRequest.newBuilder().setInput(input).build();
}
@Converter
public static RegressionOuterClass.RegressionRequest toRegressionRequest(
InputOuterClass.Input.Builder builder) {
return RegressionOuterClass.RegressionRequest.newBuilder().setInput(builder).build();
}
@Converter
public static Predict.PredictRequest toPredictRequest(
Predict.PredictRequest.Builder builder) {
return builder.build();
}
}
|
TensorFlowServingConverter
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java
|
{
"start": 67789,
"end": 70016
}
|
class ____ extends LatchAckListener implements ClusterStateTaskListener {
Task() {
super(latch);
}
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
}
masterService.<Task>createTaskQueue("success-test", Priority.NORMAL, batchExecutionContext -> {
for (final var taskContext : batchExecutionContext.taskContexts()) {
final var responseHeaderValue = randomAlphaOfLength(10);
try (var ignored = taskContext.captureResponseHeaders()) {
threadPool.getThreadContext().addResponseHeader(responseHeaderName, responseHeaderValue);
}
taskContext.success(() -> {
assertThat(
threadPool.getThreadContext().getResponseHeaders().get(responseHeaderName),
equalTo(List.of(responseHeaderValue))
);
latch.countDown();
}, taskContext.getTask());
}
return randomBoolean()
? batchExecutionContext.initialState()
: ClusterState.builder(batchExecutionContext.initialState()).build();
}).submitTask("success-test", new Task(), null);
deterministicTaskQueue.runAllTasksInTimeOrder();
safeAwait(latch);
}
// check that we complete a dynamic ack listener supplied by the task
{
final CountDownLatch latch = new CountDownLatch(2);
publisherRef.set((clusterChangedEvent, publishListener, ackListener) -> {
publishListener.onResponse(null);
ackListener.onCommit(TimeValue.ZERO);
ackListener.onNodeAck(node1, null);
ackListener.onNodeAck(node2, null);
ackListener.onNodeAck(node3, null);
});
|
Task
|
java
|
apache__maven
|
compat/maven-resolver-provider/src/main/java/org/apache/maven/repository/internal/scopes/Maven3ScopeManagerConfiguration.java
|
{
"start": 2225,
"end": 7884
}
|
class ____ implements ScopeManagerConfiguration {
public static final Maven3ScopeManagerConfiguration INSTANCE = new Maven3ScopeManagerConfiguration();
public static final String DS_COMPILE = "compile";
public static final String DS_RUNTIME = "runtime";
public static final String DS_PROVIDED = "provided";
public static final String DS_SYSTEM = "system";
public static final String DS_TEST = "test";
public static final String RS_NONE = "none";
public static final String RS_MAIN_COMPILE = "main-compile";
public static final String RS_MAIN_COMPILE_PLUS_RUNTIME = "main-compilePlusRuntime";
public static final String RS_MAIN_RUNTIME = "main-runtime";
public static final String RS_MAIN_RUNTIME_PLUS_SYSTEM = "main-runtimePlusSystem";
public static final String RS_TEST_COMPILE = "test-compile";
public static final String RS_TEST_RUNTIME = "test-runtime";
private Maven3ScopeManagerConfiguration() {}
@Override
public String getId() {
return "Maven3";
}
@Override
public boolean isStrictDependencyScopes() {
return false;
}
@Override
public boolean isStrictResolutionScopes() {
return false;
}
@Override
public BuildScopeSource getBuildScopeSource() {
return new BuildScopeMatrixSource(
Collections.singletonList(CommonBuilds.PROJECT_PATH_MAIN),
Arrays.asList(CommonBuilds.BUILD_PATH_COMPILE, CommonBuilds.BUILD_PATH_RUNTIME),
CommonBuilds.MAVEN_TEST_BUILD_SCOPE);
}
@Override
public Collection<DependencyScope> buildDependencyScopes(InternalScopeManager internalScopeManager) {
ArrayList<DependencyScope> result = new ArrayList<>();
result.add(internalScopeManager.createDependencyScope(DS_COMPILE, true, all()));
result.add(internalScopeManager.createDependencyScope(
DS_RUNTIME, true, byBuildPath(CommonBuilds.BUILD_PATH_RUNTIME)));
result.add(internalScopeManager.createDependencyScope(
DS_PROVIDED,
false,
union(
byBuildPath(CommonBuilds.BUILD_PATH_COMPILE),
select(CommonBuilds.PROJECT_PATH_TEST, CommonBuilds.BUILD_PATH_RUNTIME))));
result.add(internalScopeManager.createDependencyScope(
DS_TEST, false, byProjectPath(CommonBuilds.PROJECT_PATH_TEST)));
result.add(internalScopeManager.createSystemDependencyScope(
DS_SYSTEM, false, all(), ArtifactProperties.LOCAL_PATH));
return result;
}
@Override
public Collection<ResolutionScope> buildResolutionScopes(InternalScopeManager internalScopeManager) {
Collection<DependencyScope> allDependencyScopes = internalScopeManager.getDependencyScopeUniverse();
Collection<DependencyScope> nonTransitiveDependencyScopes =
allDependencyScopes.stream().filter(s -> !s.isTransitive()).collect(Collectors.toSet());
DependencyScope system =
internalScopeManager.getDependencyScope(DS_SYSTEM).orElse(null);
ArrayList<ResolutionScope> result = new ArrayList<>();
result.add(internalScopeManager.createResolutionScope(
RS_NONE,
InternalScopeManager.Mode.REMOVE,
Collections.emptySet(),
Collections.emptySet(),
allDependencyScopes));
result.add(internalScopeManager.createResolutionScope(
RS_MAIN_COMPILE,
InternalScopeManager.Mode.ELIMINATE,
singleton(CommonBuilds.PROJECT_PATH_MAIN, CommonBuilds.BUILD_PATH_COMPILE),
Collections.singletonList(system),
nonTransitiveDependencyScopes));
result.add(internalScopeManager.createResolutionScope(
RS_MAIN_COMPILE_PLUS_RUNTIME,
InternalScopeManager.Mode.ELIMINATE,
byProjectPath(CommonBuilds.PROJECT_PATH_MAIN),
Collections.singletonList(system),
nonTransitiveDependencyScopes));
result.add(internalScopeManager.createResolutionScope(
RS_MAIN_RUNTIME,
InternalScopeManager.Mode.ELIMINATE,
singleton(CommonBuilds.PROJECT_PATH_MAIN, CommonBuilds.BUILD_PATH_RUNTIME),
Collections.emptySet(),
nonTransitiveDependencyScopes));
result.add(internalScopeManager.createResolutionScope(
RS_MAIN_RUNTIME_PLUS_SYSTEM,
InternalScopeManager.Mode.ELIMINATE,
singleton(CommonBuilds.PROJECT_PATH_MAIN, CommonBuilds.BUILD_PATH_RUNTIME),
Collections.singletonList(system),
nonTransitiveDependencyScopes));
result.add(internalScopeManager.createResolutionScope(
RS_TEST_COMPILE,
InternalScopeManager.Mode.ELIMINATE,
select(CommonBuilds.PROJECT_PATH_TEST, CommonBuilds.BUILD_PATH_COMPILE),
Collections.singletonList(system),
nonTransitiveDependencyScopes));
result.add(internalScopeManager.createResolutionScope(
RS_TEST_RUNTIME,
InternalScopeManager.Mode.ELIMINATE,
select(CommonBuilds.PROJECT_PATH_TEST, CommonBuilds.BUILD_PATH_RUNTIME),
Collections.singletonList(system),
nonTransitiveDependencyScopes));
return result;
}
// ===
public static void main(String... args) {
ScopeManagerDump.dump(Maven3ScopeManagerConfiguration.INSTANCE);
}
}
|
Maven3ScopeManagerConfiguration
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/android/FragmentNotInstantiableTest.java
|
{
"start": 6291,
"end": 6459
}
|
class ____ extends Fragment {
public MyFragment2() {}
public MyFragment2(int x) {}
}
public static
|
MyFragment2
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/testresources/Person.java
|
{
"start": 693,
"end": 1255
}
|
class ____ {
private String privateName;
private int age;
Company company;
public Person(int age) {
this.age = age;
}
public Person(String name) {
this.privateName = name;
}
public Person(String name, Company company) {
this.privateName = name;
this.company = company;
}
public String getName() {
return privateName;
}
public void setName(String n) {
this.privateName = n;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public Company getCompany() {
return company;
}
}
|
Person
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/annotation/AutowiredAnnotationBeanPostProcessorTests.java
|
{
"start": 140564,
"end": 140823
}
|
class ____ {
protected ITestBean testBean3;
public ConstructorWithoutFallbackBean(ITestBean testBean3) {
this.testBean3 = testBean3;
}
public ITestBean getTestBean3() {
return this.testBean3;
}
}
public static
|
ConstructorWithoutFallbackBean
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/launcher/InterceptorInjectedLauncherSessionListener.java
|
{
"start": 481,
"end": 1113
}
|
class ____ implements LauncherSessionListener {
public static int CALLS;
public InterceptorInjectedLauncherSessionListener() {
assertEquals(TestLauncherInterceptor1.CLASSLOADER_NAME,
Thread.currentThread().getContextClassLoader().getName());
assertTrue(TestLauncherInterceptor2.INTERCEPTING);
}
@Override
public void launcherSessionOpened(LauncherSession session) {
CALLS++;
}
@Override
public void launcherSessionClosed(LauncherSession session) {
assertEquals(TestLauncherInterceptor1.CLASSLOADER_NAME,
Thread.currentThread().getContextClassLoader().getName());
}
}
|
InterceptorInjectedLauncherSessionListener
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/asyncio/AsyncIOResource.java
|
{
"start": 303,
"end": 650
}
|
class ____ {
@Inject
Vertx vertx;
@GET
public CompletionStage<String> getOnIOThread() {
CompletableFuture<String> ret = new CompletableFuture<>();
vertx.setTimer(100, res -> {
ret.complete(Context.isOnEventLoopThread() ? "OK" : "not on event loop");
});
return ret;
}
}
|
AsyncIOResource
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsV2MemoryResourceHandlerImpl.java
|
{
"start": 908,
"end": 1152
}
|
class ____ handle the memory controller. YARN already ships a
* physical memory monitor in Java but it isn't as
* good as CGroups. This handler sets the soft and hard memory limits. The soft
* limit is set to 90% of the hard limit.
*/
public
|
to
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/reactive/function/client/ServletOAuth2AuthorizedClientExchangeFilterFunction.java
|
{
"start": 26620,
"end": 26912
}
|
interface ____ {
Mono<ClientResponse> handleResponse(ClientRequest request, Mono<ClientResponse> response);
}
/**
* Forwards authentication and authorization failures to an
* {@link OAuth2AuthorizationFailureHandler}.
*
* @since 5.3
*/
private static final
|
ClientResponseHandler
|
java
|
alibaba__nacos
|
ai/src/test/java/com/alibaba/nacos/ai/controller/McpAdminControllerTest.java
|
{
"start": 2784,
"end": 16400
}
|
class ____ {
private static final String MCP_SERVER_SPEC =
"{\"protocol\":\"stdio\",\"frontProtocol\":\"stdio\",\"name\":\"nacos-mcp-server\","
+ "\"id\":\"\",\"description\":\"nacos local mcp server(test version)\",\"versionDetail\":{\"version\":\"1.0.0\"},"
+ "\"enabled\":true,\"localServerConfig\":{}}'";
private McpAdminController mcpAdminController;
private MockMvc mockMvc;
private ConfigurableEnvironment cachedEnvironment;
@Mock
private McpServerOperationService mcpServerOperationService;
@BeforeEach
void setUp() {
cachedEnvironment = EnvUtil.getEnvironment();
EnvUtil.setEnvironment(new StandardEnvironment());
mcpAdminController = new McpAdminController(mcpServerOperationService);
mockMvc = MockMvcBuilders.standaloneSetup(mcpAdminController).build();
}
@AfterEach
void tearDown() {
EnvUtil.setEnvironment(cachedEnvironment);
}
@Test
void listMcpServersWithIllegalSearch() throws Throwable {
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.MCP_ADMIN_PATH + "/list")
.param("search", "illegal");
assertServletException(NacosApiException.class, () -> mockMvc.perform(builder).andReturn(),
"ErrCode:400, ErrMsg:Request parameter `search` should be `accurate` or `blur`.");
}
@Test
void listMcpServersWithIllegalPage() throws Throwable {
final MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.MCP_ADMIN_PATH + "/list")
.param("search", "blur").param("pageNo", "-1");
assertServletException(NacosApiException.class, () -> mockMvc.perform(builder).andReturn(),
"ErrCode:400, ErrMsg:Required parameter 'pageNo' should be positive integer, current is -1");
final MockHttpServletRequestBuilder builder2 = MockMvcRequestBuilders.get(Constants.MCP_ADMIN_PATH + "/list")
.param("search", "blur").param("pageNo", "1").param("pageSize", "0");
assertServletException(NacosApiException.class, () -> mockMvc.perform(builder2).andReturn(),
"ErrCode:400, ErrMsg:Required parameter 'pageSize' should be positive integer, current is 0");
}
@Test
void listMcpServersSuccess() throws Throwable {
when(mcpServerOperationService.listMcpServerWithPage(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, null,
Constants.MCP_LIST_SEARCH_ACCURATE, 1, 100)).thenReturn(new Page<>());
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.MCP_ADMIN_PATH + "/list")
.param("pageNo", "1").param("pageSize", "100");
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<Page<McpServerBasicInfo>> result = JacksonUtils.toObj(response.getContentAsString(),
new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertInstanceOf(Page.class, result.getData());
}
@Test
void getMcpServerWithoutMcpIdAndMcpName() throws Throwable {
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.MCP_ADMIN_PATH);
assertServletException(NacosApiException.class, () -> mockMvc.perform(builder).andReturn(),
"ErrCode:400, ErrMsg:Required parameter 'mcpId' or 'mcpName' type String at lease one is not present");
}
@Test
void getMcpServerWithMcpName() throws Exception {
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.MCP_ADMIN_PATH)
.param("mcpName", "testName");
when(mcpServerOperationService.getMcpServerDetail(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, null, "testName",
null)).thenReturn(new McpServerDetailInfo());
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<McpServerDetailInfo> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertInstanceOf(McpServerDetailInfo.class, result.getData());
}
@Test
void getMcpServerWithMcpId() throws Exception {
String id = UUID.randomUUID().toString();
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.MCP_ADMIN_PATH).param("mcpId", id);
when(mcpServerOperationService.getMcpServerDetail(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, id, null,
null)).thenReturn(new McpServerDetailInfo());
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<McpServerDetailInfo> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertInstanceOf(McpServerDetailInfo.class, result.getData());
}
@Test
void getMcpServerWithVersion() throws Exception {
String id = UUID.randomUUID().toString();
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.MCP_ADMIN_PATH).param("mcpId", id)
.param("namespaceId", "testNs").param("version", "1.0.0");
when(mcpServerOperationService.getMcpServerDetail("testNs", id, null, "1.0.0")).thenReturn(
new McpServerDetailInfo());
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<McpServerDetailInfo> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertInstanceOf(McpServerDetailInfo.class, result.getData());
}
@Test
void createMcpServerWithoutSpec() throws Throwable {
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.post(Constants.MCP_ADMIN_PATH);
assertServletException(NacosApiException.class, () -> mockMvc.perform(builder).andReturn(),
"ErrCode:400, ErrMsg:Required parameter 'serverSpecification' type McpServerBasicInfo is not present");
}
@Test
void createMcpServerWithSpec() throws Exception {
String mcpId = UUID.randomUUID().toString();
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.post(Constants.MCP_ADMIN_PATH)
.param("serverSpecification", MCP_SERVER_SPEC);
when(mcpServerOperationService.createMcpServer(eq(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE),
any(McpServerBasicInfo.class), any(), any())).thenReturn(mcpId);
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<String> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertEquals(mcpId, result.getData());
verify(mcpServerOperationService).createMcpServer(eq(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE),
any(McpServerBasicInfo.class), isNull(), isNull());
}
@Test
void updateMcpServerWithoutSpec() throws Throwable {
// PUT without 'serverSpecification' mirrors the create case: 400-style
// NacosApiException wrapped in a ServletException.
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.put(Constants.MCP_ADMIN_PATH);
assertServletException(NacosApiException.class, () -> mockMvc.perform(builder).andReturn(),
"ErrCode:400, ErrMsg:Required parameter 'serverSpecification' type McpServerBasicInfo is not present");
}
@Test
void updateMcpServerWithSpec() throws Exception {
// Default update: latest=true and overrideExisting=false when the params
// are omitted from the request.
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.put(Constants.MCP_ADMIN_PATH)
.param("serverSpecification", MCP_SERVER_SPEC);
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<String> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertEquals("ok", result.getData());
verify(mcpServerOperationService).updateMcpServer(eq(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE), eq(true),
any(McpServerBasicInfo.class), isNull(), isNull(), eq(false));
}
@Test
void updateMcpServerWithOverrideExisting() throws Exception {
// overrideExisting=true must flow through as the final boolean argument.
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.put(Constants.MCP_ADMIN_PATH)
.param("serverSpecification", MCP_SERVER_SPEC).param("overrideExisting", "true");
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<String> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertEquals("ok", result.getData());
verify(mcpServerOperationService).updateMcpServer(eq(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE), eq(true),
any(McpServerBasicInfo.class), isNull(), isNull(), eq(true));
}
@Test
void updateMcpServerWithoutLatest() throws Exception {
// latest=false must be forwarded as the second argument (publish a
// non-latest version) while overrideExisting stays at its false default.
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.put(Constants.MCP_ADMIN_PATH)
.param("serverSpecification", MCP_SERVER_SPEC).param("latest", "false");
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<String> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertEquals("ok", result.getData());
verify(mcpServerOperationService).updateMcpServer(eq(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE), eq(false),
any(McpServerBasicInfo.class), isNull(), isNull(), eq(false));
}
@Test
void deleteMcpServerWithoutMcpIdAndMcpName() throws Throwable {
// DELETE requires at least one of 'mcpId' or 'mcpName'; neither present
// should fail with a 400-style NacosApiException.
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.delete(Constants.MCP_ADMIN_PATH);
assertServletException(NacosApiException.class, () -> mockMvc.perform(builder).andReturn(),
"ErrCode:400, ErrMsg:Required parameter 'mcpId' or 'mcpName' type String at lease one is not present");
}
@Test
void deleteMcpServerWithMcpName() throws Exception {
// Deleting by name only: id and version are forwarded as null.
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.delete(Constants.MCP_ADMIN_PATH)
.param("mcpName", "testName");
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<String> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertEquals("ok", result.getData());
verify(mcpServerOperationService).deleteMcpServer(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, "testName", null,
null);
}
@Test
void deleteMcpServerWithMcpId() throws Exception {
// Deleting by id only: name and version are forwarded as null.
String id = UUID.randomUUID().toString();
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.delete(Constants.MCP_ADMIN_PATH)
.param("mcpId", id);
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<String> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertEquals("ok", result.getData());
verify(mcpServerOperationService).deleteMcpServer(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, null, id, null);
}
@Test
void deleteMcpServerWithVersion() throws Exception {
// Delete scoped to a specific namespace and version; both must reach the
// operation service unchanged.
String id = UUID.randomUUID().toString();
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.delete(Constants.MCP_ADMIN_PATH)
.param("mcpId", id).param("namespaceId", "testNs").param("version", "1.0.0");
MockHttpServletResponse response = mockMvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
Result<String> result = JacksonUtils.toObj(response.getContentAsString(), new TypeReference<>() {
});
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertEquals("ok", result.getData());
verify(mcpServerOperationService).deleteMcpServer("testNs", null, id, "1.0.0");
}
/**
 * Runs {@code executable} and asserts it throws a {@link ServletException}
 * whose cause is of type {@code expectedCause} and whose {@code toString()}
 * equals {@code expectedMsg}.
 *
 * <p>Bug fixed: the original version silently passed when {@code executable}
 * threw nothing at all — the {@code catch} block (and its assertions) never
 * ran. We now fail explicitly in that case. Any non-ServletException
 * throwable still propagates to the caller unchanged.
 *
 * @param expectedCause expected type of the ServletException's cause
 * @param executable    the action expected to fail
 * @param expectedMsg   expected {@code toString()} of the cause
 */
private static <T extends Throwable> void assertServletException(Class<T> expectedCause, Executable executable,
String expectedMsg) throws Throwable {
try {
executable.execute();
} catch (ServletException e) {
Throwable caused = e.getCause();
assertInstanceOf(expectedCause, caused);
assertEquals(expectedMsg, caused.toString());
// Assertions passed; the expected failure occurred.
return;
}
// Reaching here means no ServletException was thrown — that is a test failure,
// not a pass. AssertionError is deliberately thrown outside the try block.
throw new AssertionError(
"Expected ServletException with cause " + expectedCause.getName() + " but nothing was thrown");
}
}
|
McpAdminControllerTest
|
java
|
apache__kafka
|
shell/src/main/java/org/apache/kafka/shell/command/PwdCommandHandler.java
|
{
"start": 1357,
"end": 2654
}
|
/**
 * Command-type descriptor for the shell-only {@code pwd} command; it carries
 * the metadata (name, description) and builds the handler instance.
 */
class ____ implements Commands.Type {
private PwdCommandType() {
}
@Override
public String name() {
return "pwd";
}
@Override
public String description() {
return "Print the current working directory.";
}
@Override
public boolean shellOnly() {
// pwd only makes sense in the interactive shell session.
return true;
}
@Override
public void addArguments(ArgumentParser parser) {
// nothing to do: pwd takes no arguments
}
@Override
public Commands.Handler createHandler(Namespace namespace) {
return new PwdCommandHandler();
}
@Override
public void completeNext(
MetadataShellState state,
List<String> nextWords,
List<Candidate> candidates
) throws Exception {
// nothing to do: no arguments means no completions to offer
}
}
@Override
public void run(
Optional<InteractiveShell> shell,
PrintWriter writer,
MetadataShellState state
) throws Exception {
// Print the shell's current working directory; the shell itself is unused.
writer.println(state.workingDirectory());
}
@Override
public int hashCode() {
// All handler instances are equal (see equals), so a constant hash is valid.
return 0;
}
@Override
public boolean equals(Object other) {
// The handler is stateless, so any two instances are interchangeable.
return other instanceof PwdCommandHandler;
}
}
|
PwdCommandType
|
java
|
quarkusio__quarkus
|
extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftUtils.java
|
{
"start": 714,
"end": 945
}
|
class ____ copied from Dekorate, with the difference that the {@code waitForImageStreamTags} method
* take a client as the argument
*
* TODO: Update dekorate to take the client as an argument and then remove this class
*/
public
|
is
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/router/BaseRouterPoliciesTest.java
|
{
"start": 2744,
"end": 2825
}
|
class ____ router policies tests, tests for null input cases.
*/
public abstract
|
for
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeinfo/descriptor/MapTypeDescriptorImpl.java
|
{
"start": 1244,
"end": 1501
}
|
class ____ initiated via
* reflection. So, changing its path or constructor will brake tests.
*
* @param <K> type for which key {@link TypeInformation} is created.
* @param <V> type for which value {@link TypeInformation} is created.
*/
@Internal
public
|
is
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/data/conversion/DateLocalDateConverter.java
|
{
"start": 1108,
"end": 1536
}
|
/**
 * Converter between Flink's internal DATE representation (an {@code Integer}
 * day count) and the external {@link java.time.LocalDate}, delegating both
 * directions to {@code DateTimeUtils}.
 */
class ____
implements DataStructureConverter<Integer, java.time.LocalDate> {
private static final long serialVersionUID = 1L;
@Override
public Integer toInternal(java.time.LocalDate external) {
return DateTimeUtils.toInternal(external);
}
@Override
public java.time.LocalDate toExternal(Integer internal) {
return DateTimeUtils.toLocalDate(internal);
}
}
|
DateLocalDateConverter
|
java
|
quarkusio__quarkus
|
extensions/liquibase/liquibase-common/src/main/java/io/quarkus/liquibase/common/runtime/graal/SubstituteScope.java
|
{
"start": 377,
"end": 1021
}
|
class ____ {
@Alias
@RecomputeFieldValue(kind = RecomputeFieldValue.Kind.NewInstance, declClass = SmartMap.class)
private SmartMap values = new SmartMap();
/**
* All the following code is here to reset <a href=
* "https://github.com/liquibase/liquibase/blob/51de1de4437e5b5fbcbd25cff006d1c6d5313bab/liquibase-standard/src/main/java/liquibase/Scope.java#L95-L102">this</a>
*/
@Alias
@RecomputeFieldValue(kind = RecomputeFieldValue.Kind.NewInstance, declClass = CustomInheritableThreadLocal.class)
private static InheritableThreadLocal<ScopeManager> scopeManager;
public static final
|
SubstituteScope
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/DefaultCheckpointStatsTracker.java
|
{
"start": 33879,
"end": 34183
}
|
/**
 * Gauge exposing the metadata size of the latest completed checkpoint,
 * or {@code -1} when no checkpoint has completed yet.
 */
class ____ implements Gauge<Long> {
@Override
public Long getValue() {
// Read the volatile reference once so the null check and the access agree.
CompletedCheckpointStats completed = latestCompletedCheckpoint;
return completed != null ? completed.getMetadataSize() : -1L;
}
}
private
|
LatestCompletedCheckpointMetadataSizeGauge
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.